Switch Neon C2 microkernels from gemmlowp to rndnu requantization
- full set of microkernels for rndnu
- production microkernels for gemmlowp
PiperOrigin-RevId: 405528275
diff --git a/BUILD.bazel b/BUILD.bazel
index 24edf67..3644f60 100644
--- a/BUILD.bazel
+++ b/BUILD.bazel
@@ -2483,8 +2483,8 @@
"src/qs8-gemm/gen/1x8-minmax-rndnu-neon-mull-addw-dup.c",
"src/qs8-gemm/gen/1x8c2-minmax-fp32-neon-mlal-padal-dup.c",
"src/qs8-gemm/gen/1x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c",
- "src/qs8-gemm/gen/1x8c2-minmax-gemmlowp-neon-mull-padal-dup.c",
"src/qs8-gemm/gen/1x8c2-minmax-rndnu-neon-mlal-padal-dup.c",
+ "src/qs8-gemm/gen/1x8c2-minmax-rndnu-neon-mull-padal-dup.c",
"src/qs8-gemm/gen/1x8c8-minmax-fp32-neon-mlal-padal.c",
"src/qs8-gemm/gen/1x8c8-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-gemm/gen/1x8c8-minmax-gemmlowp-neon-mull-padal.c",
@@ -2495,8 +2495,8 @@
"src/qs8-gemm/gen/1x16-minmax-rndnu-neon-mlal-lane-prfm.c",
"src/qs8-gemm/gen/1x16-minmax-rndnu-neon-mlal-lane.c",
"src/qs8-gemm/gen/1x16-minmax-rndnu-neon-mull-addw-dup.c",
- "src/qs8-gemm/gen/1x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c",
- "src/qs8-gemm/gen/1x16c2-minmax-gemmlowp-neon-mull-padal-dup.c",
+ "src/qs8-gemm/gen/1x16c2-minmax-rndnu-neon-mlal-padal-dup.c",
+ "src/qs8-gemm/gen/1x16c2-minmax-rndnu-neon-mull-padal-dup.c",
"src/qs8-gemm/gen/1x16c8-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-gemm/gen/1x16c8-minmax-gemmlowp-neon-mull-padal.c",
"src/qs8-gemm/gen/1x16c16-minmax-gemmlowp-neon-mlal-padal.c",
@@ -2504,8 +2504,8 @@
"src/qs8-gemm/gen/2x8-minmax-rndnu-neon-mull-addw-dup.c",
"src/qs8-gemm/gen/2x8c2-minmax-fp32-neon-mlal-padal-dup.c",
"src/qs8-gemm/gen/2x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c",
- "src/qs8-gemm/gen/2x8c2-minmax-gemmlowp-neon-mull-padal-dup.c",
"src/qs8-gemm/gen/2x8c2-minmax-rndnu-neon-mlal-padal-dup.c",
+ "src/qs8-gemm/gen/2x8c2-minmax-rndnu-neon-mull-padal-dup.c",
"src/qs8-gemm/gen/2x8c8-minmax-fp32-neon-mlal-padal.c",
"src/qs8-gemm/gen/2x8c8-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-gemm/gen/2x8c8-minmax-gemmlowp-neon-mull-padal.c",
@@ -2513,29 +2513,29 @@
"src/qs8-gemm/gen/2x8c16-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-gemm/gen/2x16-minmax-gemmlowp-neon-mlal-lane.c",
"src/qs8-gemm/gen/2x16-minmax-rndnu-neon-mull-addw-dup.c",
- "src/qs8-gemm/gen/2x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c",
- "src/qs8-gemm/gen/2x16c2-minmax-gemmlowp-neon-mull-padal-dup.c",
+ "src/qs8-gemm/gen/2x16c2-minmax-rndnu-neon-mlal-padal-dup.c",
+ "src/qs8-gemm/gen/2x16c2-minmax-rndnu-neon-mull-padal-dup.c",
"src/qs8-gemm/gen/2x16c8-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-gemm/gen/2x16c8-minmax-gemmlowp-neon-mull-padal.c",
"src/qs8-gemm/gen/2x16c16-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-gemm/gen/3x8-minmax-gemmlowp-neon-mlal-lane.c",
"src/qs8-gemm/gen/3x8-minmax-rndnu-neon-mull-addw-dup.c",
- "src/qs8-gemm/gen/3x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c",
- "src/qs8-gemm/gen/3x8c2-minmax-gemmlowp-neon-mull-padal-dup.c",
+ "src/qs8-gemm/gen/3x8c2-minmax-rndnu-neon-mlal-padal-dup.c",
+ "src/qs8-gemm/gen/3x8c2-minmax-rndnu-neon-mull-padal-dup.c",
"src/qs8-gemm/gen/3x8c8-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-gemm/gen/3x8c8-minmax-gemmlowp-neon-mull-padal.c",
"src/qs8-gemm/gen/3x8c16-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-gemm/gen/3x16-minmax-gemmlowp-neon-mlal-lane.c",
"src/qs8-gemm/gen/3x16-minmax-rndnu-neon-mull-addw-dup.c",
- "src/qs8-gemm/gen/3x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c",
- "src/qs8-gemm/gen/3x16c2-minmax-gemmlowp-neon-mull-padal-dup.c",
+ "src/qs8-gemm/gen/3x16c2-minmax-rndnu-neon-mlal-padal-dup.c",
+ "src/qs8-gemm/gen/3x16c2-minmax-rndnu-neon-mull-padal-dup.c",
"src/qs8-gemm/gen/3x16c8-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-gemm/gen/3x16c8-minmax-gemmlowp-neon-mull-padal.c",
"src/qs8-gemm/gen/3x16c16-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-gemm/gen/4x8-minmax-gemmlowp-neon-mlal-lane.c",
"src/qs8-gemm/gen/4x8-minmax-rndnu-neon-mull-addw-dup.c",
- "src/qs8-gemm/gen/4x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c",
- "src/qs8-gemm/gen/4x8c2-minmax-gemmlowp-neon-mull-padal-dup.c",
+ "src/qs8-gemm/gen/4x8c2-minmax-rndnu-neon-mlal-padal-dup.c",
+ "src/qs8-gemm/gen/4x8c2-minmax-rndnu-neon-mull-padal-dup.c",
"src/qs8-gemm/gen/4x8c8-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-gemm/gen/4x8c8-minmax-gemmlowp-neon-mull-padal.c",
"src/qs8-gemm/gen/4x8c16-minmax-gemmlowp-neon-mlal-padal.c",
@@ -2544,8 +2544,8 @@
"src/qs8-gemm/gen/4x16-minmax-rndnu-neon-mlal-lane-prfm.c",
"src/qs8-gemm/gen/4x16-minmax-rndnu-neon-mlal-lane.c",
"src/qs8-gemm/gen/4x16-minmax-rndnu-neon-mull-addw-dup.c",
- "src/qs8-gemm/gen/4x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c",
- "src/qs8-gemm/gen/4x16c2-minmax-gemmlowp-neon-mull-padal-dup.c",
+ "src/qs8-gemm/gen/4x16c2-minmax-rndnu-neon-mlal-padal-dup.c",
+ "src/qs8-gemm/gen/4x16c2-minmax-rndnu-neon-mull-padal-dup.c",
"src/qs8-gemm/gen/4x16c8-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-gemm/gen/4x16c8-minmax-gemmlowp-neon-mull-padal.c",
"src/qs8-gemm/gen/4x16c16-minmax-gemmlowp-neon-mlal-padal.c",
@@ -2556,8 +2556,8 @@
"src/qs8-igemm/gen/1x8-minmax-rndnu-neon-mull-addw-dup.c",
"src/qs8-igemm/gen/1x8c2-minmax-fp32-neon-mlal-padal-dup.c",
"src/qs8-igemm/gen/1x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c",
- "src/qs8-igemm/gen/1x8c2-minmax-gemmlowp-neon-mull-padal-dup.c",
"src/qs8-igemm/gen/1x8c2-minmax-rndnu-neon-mlal-padal-dup.c",
+ "src/qs8-igemm/gen/1x8c2-minmax-rndnu-neon-mull-padal-dup.c",
"src/qs8-igemm/gen/1x8c8-minmax-fp32-neon-mlal-padal.c",
"src/qs8-igemm/gen/1x8c8-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-igemm/gen/1x8c8-minmax-gemmlowp-neon-mull-padal.c",
@@ -2568,8 +2568,8 @@
"src/qs8-igemm/gen/1x16-minmax-rndnu-neon-mlal-lane-prfm.c",
"src/qs8-igemm/gen/1x16-minmax-rndnu-neon-mlal-lane.c",
"src/qs8-igemm/gen/1x16-minmax-rndnu-neon-mull-addw-dup.c",
- "src/qs8-igemm/gen/1x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c",
- "src/qs8-igemm/gen/1x16c2-minmax-gemmlowp-neon-mull-padal-dup.c",
+ "src/qs8-igemm/gen/1x16c2-minmax-rndnu-neon-mlal-padal-dup.c",
+ "src/qs8-igemm/gen/1x16c2-minmax-rndnu-neon-mull-padal-dup.c",
"src/qs8-igemm/gen/1x16c8-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-igemm/gen/1x16c8-minmax-gemmlowp-neon-mull-padal.c",
"src/qs8-igemm/gen/1x16c16-minmax-gemmlowp-neon-mlal-padal.c",
@@ -2577,8 +2577,8 @@
"src/qs8-igemm/gen/2x8-minmax-rndnu-neon-mull-addw-dup.c",
"src/qs8-igemm/gen/2x8c2-minmax-fp32-neon-mlal-padal-dup.c",
"src/qs8-igemm/gen/2x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c",
- "src/qs8-igemm/gen/2x8c2-minmax-gemmlowp-neon-mull-padal-dup.c",
"src/qs8-igemm/gen/2x8c2-minmax-rndnu-neon-mlal-padal-dup.c",
+ "src/qs8-igemm/gen/2x8c2-minmax-rndnu-neon-mull-padal-dup.c",
"src/qs8-igemm/gen/2x8c8-minmax-fp32-neon-mlal-padal.c",
"src/qs8-igemm/gen/2x8c8-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-igemm/gen/2x8c8-minmax-gemmlowp-neon-mull-padal.c",
@@ -2586,29 +2586,29 @@
"src/qs8-igemm/gen/2x8c16-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-igemm/gen/2x16-minmax-gemmlowp-neon-mlal-lane.c",
"src/qs8-igemm/gen/2x16-minmax-rndnu-neon-mull-addw-dup.c",
- "src/qs8-igemm/gen/2x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c",
- "src/qs8-igemm/gen/2x16c2-minmax-gemmlowp-neon-mull-padal-dup.c",
+ "src/qs8-igemm/gen/2x16c2-minmax-rndnu-neon-mlal-padal-dup.c",
+ "src/qs8-igemm/gen/2x16c2-minmax-rndnu-neon-mull-padal-dup.c",
"src/qs8-igemm/gen/2x16c8-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-igemm/gen/2x16c8-minmax-gemmlowp-neon-mull-padal.c",
"src/qs8-igemm/gen/2x16c16-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-igemm/gen/3x8-minmax-gemmlowp-neon-mlal-lane.c",
"src/qs8-igemm/gen/3x8-minmax-rndnu-neon-mull-addw-dup.c",
- "src/qs8-igemm/gen/3x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c",
- "src/qs8-igemm/gen/3x8c2-minmax-gemmlowp-neon-mull-padal-dup.c",
+ "src/qs8-igemm/gen/3x8c2-minmax-rndnu-neon-mlal-padal-dup.c",
+ "src/qs8-igemm/gen/3x8c2-minmax-rndnu-neon-mull-padal-dup.c",
"src/qs8-igemm/gen/3x8c8-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-igemm/gen/3x8c8-minmax-gemmlowp-neon-mull-padal.c",
"src/qs8-igemm/gen/3x8c16-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-igemm/gen/3x16-minmax-gemmlowp-neon-mlal-lane.c",
"src/qs8-igemm/gen/3x16-minmax-rndnu-neon-mull-addw-dup.c",
- "src/qs8-igemm/gen/3x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c",
- "src/qs8-igemm/gen/3x16c2-minmax-gemmlowp-neon-mull-padal-dup.c",
+ "src/qs8-igemm/gen/3x16c2-minmax-rndnu-neon-mlal-padal-dup.c",
+ "src/qs8-igemm/gen/3x16c2-minmax-rndnu-neon-mull-padal-dup.c",
"src/qs8-igemm/gen/3x16c8-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-igemm/gen/3x16c8-minmax-gemmlowp-neon-mull-padal.c",
"src/qs8-igemm/gen/3x16c16-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-igemm/gen/4x8-minmax-gemmlowp-neon-mlal-lane.c",
"src/qs8-igemm/gen/4x8-minmax-rndnu-neon-mull-addw-dup.c",
- "src/qs8-igemm/gen/4x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c",
- "src/qs8-igemm/gen/4x8c2-minmax-gemmlowp-neon-mull-padal-dup.c",
+ "src/qs8-igemm/gen/4x8c2-minmax-rndnu-neon-mlal-padal-dup.c",
+ "src/qs8-igemm/gen/4x8c2-minmax-rndnu-neon-mull-padal-dup.c",
"src/qs8-igemm/gen/4x8c8-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-igemm/gen/4x8c8-minmax-gemmlowp-neon-mull-padal.c",
"src/qs8-igemm/gen/4x8c16-minmax-gemmlowp-neon-mlal-padal.c",
@@ -2617,8 +2617,8 @@
"src/qs8-igemm/gen/4x16-minmax-rndnu-neon-mlal-lane-prfm.c",
"src/qs8-igemm/gen/4x16-minmax-rndnu-neon-mlal-lane.c",
"src/qs8-igemm/gen/4x16-minmax-rndnu-neon-mull-addw-dup.c",
- "src/qs8-igemm/gen/4x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c",
- "src/qs8-igemm/gen/4x16c2-minmax-gemmlowp-neon-mull-padal-dup.c",
+ "src/qs8-igemm/gen/4x16c2-minmax-rndnu-neon-mlal-padal-dup.c",
+ "src/qs8-igemm/gen/4x16c2-minmax-rndnu-neon-mull-padal-dup.c",
"src/qs8-igemm/gen/4x16c8-minmax-gemmlowp-neon-mlal-padal.c",
"src/qs8-igemm/gen/4x16c8-minmax-gemmlowp-neon-mull-padal.c",
"src/qs8-igemm/gen/4x16c16-minmax-gemmlowp-neon-mlal-padal.c",
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 51bb58d..ba5efd6 100755
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1535,8 +1535,8 @@
src/qs8-gemm/gen/1x8-minmax-rndnu-neon-mull-addw-dup.c
src/qs8-gemm/gen/1x8c2-minmax-fp32-neon-mlal-padal-dup.c
src/qs8-gemm/gen/1x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c
- src/qs8-gemm/gen/1x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
src/qs8-gemm/gen/1x8c2-minmax-rndnu-neon-mlal-padal-dup.c
+ src/qs8-gemm/gen/1x8c2-minmax-rndnu-neon-mull-padal-dup.c
src/qs8-gemm/gen/1x8c8-minmax-fp32-neon-mlal-padal.c
src/qs8-gemm/gen/1x8c8-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-gemm/gen/1x8c8-minmax-gemmlowp-neon-mull-padal.c
@@ -1547,8 +1547,8 @@
src/qs8-gemm/gen/1x16-minmax-rndnu-neon-mlal-lane-prfm.c
src/qs8-gemm/gen/1x16-minmax-rndnu-neon-mlal-lane.c
src/qs8-gemm/gen/1x16-minmax-rndnu-neon-mull-addw-dup.c
- src/qs8-gemm/gen/1x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
- src/qs8-gemm/gen/1x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
+ src/qs8-gemm/gen/1x16c2-minmax-rndnu-neon-mlal-padal-dup.c
+ src/qs8-gemm/gen/1x16c2-minmax-rndnu-neon-mull-padal-dup.c
src/qs8-gemm/gen/1x16c8-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-gemm/gen/1x16c8-minmax-gemmlowp-neon-mull-padal.c
src/qs8-gemm/gen/1x16c16-minmax-gemmlowp-neon-mlal-padal.c
@@ -1556,8 +1556,8 @@
src/qs8-gemm/gen/2x8-minmax-rndnu-neon-mull-addw-dup.c
src/qs8-gemm/gen/2x8c2-minmax-fp32-neon-mlal-padal-dup.c
src/qs8-gemm/gen/2x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c
- src/qs8-gemm/gen/2x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
src/qs8-gemm/gen/2x8c2-minmax-rndnu-neon-mlal-padal-dup.c
+ src/qs8-gemm/gen/2x8c2-minmax-rndnu-neon-mull-padal-dup.c
src/qs8-gemm/gen/2x8c8-minmax-fp32-neon-mlal-padal.c
src/qs8-gemm/gen/2x8c8-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-gemm/gen/2x8c8-minmax-gemmlowp-neon-mull-padal.c
@@ -1565,29 +1565,29 @@
src/qs8-gemm/gen/2x8c16-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-gemm/gen/2x16-minmax-gemmlowp-neon-mlal-lane.c
src/qs8-gemm/gen/2x16-minmax-rndnu-neon-mull-addw-dup.c
- src/qs8-gemm/gen/2x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
- src/qs8-gemm/gen/2x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
+ src/qs8-gemm/gen/2x16c2-minmax-rndnu-neon-mlal-padal-dup.c
+ src/qs8-gemm/gen/2x16c2-minmax-rndnu-neon-mull-padal-dup.c
src/qs8-gemm/gen/2x16c8-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-gemm/gen/2x16c8-minmax-gemmlowp-neon-mull-padal.c
src/qs8-gemm/gen/2x16c16-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-gemm/gen/3x8-minmax-gemmlowp-neon-mlal-lane.c
src/qs8-gemm/gen/3x8-minmax-rndnu-neon-mull-addw-dup.c
- src/qs8-gemm/gen/3x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c
- src/qs8-gemm/gen/3x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
+ src/qs8-gemm/gen/3x8c2-minmax-rndnu-neon-mlal-padal-dup.c
+ src/qs8-gemm/gen/3x8c2-minmax-rndnu-neon-mull-padal-dup.c
src/qs8-gemm/gen/3x8c8-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-gemm/gen/3x8c8-minmax-gemmlowp-neon-mull-padal.c
src/qs8-gemm/gen/3x8c16-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-gemm/gen/3x16-minmax-gemmlowp-neon-mlal-lane.c
src/qs8-gemm/gen/3x16-minmax-rndnu-neon-mull-addw-dup.c
- src/qs8-gemm/gen/3x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
- src/qs8-gemm/gen/3x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
+ src/qs8-gemm/gen/3x16c2-minmax-rndnu-neon-mlal-padal-dup.c
+ src/qs8-gemm/gen/3x16c2-minmax-rndnu-neon-mull-padal-dup.c
src/qs8-gemm/gen/3x16c8-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-gemm/gen/3x16c8-minmax-gemmlowp-neon-mull-padal.c
src/qs8-gemm/gen/3x16c16-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-gemm/gen/4x8-minmax-gemmlowp-neon-mlal-lane.c
src/qs8-gemm/gen/4x8-minmax-rndnu-neon-mull-addw-dup.c
- src/qs8-gemm/gen/4x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c
- src/qs8-gemm/gen/4x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
+ src/qs8-gemm/gen/4x8c2-minmax-rndnu-neon-mlal-padal-dup.c
+ src/qs8-gemm/gen/4x8c2-minmax-rndnu-neon-mull-padal-dup.c
src/qs8-gemm/gen/4x8c8-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-gemm/gen/4x8c8-minmax-gemmlowp-neon-mull-padal.c
src/qs8-gemm/gen/4x8c16-minmax-gemmlowp-neon-mlal-padal.c
@@ -1596,8 +1596,8 @@
src/qs8-gemm/gen/4x16-minmax-rndnu-neon-mlal-lane-prfm.c
src/qs8-gemm/gen/4x16-minmax-rndnu-neon-mlal-lane.c
src/qs8-gemm/gen/4x16-minmax-rndnu-neon-mull-addw-dup.c
- src/qs8-gemm/gen/4x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
- src/qs8-gemm/gen/4x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
+ src/qs8-gemm/gen/4x16c2-minmax-rndnu-neon-mlal-padal-dup.c
+ src/qs8-gemm/gen/4x16c2-minmax-rndnu-neon-mull-padal-dup.c
src/qs8-gemm/gen/4x16c8-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-gemm/gen/4x16c8-minmax-gemmlowp-neon-mull-padal.c
src/qs8-gemm/gen/4x16c16-minmax-gemmlowp-neon-mlal-padal.c
@@ -1608,8 +1608,8 @@
src/qs8-igemm/gen/1x8-minmax-rndnu-neon-mull-addw-dup.c
src/qs8-igemm/gen/1x8c2-minmax-fp32-neon-mlal-padal-dup.c
src/qs8-igemm/gen/1x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c
- src/qs8-igemm/gen/1x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
src/qs8-igemm/gen/1x8c2-minmax-rndnu-neon-mlal-padal-dup.c
+ src/qs8-igemm/gen/1x8c2-minmax-rndnu-neon-mull-padal-dup.c
src/qs8-igemm/gen/1x8c8-minmax-fp32-neon-mlal-padal.c
src/qs8-igemm/gen/1x8c8-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-igemm/gen/1x8c8-minmax-gemmlowp-neon-mull-padal.c
@@ -1620,8 +1620,8 @@
src/qs8-igemm/gen/1x16-minmax-rndnu-neon-mlal-lane-prfm.c
src/qs8-igemm/gen/1x16-minmax-rndnu-neon-mlal-lane.c
src/qs8-igemm/gen/1x16-minmax-rndnu-neon-mull-addw-dup.c
- src/qs8-igemm/gen/1x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
- src/qs8-igemm/gen/1x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
+ src/qs8-igemm/gen/1x16c2-minmax-rndnu-neon-mlal-padal-dup.c
+ src/qs8-igemm/gen/1x16c2-minmax-rndnu-neon-mull-padal-dup.c
src/qs8-igemm/gen/1x16c8-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-igemm/gen/1x16c8-minmax-gemmlowp-neon-mull-padal.c
src/qs8-igemm/gen/1x16c16-minmax-gemmlowp-neon-mlal-padal.c
@@ -1629,8 +1629,8 @@
src/qs8-igemm/gen/2x8-minmax-rndnu-neon-mull-addw-dup.c
src/qs8-igemm/gen/2x8c2-minmax-fp32-neon-mlal-padal-dup.c
src/qs8-igemm/gen/2x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c
- src/qs8-igemm/gen/2x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
src/qs8-igemm/gen/2x8c2-minmax-rndnu-neon-mlal-padal-dup.c
+ src/qs8-igemm/gen/2x8c2-minmax-rndnu-neon-mull-padal-dup.c
src/qs8-igemm/gen/2x8c8-minmax-fp32-neon-mlal-padal.c
src/qs8-igemm/gen/2x8c8-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-igemm/gen/2x8c8-minmax-gemmlowp-neon-mull-padal.c
@@ -1638,29 +1638,29 @@
src/qs8-igemm/gen/2x8c16-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-igemm/gen/2x16-minmax-gemmlowp-neon-mlal-lane.c
src/qs8-igemm/gen/2x16-minmax-rndnu-neon-mull-addw-dup.c
- src/qs8-igemm/gen/2x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
- src/qs8-igemm/gen/2x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
+ src/qs8-igemm/gen/2x16c2-minmax-rndnu-neon-mlal-padal-dup.c
+ src/qs8-igemm/gen/2x16c2-minmax-rndnu-neon-mull-padal-dup.c
src/qs8-igemm/gen/2x16c8-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-igemm/gen/2x16c8-minmax-gemmlowp-neon-mull-padal.c
src/qs8-igemm/gen/2x16c16-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-igemm/gen/3x8-minmax-gemmlowp-neon-mlal-lane.c
src/qs8-igemm/gen/3x8-minmax-rndnu-neon-mull-addw-dup.c
- src/qs8-igemm/gen/3x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c
- src/qs8-igemm/gen/3x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
+ src/qs8-igemm/gen/3x8c2-minmax-rndnu-neon-mlal-padal-dup.c
+ src/qs8-igemm/gen/3x8c2-minmax-rndnu-neon-mull-padal-dup.c
src/qs8-igemm/gen/3x8c8-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-igemm/gen/3x8c8-minmax-gemmlowp-neon-mull-padal.c
src/qs8-igemm/gen/3x8c16-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-igemm/gen/3x16-minmax-gemmlowp-neon-mlal-lane.c
src/qs8-igemm/gen/3x16-minmax-rndnu-neon-mull-addw-dup.c
- src/qs8-igemm/gen/3x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
- src/qs8-igemm/gen/3x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
+ src/qs8-igemm/gen/3x16c2-minmax-rndnu-neon-mlal-padal-dup.c
+ src/qs8-igemm/gen/3x16c2-minmax-rndnu-neon-mull-padal-dup.c
src/qs8-igemm/gen/3x16c8-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-igemm/gen/3x16c8-minmax-gemmlowp-neon-mull-padal.c
src/qs8-igemm/gen/3x16c16-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-igemm/gen/4x8-minmax-gemmlowp-neon-mlal-lane.c
src/qs8-igemm/gen/4x8-minmax-rndnu-neon-mull-addw-dup.c
- src/qs8-igemm/gen/4x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c
- src/qs8-igemm/gen/4x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
+ src/qs8-igemm/gen/4x8c2-minmax-rndnu-neon-mlal-padal-dup.c
+ src/qs8-igemm/gen/4x8c2-minmax-rndnu-neon-mull-padal-dup.c
src/qs8-igemm/gen/4x8c8-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-igemm/gen/4x8c8-minmax-gemmlowp-neon-mull-padal.c
src/qs8-igemm/gen/4x8c16-minmax-gemmlowp-neon-mlal-padal.c
@@ -1669,8 +1669,8 @@
src/qs8-igemm/gen/4x16-minmax-rndnu-neon-mlal-lane-prfm.c
src/qs8-igemm/gen/4x16-minmax-rndnu-neon-mlal-lane.c
src/qs8-igemm/gen/4x16-minmax-rndnu-neon-mull-addw-dup.c
- src/qs8-igemm/gen/4x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
- src/qs8-igemm/gen/4x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
+ src/qs8-igemm/gen/4x16c2-minmax-rndnu-neon-mlal-padal-dup.c
+ src/qs8-igemm/gen/4x16c2-minmax-rndnu-neon-mull-padal-dup.c
src/qs8-igemm/gen/4x16c8-minmax-gemmlowp-neon-mlal-padal.c
src/qs8-igemm/gen/4x16c8-minmax-gemmlowp-neon-mull-padal.c
src/qs8-igemm/gen/4x16c16-minmax-gemmlowp-neon-mlal-padal.c
diff --git a/bench/qs8-gemm-e2e.cc b/bench/qs8-gemm-e2e.cc
index a815ef7..020835e 100644
--- a/bench/qs8-gemm-e2e.cc
+++ b/bench/qs8-gemm-e2e.cc
@@ -376,181 +376,181 @@
}
#if XNN_ENABLE_FULL_BENCHMARKS
- static void qs8_gemm_1x8c2_gemmlowp__neon_mlal_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
+ static void qs8_gemm_1x8c2__neon_mlal_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
GEMMEnd2EndBenchmark(state, model,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params,
1 /* mr */, 8 /* nr */, 1 /* log2_kr */, 0 /* log2_sr */,
benchmark::utils::CheckNEON);
}
- static void qs8_gemm_1x16c2_gemmlowp__neon_mlal_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
+ static void qs8_gemm_1x16c2__neon_mlal_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
GEMMEnd2EndBenchmark(state, model,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params,
1 /* mr */, 16 /* nr */, 1 /* log2_kr */, 0 /* log2_sr */,
benchmark::utils::CheckNEON);
}
#endif // XNN_ENABLE_FULL_BENCHMARKS
- static void qs8_gemm_2x8c2_gemmlowp__neon_mlal_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
+ static void qs8_gemm_2x8c2__neon_mlal_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
GEMMEnd2EndBenchmark(state, model,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params,
2 /* mr */, 8 /* nr */, 1 /* log2_kr */, 0 /* log2_sr */,
benchmark::utils::CheckNEON);
}
- static void qs8_gemm_2x16c2_gemmlowp__neon_mlal_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
+ static void qs8_gemm_2x16c2__neon_mlal_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
GEMMEnd2EndBenchmark(state, model,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params,
2 /* mr */, 16 /* nr */, 1 /* log2_kr */, 0 /* log2_sr */,
benchmark::utils::CheckNEON);
}
- static void qs8_gemm_3x8c2_gemmlowp__neon_mlal_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
+ static void qs8_gemm_3x8c2__neon_mlal_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
GEMMEnd2EndBenchmark(state, model,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params,
3 /* mr */, 8 /* nr */, 1 /* log2_kr */, 0 /* log2_sr */,
benchmark::utils::CheckNEON);
}
- static void qs8_gemm_3x16c2_gemmlowp__neon_mlal_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
+ static void qs8_gemm_3x16c2__neon_mlal_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
GEMMEnd2EndBenchmark(state, model,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params,
3 /* mr */, 16 /* nr */, 1 /* log2_kr */, 0 /* log2_sr */,
benchmark::utils::CheckNEON);
}
- static void qs8_gemm_4x8c2_gemmlowp__neon_mlal_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
+ static void qs8_gemm_4x8c2__neon_mlal_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
GEMMEnd2EndBenchmark(state, model,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params,
4 /* mr */, 8 /* nr */, 1 /* log2_kr */, 0 /* log2_sr */,
benchmark::utils::CheckNEON);
}
- static void qs8_gemm_4x16c2_gemmlowp__neon_mlal_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
+ static void qs8_gemm_4x16c2__neon_mlal_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
GEMMEnd2EndBenchmark(state, model,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params,
4 /* mr */, 16 /* nr */, 1 /* log2_kr */, 0 /* log2_sr */,
benchmark::utils::CheckNEON);
}
#if XNN_ENABLE_FULL_BENCHMARKS
- static void qs8_gemm_1x8c2_gemmlowp__neon_mull_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
+ static void qs8_gemm_1x8c2__neon_mull_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
GEMMEnd2EndBenchmark(state, model,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params,
1 /* mr */, 8 /* nr */, 1 /* log2_kr */, 0 /* log2_sr */,
benchmark::utils::CheckNEON);
}
- static void qs8_gemm_1x16c2_gemmlowp__neon_mull_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
+ static void qs8_gemm_1x16c2__neon_mull_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
GEMMEnd2EndBenchmark(state, model,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params,
1 /* mr */, 16 /* nr */, 1 /* log2_kr */, 0 /* log2_sr */,
benchmark::utils::CheckNEON);
}
#endif // XNN_ENABLE_FULL_BENCHMARKS
- static void qs8_gemm_2x8c2_gemmlowp__neon_mull_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
+ static void qs8_gemm_2x8c2__neon_mull_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
GEMMEnd2EndBenchmark(state, model,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params,
2 /* mr */, 8 /* nr */, 1 /* log2_kr */, 0 /* log2_sr */,
benchmark::utils::CheckNEON);
}
- static void qs8_gemm_2x16c2_gemmlowp__neon_mull_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
+ static void qs8_gemm_2x16c2__neon_mull_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
GEMMEnd2EndBenchmark(state, model,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params,
2 /* mr */, 16 /* nr */, 1 /* log2_kr */, 0 /* log2_sr */,
benchmark::utils::CheckNEON);
}
- static void qs8_gemm_3x8c2_gemmlowp__neon_mull_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
+ static void qs8_gemm_3x8c2__neon_mull_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
GEMMEnd2EndBenchmark(state, model,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params,
3 /* mr */, 8 /* nr */, 1 /* log2_kr */, 0 /* log2_sr */,
benchmark::utils::CheckNEON);
}
- static void qs8_gemm_3x16c2_gemmlowp__neon_mull_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
+ static void qs8_gemm_3x16c2__neon_mull_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
GEMMEnd2EndBenchmark(state, model,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params,
3 /* mr */, 16 /* nr */, 1 /* log2_kr */, 0 /* log2_sr */,
benchmark::utils::CheckNEON);
}
- static void qs8_gemm_4x8c2_gemmlowp__neon_mull_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
+ static void qs8_gemm_4x8c2__neon_mull_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
GEMMEnd2EndBenchmark(state, model,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params,
4 /* mr */, 8 /* nr */, 1 /* log2_kr */, 0 /* log2_sr */,
benchmark::utils::CheckNEON);
}
- static void qs8_gemm_4x16c2_gemmlowp__neon_mull_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
+ static void qs8_gemm_4x16c2__neon_mull_padal_dup(benchmark::State& state, models::ExecutionPlanFactory model) {
GEMMEnd2EndBenchmark(state, model,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup,
- xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup,
- xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup,
+ xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup,
+ xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params,
4 /* mr */, 16 /* nr */, 1 /* log2_kr */, 0 /* log2_sr */,
benchmark::utils::CheckNEON);
}
@@ -927,26 +927,26 @@
BENCHMARK_QS8_END2END(qs8_gemm_4x16c16_gemmlowp__neon_mlal_padal);
#if XNN_ENABLE_FULL_BENCHMARKS
- BENCHMARK_QS8_END2END(qs8_gemm_1x8c2_gemmlowp__neon_mlal_padal_dup);
- BENCHMARK_QS8_END2END(qs8_gemm_1x16c2_gemmlowp__neon_mlal_padal_dup);
+ BENCHMARK_QS8_END2END(qs8_gemm_1x8c2__neon_mlal_padal_dup);
+ BENCHMARK_QS8_END2END(qs8_gemm_1x16c2__neon_mlal_padal_dup);
#endif // XNN_ENABLE_FULL_BENCHMARKS
- BENCHMARK_QS8_END2END(qs8_gemm_2x8c2_gemmlowp__neon_mlal_padal_dup);
- BENCHMARK_QS8_END2END(qs8_gemm_2x16c2_gemmlowp__neon_mlal_padal_dup);
- BENCHMARK_QS8_END2END(qs8_gemm_3x8c2_gemmlowp__neon_mlal_padal_dup);
- BENCHMARK_QS8_END2END(qs8_gemm_3x16c2_gemmlowp__neon_mlal_padal_dup);
- BENCHMARK_QS8_END2END(qs8_gemm_4x8c2_gemmlowp__neon_mlal_padal_dup);
- BENCHMARK_QS8_END2END(qs8_gemm_4x16c2_gemmlowp__neon_mlal_padal_dup);
+ BENCHMARK_QS8_END2END(qs8_gemm_2x8c2__neon_mlal_padal_dup);
+ BENCHMARK_QS8_END2END(qs8_gemm_2x16c2__neon_mlal_padal_dup);
+ BENCHMARK_QS8_END2END(qs8_gemm_3x8c2__neon_mlal_padal_dup);
+ BENCHMARK_QS8_END2END(qs8_gemm_3x16c2__neon_mlal_padal_dup);
+ BENCHMARK_QS8_END2END(qs8_gemm_4x8c2__neon_mlal_padal_dup);
+ BENCHMARK_QS8_END2END(qs8_gemm_4x16c2__neon_mlal_padal_dup);
#if XNN_ENABLE_FULL_BENCHMARKS
- BENCHMARK_QS8_END2END(qs8_gemm_1x8c2_gemmlowp__neon_mull_padal_dup);
- BENCHMARK_QS8_END2END(qs8_gemm_1x16c2_gemmlowp__neon_mull_padal_dup);
+ BENCHMARK_QS8_END2END(qs8_gemm_1x8c2__neon_mull_padal_dup);
+ BENCHMARK_QS8_END2END(qs8_gemm_1x16c2__neon_mull_padal_dup);
#endif // XNN_ENABLE_FULL_BENCHMARKS
- BENCHMARK_QS8_END2END(qs8_gemm_2x8c2_gemmlowp__neon_mull_padal_dup);
- BENCHMARK_QS8_END2END(qs8_gemm_2x16c2_gemmlowp__neon_mull_padal_dup);
- BENCHMARK_QS8_END2END(qs8_gemm_3x8c2_gemmlowp__neon_mull_padal_dup);
- BENCHMARK_QS8_END2END(qs8_gemm_3x16c2_gemmlowp__neon_mull_padal_dup);
- BENCHMARK_QS8_END2END(qs8_gemm_4x8c2_gemmlowp__neon_mull_padal_dup);
- BENCHMARK_QS8_END2END(qs8_gemm_4x16c2_gemmlowp__neon_mull_padal_dup);
+ BENCHMARK_QS8_END2END(qs8_gemm_2x8c2__neon_mull_padal_dup);
+ BENCHMARK_QS8_END2END(qs8_gemm_2x16c2__neon_mull_padal_dup);
+ BENCHMARK_QS8_END2END(qs8_gemm_3x8c2__neon_mull_padal_dup);
+ BENCHMARK_QS8_END2END(qs8_gemm_3x16c2__neon_mull_padal_dup);
+ BENCHMARK_QS8_END2END(qs8_gemm_4x8c2__neon_mull_padal_dup);
+ BENCHMARK_QS8_END2END(qs8_gemm_4x16c2__neon_mull_padal_dup);
#if XNN_ENABLE_FULL_BENCHMARKS
BENCHMARK_QS8_END2END(qs8_gemm_1x8_gemmlowp__neon_mlal_lane);
diff --git a/bench/qs8-gemm.cc b/bench/qs8-gemm.cc
index ad1245d..1979fb4 100644
--- a/bench/qs8-gemm.cc
+++ b/bench/qs8-gemm.cc
@@ -347,69 +347,69 @@
GEMMBenchmark(state, xnn_qs8_gemm_minmax_gemmlowp_ukernel_6x16__neon_mlal_lane, 6, 16, 1, 1,
xnn_init_qs8_conv_minmax_gemmlowp_neon_params, benchmark::utils::CheckNEON);
}
- static void qs8_gemm_1x8c2_gemmlowp__neon_mull_padal_dup(benchmark::State& state, const char* net) {
- GEMMBenchmark(state, xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, 1, 8, 2, 1,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params, benchmark::utils::CheckNEON);
+ static void qs8_gemm_1x8c2__neon_mull_padal_dup(benchmark::State& state, const char* net) {
+ GEMMBenchmark(state, xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, 1, 8, 2, 1,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params, benchmark::utils::CheckNEON);
}
- static void qs8_gemm_2x8c2_gemmlowp__neon_mull_padal_dup(benchmark::State& state, const char* net) {
- GEMMBenchmark(state, xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, 2, 8, 2, 1,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params, benchmark::utils::CheckNEON);
+ static void qs8_gemm_2x8c2__neon_mull_padal_dup(benchmark::State& state, const char* net) {
+ GEMMBenchmark(state, xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, 2, 8, 2, 1,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params, benchmark::utils::CheckNEON);
}
- static void qs8_gemm_3x8c2_gemmlowp__neon_mull_padal_dup(benchmark::State& state, const char* net) {
- GEMMBenchmark(state, xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, 3, 8, 2, 1,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params, benchmark::utils::CheckNEON);
+ static void qs8_gemm_3x8c2__neon_mull_padal_dup(benchmark::State& state, const char* net) {
+ GEMMBenchmark(state, xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, 3, 8, 2, 1,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params, benchmark::utils::CheckNEON);
}
- static void qs8_gemm_4x8c2_gemmlowp__neon_mull_padal_dup(benchmark::State& state, const char* net) {
- GEMMBenchmark(state, xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, 4, 8, 2, 1,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params, benchmark::utils::CheckNEON);
+ static void qs8_gemm_4x8c2__neon_mull_padal_dup(benchmark::State& state, const char* net) {
+ GEMMBenchmark(state, xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, 4, 8, 2, 1,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params, benchmark::utils::CheckNEON);
}
- static void qs8_gemm_1x16c2_gemmlowp__neon_mull_padal_dup(benchmark::State& state, const char* net) {
- GEMMBenchmark(state, xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, 1, 16, 2, 1,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params, benchmark::utils::CheckNEON);
+ static void qs8_gemm_1x16c2__neon_mull_padal_dup(benchmark::State& state, const char* net) {
+ GEMMBenchmark(state, xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, 1, 16, 2, 1,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params, benchmark::utils::CheckNEON);
}
- static void qs8_gemm_2x16c2_gemmlowp__neon_mull_padal_dup(benchmark::State& state, const char* net) {
- GEMMBenchmark(state, xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, 2, 16, 2, 1,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params, benchmark::utils::CheckNEON);
+ static void qs8_gemm_2x16c2__neon_mull_padal_dup(benchmark::State& state, const char* net) {
+ GEMMBenchmark(state, xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, 2, 16, 2, 1,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params, benchmark::utils::CheckNEON);
}
- static void qs8_gemm_3x16c2_gemmlowp__neon_mull_padal_dup(benchmark::State& state, const char* net) {
- GEMMBenchmark(state, xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, 3, 16, 2, 1,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params, benchmark::utils::CheckNEON);
+ static void qs8_gemm_3x16c2__neon_mull_padal_dup(benchmark::State& state, const char* net) {
+ GEMMBenchmark(state, xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, 3, 16, 2, 1,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params, benchmark::utils::CheckNEON);
}
- static void qs8_gemm_4x16c2_gemmlowp__neon_mull_padal_dup(benchmark::State& state, const char* net) {
- GEMMBenchmark(state, xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, 4, 16, 2, 1,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params, benchmark::utils::CheckNEON);
+ static void qs8_gemm_4x16c2__neon_mull_padal_dup(benchmark::State& state, const char* net) {
+ GEMMBenchmark(state, xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, 4, 16, 2, 1,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params, benchmark::utils::CheckNEON);
}
- static void qs8_gemm_1x8c2_gemmlowp__neon_mlal_padal_dup(benchmark::State& state, const char* net) {
- GEMMBenchmark(state, xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, 1, 8, 2, 1,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params, benchmark::utils::CheckNEON);
+ static void qs8_gemm_1x8c2__neon_mlal_padal_dup(benchmark::State& state, const char* net) {
+ GEMMBenchmark(state, xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, 1, 8, 2, 1,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params, benchmark::utils::CheckNEON);
}
- static void qs8_gemm_2x8c2_gemmlowp__neon_mlal_padal_dup(benchmark::State& state, const char* net) {
- GEMMBenchmark(state, xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, 2, 8, 2, 1,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params, benchmark::utils::CheckNEON);
+ static void qs8_gemm_2x8c2__neon_mlal_padal_dup(benchmark::State& state, const char* net) {
+ GEMMBenchmark(state, xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, 2, 8, 2, 1,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params, benchmark::utils::CheckNEON);
}
- static void qs8_gemm_3x8c2_gemmlowp__neon_mlal_padal_dup(benchmark::State& state, const char* net) {
- GEMMBenchmark(state, xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, 3, 8, 2, 1,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params, benchmark::utils::CheckNEON);
+ static void qs8_gemm_3x8c2__neon_mlal_padal_dup(benchmark::State& state, const char* net) {
+ GEMMBenchmark(state, xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, 3, 8, 2, 1,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params, benchmark::utils::CheckNEON);
}
- static void qs8_gemm_4x8c2_gemmlowp__neon_mlal_padal_dup(benchmark::State& state, const char* net) {
- GEMMBenchmark(state, xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, 4, 8, 2, 1,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params, benchmark::utils::CheckNEON);
+ static void qs8_gemm_4x8c2__neon_mlal_padal_dup(benchmark::State& state, const char* net) {
+ GEMMBenchmark(state, xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, 4, 8, 2, 1,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params, benchmark::utils::CheckNEON);
}
- static void qs8_gemm_1x16c2_gemmlowp__neon_mlal_padal_dup(benchmark::State& state, const char* net) {
- GEMMBenchmark(state, xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, 1, 16, 2, 1,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params, benchmark::utils::CheckNEON);
+ static void qs8_gemm_1x16c2__neon_mlal_padal_dup(benchmark::State& state, const char* net) {
+ GEMMBenchmark(state, xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, 1, 16, 2, 1,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params, benchmark::utils::CheckNEON);
}
- static void qs8_gemm_2x16c2_gemmlowp__neon_mlal_padal_dup(benchmark::State& state, const char* net) {
- GEMMBenchmark(state, xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, 2, 16, 2, 1,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params, benchmark::utils::CheckNEON);
+ static void qs8_gemm_2x16c2__neon_mlal_padal_dup(benchmark::State& state, const char* net) {
+ GEMMBenchmark(state, xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, 2, 16, 2, 1,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params, benchmark::utils::CheckNEON);
}
- static void qs8_gemm_3x16c2_gemmlowp__neon_mlal_padal_dup(benchmark::State& state, const char* net) {
- GEMMBenchmark(state, xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, 3, 16, 2, 1,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params, benchmark::utils::CheckNEON);
+ static void qs8_gemm_3x16c2__neon_mlal_padal_dup(benchmark::State& state, const char* net) {
+ GEMMBenchmark(state, xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, 3, 16, 2, 1,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params, benchmark::utils::CheckNEON);
}
- static void qs8_gemm_4x16c2_gemmlowp__neon_mlal_padal_dup(benchmark::State& state, const char* net) {
- GEMMBenchmark(state, xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, 4, 16, 2, 1,
- xnn_init_qs8_conv_minmax_gemmlowp_neon_params, benchmark::utils::CheckNEON);
+ static void qs8_gemm_4x16c2__neon_mlal_padal_dup(benchmark::State& state, const char* net) {
+ GEMMBenchmark(state, xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, 4, 16, 2, 1,
+ xnn_init_qs8_conv_minmax_rndnu_neon_params, benchmark::utils::CheckNEON);
}
static void qs8_gemm_1x8c8_gemmlowp__neon_mull_padal(benchmark::State& state, const char* net) {
GEMMBenchmark(state, xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c8__neon_mull_padal, 1, 8, 8, 1,
@@ -550,22 +550,22 @@
BENCHMARK_GEMM(qs8_gemm_3x16_gemmlowp__neon_mlal_lane)
BENCHMARK_GEMM(qs8_gemm_4x16_gemmlowp__neon_mlal_lane)
BENCHMARK_GEMM(qs8_gemm_6x16_gemmlowp__neon_mlal_lane)
- BENCHMARK_GEMM(qs8_gemm_1x8c2_gemmlowp__neon_mull_padal_dup)
- BENCHMARK_GEMM(qs8_gemm_2x8c2_gemmlowp__neon_mull_padal_dup)
- BENCHMARK_GEMM(qs8_gemm_3x8c2_gemmlowp__neon_mull_padal_dup)
- BENCHMARK_GEMM(qs8_gemm_4x8c2_gemmlowp__neon_mull_padal_dup)
- BENCHMARK_GEMM(qs8_gemm_1x16c2_gemmlowp__neon_mull_padal_dup)
- BENCHMARK_GEMM(qs8_gemm_2x16c2_gemmlowp__neon_mull_padal_dup)
- BENCHMARK_GEMM(qs8_gemm_3x16c2_gemmlowp__neon_mull_padal_dup)
- BENCHMARK_GEMM(qs8_gemm_4x16c2_gemmlowp__neon_mull_padal_dup)
- BENCHMARK_GEMM(qs8_gemm_1x8c2_gemmlowp__neon_mlal_padal_dup)
- BENCHMARK_GEMM(qs8_gemm_2x8c2_gemmlowp__neon_mlal_padal_dup)
- BENCHMARK_GEMM(qs8_gemm_3x8c2_gemmlowp__neon_mlal_padal_dup)
- BENCHMARK_GEMM(qs8_gemm_4x8c2_gemmlowp__neon_mlal_padal_dup)
- BENCHMARK_GEMM(qs8_gemm_1x16c2_gemmlowp__neon_mlal_padal_dup)
- BENCHMARK_GEMM(qs8_gemm_2x16c2_gemmlowp__neon_mlal_padal_dup)
- BENCHMARK_GEMM(qs8_gemm_3x16c2_gemmlowp__neon_mlal_padal_dup)
- BENCHMARK_GEMM(qs8_gemm_4x16c2_gemmlowp__neon_mlal_padal_dup)
+ BENCHMARK_GEMM(qs8_gemm_1x8c2__neon_mull_padal_dup)
+ BENCHMARK_GEMM(qs8_gemm_2x8c2__neon_mull_padal_dup)
+ BENCHMARK_GEMM(qs8_gemm_3x8c2__neon_mull_padal_dup)
+ BENCHMARK_GEMM(qs8_gemm_4x8c2__neon_mull_padal_dup)
+ BENCHMARK_GEMM(qs8_gemm_1x16c2__neon_mull_padal_dup)
+ BENCHMARK_GEMM(qs8_gemm_2x16c2__neon_mull_padal_dup)
+ BENCHMARK_GEMM(qs8_gemm_3x16c2__neon_mull_padal_dup)
+ BENCHMARK_GEMM(qs8_gemm_4x16c2__neon_mull_padal_dup)
+ BENCHMARK_GEMM(qs8_gemm_1x8c2__neon_mlal_padal_dup)
+ BENCHMARK_GEMM(qs8_gemm_2x8c2__neon_mlal_padal_dup)
+ BENCHMARK_GEMM(qs8_gemm_3x8c2__neon_mlal_padal_dup)
+ BENCHMARK_GEMM(qs8_gemm_4x8c2__neon_mlal_padal_dup)
+ BENCHMARK_GEMM(qs8_gemm_1x16c2__neon_mlal_padal_dup)
+ BENCHMARK_GEMM(qs8_gemm_2x16c2__neon_mlal_padal_dup)
+ BENCHMARK_GEMM(qs8_gemm_3x16c2__neon_mlal_padal_dup)
+ BENCHMARK_GEMM(qs8_gemm_4x16c2__neon_mlal_padal_dup)
BENCHMARK_GEMM(qs8_gemm_1x8c8_gemmlowp__neon_mull_padal)
BENCHMARK_GEMM(qs8_gemm_2x8c8_gemmlowp__neon_mull_padal)
BENCHMARK_GEMM(qs8_gemm_3x8c8_gemmlowp__neon_mull_padal)
diff --git a/scripts/generate-qs8-gemm.sh b/scripts/generate-qs8-gemm.sh
index da11b47..7a19707 100755
--- a/scripts/generate-qs8-gemm.sh
+++ b/scripts/generate-qs8-gemm.sh
@@ -239,14 +239,14 @@
tools/xngen src/qs8-gemm/neon-mull-addw-dup.c.in -D MR=4 -D NR=16 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -o src/qs8-gemm/gen/4x16-minmax-rndnu-neon-mull-addw-dup.c &
### C2 micro-kernels
-tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=8 -D MLA=0 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/1x8c2-minmax-gemmlowp-neon-mull-padal-dup.c &
-tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=8 -D MLA=0 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/2x8c2-minmax-gemmlowp-neon-mull-padal-dup.c &
-tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=3 -D NR=8 -D MLA=0 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/3x8c2-minmax-gemmlowp-neon-mull-padal-dup.c &
-tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=4 -D NR=8 -D MLA=0 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/4x8c2-minmax-gemmlowp-neon-mull-padal-dup.c &
-tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=16 -D MLA=0 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/1x16c2-minmax-gemmlowp-neon-mull-padal-dup.c &
-tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=16 -D MLA=0 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/2x16c2-minmax-gemmlowp-neon-mull-padal-dup.c &
-tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=3 -D NR=16 -D MLA=0 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/3x16c2-minmax-gemmlowp-neon-mull-padal-dup.c &
-tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=4 -D NR=16 -D MLA=0 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/4x16c2-minmax-gemmlowp-neon-mull-padal-dup.c &
+tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=8 -D MLA=0 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/1x8c2-minmax-rndnu-neon-mull-padal-dup.c &
+tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=8 -D MLA=0 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/2x8c2-minmax-rndnu-neon-mull-padal-dup.c &
+tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=3 -D NR=8 -D MLA=0 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/3x8c2-minmax-rndnu-neon-mull-padal-dup.c &
+tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=4 -D NR=8 -D MLA=0 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/4x8c2-minmax-rndnu-neon-mull-padal-dup.c &
+tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=16 -D MLA=0 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/1x16c2-minmax-rndnu-neon-mull-padal-dup.c &
+tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=16 -D MLA=0 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/2x16c2-minmax-rndnu-neon-mull-padal-dup.c &
+tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=3 -D NR=16 -D MLA=0 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/3x16c2-minmax-rndnu-neon-mull-padal-dup.c &
+tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=4 -D NR=16 -D MLA=0 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/4x16c2-minmax-rndnu-neon-mull-padal-dup.c &
tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=8 -D MLA=1 -D REQUANTIZATION=FP32 -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/1x8c2-minmax-fp32-neon-mlal-padal-dup.c &
tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=8 -D MLA=1 -D REQUANTIZATION=FP32 -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/2x8c2-minmax-fp32-neon-mlal-padal-dup.c &
@@ -260,17 +260,17 @@
tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=8 -D MLA=1 -D REQUANTIZATION=FP32 -D CHANNELWISE=1 -D ARMV8=1 -o src/qc8-gemm/gen/1x8c2-minmax-fp32-neonv8-mlal-padal-dup.c &
tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=8 -D MLA=1 -D REQUANTIZATION=FP32 -D CHANNELWISE=1 -D ARMV8=1 -o src/qc8-gemm/gen/2x8c2-minmax-fp32-neonv8-mlal-padal-dup.c &
-tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=8 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/1x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
-tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=8 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/2x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
-tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=3 -D NR=8 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/3x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
-tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=4 -D NR=8 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/4x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
-tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=16 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/1x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
-tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=16 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/2x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
-tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=3 -D NR=16 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/3x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
-tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=4 -D NR=16 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/4x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
-
tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=8 -D MLA=1 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/1x8c2-minmax-rndnu-neon-mlal-padal-dup.c &
tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=8 -D MLA=1 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/2x8c2-minmax-rndnu-neon-mlal-padal-dup.c &
+tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=3 -D NR=8 -D MLA=1 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/3x8c2-minmax-rndnu-neon-mlal-padal-dup.c &
+tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=4 -D NR=8 -D MLA=1 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/4x8c2-minmax-rndnu-neon-mlal-padal-dup.c &
+tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=16 -D MLA=1 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/1x16c2-minmax-rndnu-neon-mlal-padal-dup.c &
+tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=16 -D MLA=1 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/2x16c2-minmax-rndnu-neon-mlal-padal-dup.c &
+tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=3 -D NR=16 -D MLA=1 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/3x16c2-minmax-rndnu-neon-mlal-padal-dup.c &
+tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=4 -D NR=16 -D MLA=1 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/4x16c2-minmax-rndnu-neon-mlal-padal-dup.c &
+
+tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=8 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/1x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
+tools/xngen src/qs8-gemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=8 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/2x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
### C8 micro-kernels
tools/xngen src/qs8-gemm/c8-neon-mull-padal.c.in -D MR=1 -D NR=8 -D MLA=0 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-gemm/gen/1x8c8-minmax-gemmlowp-neon-mull-padal.c &
diff --git a/scripts/generate-qs8-igemm.sh b/scripts/generate-qs8-igemm.sh
index 1989dc6..e8038f1 100755
--- a/scripts/generate-qs8-igemm.sh
+++ b/scripts/generate-qs8-igemm.sh
@@ -245,14 +245,14 @@
tools/xngen src/qs8-igemm/neon-mull-addw-dup.c.in -D MR=4 -D NR=16 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -o src/qs8-igemm/gen/4x16-minmax-rndnu-neon-mull-addw-dup.c &
### C2 micro-kernels
-tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=8 -D MLA=0 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/1x8c2-minmax-gemmlowp-neon-mull-padal-dup.c &
-tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=8 -D MLA=0 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/2x8c2-minmax-gemmlowp-neon-mull-padal-dup.c &
-tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=3 -D NR=8 -D MLA=0 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/3x8c2-minmax-gemmlowp-neon-mull-padal-dup.c &
-tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=4 -D NR=8 -D MLA=0 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/4x8c2-minmax-gemmlowp-neon-mull-padal-dup.c &
-tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=16 -D MLA=0 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/1x16c2-minmax-gemmlowp-neon-mull-padal-dup.c &
-tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=16 -D MLA=0 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/2x16c2-minmax-gemmlowp-neon-mull-padal-dup.c &
-tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=3 -D NR=16 -D MLA=0 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/3x16c2-minmax-gemmlowp-neon-mull-padal-dup.c &
-tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=4 -D NR=16 -D MLA=0 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/4x16c2-minmax-gemmlowp-neon-mull-padal-dup.c &
+tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=8 -D MLA=0 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/1x8c2-minmax-rndnu-neon-mull-padal-dup.c &
+tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=8 -D MLA=0 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/2x8c2-minmax-rndnu-neon-mull-padal-dup.c &
+tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=3 -D NR=8 -D MLA=0 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/3x8c2-minmax-rndnu-neon-mull-padal-dup.c &
+tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=4 -D NR=8 -D MLA=0 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/4x8c2-minmax-rndnu-neon-mull-padal-dup.c &
+tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=16 -D MLA=0 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/1x16c2-minmax-rndnu-neon-mull-padal-dup.c &
+tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=16 -D MLA=0 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/2x16c2-minmax-rndnu-neon-mull-padal-dup.c &
+tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=3 -D NR=16 -D MLA=0 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/3x16c2-minmax-rndnu-neon-mull-padal-dup.c &
+tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=4 -D NR=16 -D MLA=0 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/4x16c2-minmax-rndnu-neon-mull-padal-dup.c &
tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=8 -D MLA=1 -D REQUANTIZATION=FP32 -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/1x8c2-minmax-fp32-neon-mlal-padal-dup.c &
tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=8 -D MLA=1 -D REQUANTIZATION=FP32 -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/2x8c2-minmax-fp32-neon-mlal-padal-dup.c &
@@ -266,17 +266,17 @@
tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=8 -D MLA=1 -D REQUANTIZATION=FP32 -D CHANNELWISE=1 -D ARMV8=1 -o src/qc8-igemm/gen/1x8c2-minmax-fp32-neonv8-mlal-padal-dup.c &
tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=8 -D MLA=1 -D REQUANTIZATION=FP32 -D CHANNELWISE=1 -D ARMV8=1 -o src/qc8-igemm/gen/2x8c2-minmax-fp32-neonv8-mlal-padal-dup.c &
-tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=8 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/1x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
-tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=8 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/2x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
-tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=3 -D NR=8 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/3x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
-tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=4 -D NR=8 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/4x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
-tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=16 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/1x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
-tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=16 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/2x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
-tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=3 -D NR=16 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/3x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
-tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=4 -D NR=16 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/4x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
-
tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=8 -D MLA=1 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/1x8c2-minmax-rndnu-neon-mlal-padal-dup.c &
tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=8 -D MLA=1 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/2x8c2-minmax-rndnu-neon-mlal-padal-dup.c &
+tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=3 -D NR=8 -D MLA=1 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/3x8c2-minmax-rndnu-neon-mlal-padal-dup.c &
+tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=4 -D NR=8 -D MLA=1 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/4x8c2-minmax-rndnu-neon-mlal-padal-dup.c &
+tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=16 -D MLA=1 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/1x16c2-minmax-rndnu-neon-mlal-padal-dup.c &
+tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=16 -D MLA=1 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/2x16c2-minmax-rndnu-neon-mlal-padal-dup.c &
+tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=3 -D NR=16 -D MLA=1 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/3x16c2-minmax-rndnu-neon-mlal-padal-dup.c &
+tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=4 -D NR=16 -D MLA=1 -D REQUANTIZATION=RNDNU -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/4x16c2-minmax-rndnu-neon-mlal-padal-dup.c &
+
+tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=1 -D NR=8 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/1x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
+tools/xngen src/qs8-igemm/c2-neon-mull-padal-dup.c.in -D MR=2 -D NR=8 -D MLA=1 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/2x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c &
### C8 micro-kernels
tools/xngen src/qs8-igemm/c8-neon-mull-padal.c.in -D MR=1 -D NR=8 -D MLA=0 -D REQUANTIZATION=GEMMLOWP -D CHANNELWISE=0 -D ARMV8=0 -o src/qs8-igemm/gen/1x8c8-minmax-gemmlowp-neon-mull-padal.c &
diff --git a/src/qs8-gemm/gen/1x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c b/src/qs8-gemm/gen/1x16c2-minmax-rndnu-neon-mlal-padal-dup.c
similarity index 93%
rename from src/qs8-gemm/gen/1x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
rename to src/qs8-gemm/gen/1x16c2-minmax-rndnu-neon-mlal-padal-dup.c
index 607ca16..ec8100d 100644
--- a/src/qs8-gemm/gen/1x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
+++ b/src/qs8-gemm/gen/1x16c2-minmax-rndnu-neon-mlal-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup(
+void xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -243,26 +243,26 @@
}
}
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc0x89AB = vqrdmulhq_s32(vacc0x89AB, vmultiplier);
- vacc0xCDEF = vqrdmulhq_s32(vacc0xCDEF, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc0x89AB = vshlq_s32(vacc0x89AB, vright_pre_shift);
+ vacc0xCDEF = vshlq_s32(vacc0xCDEF, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc0x89AB = vsraq_n_s32(vacc0x89AB, vbicq_s32(vacc0x89AB, vzero_shift_mask), 31);
- vacc0xCDEF = vsraq_n_s32(vacc0xCDEF, vbicq_s32(vacc0xCDEF, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc0x89AB = vqdmulhq_s32(vacc0x89AB, vmultiplier);
+ vacc0xCDEF = vqdmulhq_s32(vacc0xCDEF, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_shift);
- vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_post_shift);
+ vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc0x89ABCDEF = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x89AB), vacc0xCDEF), voutput_zero_point);
@@ -274,8 +274,8 @@
int8x16_t vout0x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc0x89ABCDEF));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout0x0123456789ABCDEF = vmaxq_s8(vout0x0123456789ABCDEF, voutput_min);
diff --git a/src/qs8-gemm/gen/1x16c2-minmax-gemmlowp-neon-mull-padal-dup.c b/src/qs8-gemm/gen/1x16c2-minmax-rndnu-neon-mull-padal-dup.c
similarity index 89%
rename from src/qs8-gemm/gen/1x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
rename to src/qs8-gemm/gen/1x16c2-minmax-rndnu-neon-mull-padal-dup.c
index c90d67e..42c57a2 100644
--- a/src/qs8-gemm/gen/1x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
+++ b/src/qs8-gemm/gen/1x16c2-minmax-rndnu-neon-mull-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup(
+void xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -155,26 +155,26 @@
}
}
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc0x89AB = vqrdmulhq_s32(vacc0x89AB, vmultiplier);
- vacc0xCDEF = vqrdmulhq_s32(vacc0xCDEF, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc0x89AB = vshlq_s32(vacc0x89AB, vright_pre_shift);
+ vacc0xCDEF = vshlq_s32(vacc0xCDEF, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc0x89AB = vsraq_n_s32(vacc0x89AB, vbicq_s32(vacc0x89AB, vzero_shift_mask), 31);
- vacc0xCDEF = vsraq_n_s32(vacc0xCDEF, vbicq_s32(vacc0xCDEF, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc0x89AB = vqdmulhq_s32(vacc0x89AB, vmultiplier);
+ vacc0xCDEF = vqdmulhq_s32(vacc0xCDEF, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_shift);
- vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_post_shift);
+ vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc0x89ABCDEF = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x89AB), vacc0xCDEF), voutput_zero_point);
@@ -186,8 +186,8 @@
int8x16_t vout0x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc0x89ABCDEF));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout0x0123456789ABCDEF = vmaxq_s8(vout0x0123456789ABCDEF, voutput_min);
diff --git a/src/qs8-gemm/gen/1x8c2-minmax-rndnu-neon-mull-padal-dup.c b/src/qs8-gemm/gen/1x8c2-minmax-rndnu-neon-mull-padal-dup.c
new file mode 100644
index 0000000..af06dba
--- /dev/null
+++ b/src/qs8-gemm/gen/1x8c2-minmax-rndnu-neon-mull-padal-dup.c
@@ -0,0 +1,169 @@
+// Auto-generated file. Do not edit!
+// Template: src/qs8-gemm/c2-neon-mull-padal-dup.c.in
+// Generator: tools/xngen
+//
+// Copyright 2021 Google LLC
+//
+// This source code is licensed under the BSD-style license found in the
+// LICENSE file in the root directory of this source tree.
+
+#include <assert.h>
+
+#include <arm_neon.h>
+
+#include <xnnpack/gemm.h>
+#include <xnnpack/math.h>
+
+
+void xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup(
+ size_t mr,
+ size_t nc,
+ size_t kc,
+ const int8_t* restrict a,
+ size_t a_stride,
+ const void* restrict w,
+ int8_t* restrict c,
+ size_t cm_stride,
+ size_t cn_stride,
+ const union xnn_qs8_conv_minmax_params params[restrict XNN_MIN_ELEMENTS(1)]) XNN_DISABLE_TSAN XNN_DISABLE_MSAN
+{
+ assert(mr != 0);
+ assert(mr <= 1);
+ assert(nc != 0);
+ assert(kc != 0);
+ assert(kc % sizeof(int8_t) == 0);
+ assert(a != NULL);
+ assert(w != NULL);
+ assert(c != NULL);
+
+ kc = round_up_po2(kc, 2 * sizeof(int8_t));
+ const int8_t* a0 = a;
+ int8_t* c0 = c;
+
+ do {
+ int32x4_t vacc0x0123 = vld1q_s32(w); w = (const void*) ((uintptr_t) w + 4 * sizeof(int32_t));
+ int32x4_t vacc0x4567 = vld1q_s32(w); w = (const void*) ((uintptr_t) w + 4 * sizeof(int32_t));
+
+ size_t k = kc;
+
+
+ while (k >= 8 * sizeof(int8_t)) {
+ const int8x8_t va0 = vld1_s8(a0); a0 += 8;
+
+ const int8x8_t vb0123c0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb4567c0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb0123c1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb4567c1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb0123c2 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb4567c2 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb0123c3 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb4567c3 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+
+ const int16x8_t vprod0x0123c0 = vmull_s8(vb0123c0, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 0)));
+ const int16x8_t vprod0x0123c1 = vmull_s8(vb0123c1, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 1)));
+ const int16x8_t vprod0x0123c2 = vmull_s8(vb0123c2, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 2)));
+ const int16x8_t vprod0x0123c3 = vmull_s8(vb0123c3, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 3)));
+ vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c0);
+ vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c1);
+ vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c2);
+ vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c3);
+ const int16x8_t vprod0x4567c0 = vmull_s8(vb4567c0, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 0)));
+ const int16x8_t vprod0x4567c1 = vmull_s8(vb4567c1, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 1)));
+ const int16x8_t vprod0x4567c2 = vmull_s8(vb4567c2, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 2)));
+ const int16x8_t vprod0x4567c3 = vmull_s8(vb4567c3, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 3)));
+ vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c0);
+ vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c1);
+ vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c2);
+ vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c3);
+
+ k -= 8 * sizeof(int8_t);
+ }
+
+ if XNN_UNLIKELY(k != 0) {
+ const int8x8_t va0 = vld1_s8(a0); a0 = (const int8_t*) ((uintptr_t) a0 + k);
+
+ const int8x8_t vb0123c0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb4567c0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+
+ const int16x8_t vprod0x0123c0 = vmull_s8(vb0123c0, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 0)));
+ vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c0);
+ const int16x8_t vprod0x4567c0 = vmull_s8(vb4567c0, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 0)));
+ vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c0);
+
+ if (k > 2 * sizeof(int8_t)) {
+ const int8x8_t vb0123c1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb4567c1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+
+ const int16x8_t vprod0x0123c1 = vmull_s8(vb0123c1, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 1)));
+ vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c1);
+ const int16x8_t vprod0x4567c1 = vmull_s8(vb4567c1, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 1)));
+ vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c1);
+
+ if (k > 4 * sizeof(int8_t)) {
+ const int8x8_t vb0123c2 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb4567c2 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+
+ const int16x8_t vprod0x0123c2 = vmull_s8(vb0123c2, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 2)));
+ vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c2);
+ const int16x8_t vprod0x4567c2 = vmull_s8(vb4567c2, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 2)));
+ vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c2);
+ }
+ }
+ }
+
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
+
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
+#if XNN_ARCH_ARM64
+ const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
+
+ int8x8_t vout0x01234567 = vqmovn_s16(vacc0x01234567);
+#else
+ const int16x8_t vacc0x01234567 = vqaddq_s16(vcombine_s16(vqmovn_s32(vacc0x0123), vqmovn_s32(vacc0x4567)), voutput_zero_point);
+
+ int8x8_t vout0x01234567 = vqmovn_s16(vacc0x01234567);
+#endif
+ const int8x8_t voutput_min = vld1_dup_s8(&params->rndnu_neon.output_min);
+ const int8x8_t voutput_max = vld1_dup_s8(&params->rndnu_neon.output_max);
+
+ vout0x01234567 = vmax_s8(vout0x01234567, voutput_min);
+
+ vout0x01234567 = vmin_s8(vout0x01234567, voutput_max);
+
+ if (nc >= 8) {
+ vst1_s8(c0 + 0, vout0x01234567);
+
+ c0 = (int8_t*) ((uintptr_t) c0 + cn_stride);
+
+ a0 = (const int8_t*) ((uintptr_t) a0 - kc);
+
+ nc -= 8;
+ } else {
+ // Final case where not all of the 8 columns fit in the destination.
+ if (nc & 4) {
+ vst1_lane_u32(__builtin_assume_aligned(c0, 1), vreinterpret_u32_s8(vout0x01234567), 0); c0 += 4;
+ vout0x01234567 = vext_s8(vout0x01234567, vout0x01234567, 4);
+ }
+ if (nc & 2) {
+ vst1_lane_u16(__builtin_assume_aligned(c0, 1), vreinterpret_u16_s8(vout0x01234567), 0); c0 += 2;
+ vout0x01234567 = vext_s8(vout0x01234567, vout0x01234567, 2);
+ }
+ if (nc & 1) {
+ vst1_lane_s8(c0, vout0x01234567, 0);
+ }
+
+ nc = 0;
+ }
+ } while (nc != 0);
+}
diff --git a/src/qs8-gemm/gen/2x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c b/src/qs8-gemm/gen/2x16c2-minmax-rndnu-neon-mlal-padal-dup.c
similarity index 93%
rename from src/qs8-gemm/gen/2x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
rename to src/qs8-gemm/gen/2x16c2-minmax-rndnu-neon-mlal-padal-dup.c
index cdd629c..3cf41d1 100644
--- a/src/qs8-gemm/gen/2x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
+++ b/src/qs8-gemm/gen/2x16c2-minmax-rndnu-neon-mlal-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup(
+void xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -361,38 +361,38 @@
}
}
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc0x89AB = vqrdmulhq_s32(vacc0x89AB, vmultiplier);
- vacc0xCDEF = vqrdmulhq_s32(vacc0xCDEF, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc1x89AB = vqrdmulhq_s32(vacc1x89AB, vmultiplier);
- vacc1xCDEF = vqrdmulhq_s32(vacc1xCDEF, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc0x89AB = vshlq_s32(vacc0x89AB, vright_pre_shift);
+ vacc0xCDEF = vshlq_s32(vacc0xCDEF, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc1x89AB = vshlq_s32(vacc1x89AB, vright_pre_shift);
+ vacc1xCDEF = vshlq_s32(vacc1xCDEF, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc0x89AB = vsraq_n_s32(vacc0x89AB, vbicq_s32(vacc0x89AB, vzero_shift_mask), 31);
- vacc0xCDEF = vsraq_n_s32(vacc0xCDEF, vbicq_s32(vacc0xCDEF, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc1x89AB = vsraq_n_s32(vacc1x89AB, vbicq_s32(vacc1x89AB, vzero_shift_mask), 31);
- vacc1xCDEF = vsraq_n_s32(vacc1xCDEF, vbicq_s32(vacc1xCDEF, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc0x89AB = vqdmulhq_s32(vacc0x89AB, vmultiplier);
+ vacc0xCDEF = vqdmulhq_s32(vacc0xCDEF, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc1x89AB = vqdmulhq_s32(vacc1x89AB, vmultiplier);
+ vacc1xCDEF = vqdmulhq_s32(vacc1xCDEF, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_shift);
- vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_shift);
- vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_post_shift);
+ vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_post_shift);
+ vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc0x89ABCDEF = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x89AB), vacc0xCDEF), voutput_zero_point);
@@ -410,8 +410,8 @@
int8x16_t vout0x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc0x89ABCDEF));
int8x16_t vout1x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc1x01234567), vqmovn_s16(vacc1x89ABCDEF));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout0x0123456789ABCDEF = vmaxq_s8(vout0x0123456789ABCDEF, voutput_min);
vout1x0123456789ABCDEF = vmaxq_s8(vout1x0123456789ABCDEF, voutput_min);
diff --git a/src/qs8-gemm/gen/2x16c2-minmax-gemmlowp-neon-mull-padal-dup.c b/src/qs8-gemm/gen/2x16c2-minmax-rndnu-neon-mull-padal-dup.c
similarity index 89%
rename from src/qs8-gemm/gen/2x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
rename to src/qs8-gemm/gen/2x16c2-minmax-rndnu-neon-mull-padal-dup.c
index 8580695..b15747b 100644
--- a/src/qs8-gemm/gen/2x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
+++ b/src/qs8-gemm/gen/2x16c2-minmax-rndnu-neon-mull-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup(
+void xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -223,38 +223,38 @@
}
}
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc0x89AB = vqrdmulhq_s32(vacc0x89AB, vmultiplier);
- vacc0xCDEF = vqrdmulhq_s32(vacc0xCDEF, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc1x89AB = vqrdmulhq_s32(vacc1x89AB, vmultiplier);
- vacc1xCDEF = vqrdmulhq_s32(vacc1xCDEF, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc0x89AB = vshlq_s32(vacc0x89AB, vright_pre_shift);
+ vacc0xCDEF = vshlq_s32(vacc0xCDEF, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc1x89AB = vshlq_s32(vacc1x89AB, vright_pre_shift);
+ vacc1xCDEF = vshlq_s32(vacc1xCDEF, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc0x89AB = vsraq_n_s32(vacc0x89AB, vbicq_s32(vacc0x89AB, vzero_shift_mask), 31);
- vacc0xCDEF = vsraq_n_s32(vacc0xCDEF, vbicq_s32(vacc0xCDEF, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc1x89AB = vsraq_n_s32(vacc1x89AB, vbicq_s32(vacc1x89AB, vzero_shift_mask), 31);
- vacc1xCDEF = vsraq_n_s32(vacc1xCDEF, vbicq_s32(vacc1xCDEF, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc0x89AB = vqdmulhq_s32(vacc0x89AB, vmultiplier);
+ vacc0xCDEF = vqdmulhq_s32(vacc0xCDEF, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc1x89AB = vqdmulhq_s32(vacc1x89AB, vmultiplier);
+ vacc1xCDEF = vqdmulhq_s32(vacc1xCDEF, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_shift);
- vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_shift);
- vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_post_shift);
+ vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_post_shift);
+ vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc0x89ABCDEF = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x89AB), vacc0xCDEF), voutput_zero_point);
@@ -272,8 +272,8 @@
int8x16_t vout0x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc0x89ABCDEF));
int8x16_t vout1x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc1x01234567), vqmovn_s16(vacc1x89ABCDEF));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout0x0123456789ABCDEF = vmaxq_s8(vout0x0123456789ABCDEF, voutput_min);
vout1x0123456789ABCDEF = vmaxq_s8(vout1x0123456789ABCDEF, voutput_min);
diff --git a/src/qs8-gemm/gen/3x8c2-minmax-gemmlowp-neon-mull-padal-dup.c b/src/qs8-gemm/gen/2x8c2-minmax-rndnu-neon-mull-padal-dup.c
similarity index 65%
copy from src/qs8-gemm/gen/3x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
copy to src/qs8-gemm/gen/2x8c2-minmax-rndnu-neon-mull-padal-dup.c
index 70ccc2c..7448c58 100644
--- a/src/qs8-gemm/gen/3x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
+++ b/src/qs8-gemm/gen/2x8c2-minmax-rndnu-neon-mull-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup(
+void xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -28,7 +28,7 @@
const union xnn_qs8_conv_minmax_params params[restrict XNN_MIN_ELEMENTS(1)]) XNN_DISABLE_TSAN XNN_DISABLE_MSAN
{
assert(mr != 0);
- assert(mr <= 3);
+ assert(mr <= 2);
assert(nc != 0);
assert(kc != 0);
assert(kc % sizeof(int8_t) == 0);
@@ -41,24 +41,16 @@
int8_t* c0 = c;
const int8_t* a1 = (const int8_t*) ((uintptr_t) a0 + a_stride);
int8_t* c1 = (int8_t*) ((uintptr_t) c0 + cm_stride);
- if XNN_UNPREDICTABLE(mr < 2) {
+ if XNN_UNPREDICTABLE(mr != 2) {
a1 = a0;
c1 = c0;
}
- const int8_t* a2 = (const int8_t*) ((uintptr_t) a1 + a_stride);
- int8_t* c2 = (int8_t*) ((uintptr_t) c1 + cm_stride);
- if XNN_UNPREDICTABLE(mr <= 2) {
- a2 = a1;
- c2 = c1;
- }
do {
int32x4_t vacc0x0123 = vld1q_s32(w); w = (const void*) ((uintptr_t) w + 4 * sizeof(int32_t));
int32x4_t vacc0x4567 = vld1q_s32(w); w = (const void*) ((uintptr_t) w + 4 * sizeof(int32_t));
int32x4_t vacc1x0123 = vacc0x0123;
int32x4_t vacc1x4567 = vacc0x4567;
- int32x4_t vacc2x0123 = vacc0x0123;
- int32x4_t vacc2x4567 = vacc0x4567;
size_t k = kc;
@@ -66,7 +58,6 @@
while (k >= 8 * sizeof(int8_t)) {
const int8x8_t va0 = vld1_s8(a0); a0 += 8;
const int8x8_t va1 = vld1_s8(a1); a1 += 8;
- const int8x8_t va2 = vld1_s8(a2); a2 += 8;
const int8x8_t vb0123c0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
const int8x8_t vb4567c0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
@@ -109,22 +100,6 @@
vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c1);
vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c2);
vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c3);
- const int16x8_t vprod2x0123c0 = vmull_s8(vb0123c0, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 0)));
- const int16x8_t vprod2x0123c1 = vmull_s8(vb0123c1, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 1)));
- const int16x8_t vprod2x0123c2 = vmull_s8(vb0123c2, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 2)));
- const int16x8_t vprod2x0123c3 = vmull_s8(vb0123c3, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 3)));
- vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c0);
- vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c1);
- vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c2);
- vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c3);
- const int16x8_t vprod2x4567c0 = vmull_s8(vb4567c0, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 0)));
- const int16x8_t vprod2x4567c1 = vmull_s8(vb4567c1, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 1)));
- const int16x8_t vprod2x4567c2 = vmull_s8(vb4567c2, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 2)));
- const int16x8_t vprod2x4567c3 = vmull_s8(vb4567c3, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 3)));
- vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c0);
- vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c1);
- vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c2);
- vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c3);
k -= 8 * sizeof(int8_t);
}
@@ -132,7 +107,6 @@
if XNN_UNLIKELY(k != 0) {
const int8x8_t va0 = vld1_s8(a0); a0 = (const int8_t*) ((uintptr_t) a0 + k);
const int8x8_t va1 = vld1_s8(a1); a1 = (const int8_t*) ((uintptr_t) a1 + k);
- const int8x8_t va2 = vld1_s8(a2); a2 = (const int8_t*) ((uintptr_t) a2 + k);
const int8x8_t vb0123c0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
const int8x8_t vb4567c0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
@@ -145,10 +119,6 @@
vacc1x0123 = vpadalq_s16(vacc1x0123, vprod1x0123c0);
const int16x8_t vprod1x4567c0 = vmull_s8(vb4567c0, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va1), 0)));
vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c0);
- const int16x8_t vprod2x0123c0 = vmull_s8(vb0123c0, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 0)));
- vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c0);
- const int16x8_t vprod2x4567c0 = vmull_s8(vb4567c0, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 0)));
- vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c0);
if (k > 2 * sizeof(int8_t)) {
const int8x8_t vb0123c1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
@@ -162,10 +132,6 @@
vacc1x0123 = vpadalq_s16(vacc1x0123, vprod1x0123c1);
const int16x8_t vprod1x4567c1 = vmull_s8(vb4567c1, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va1), 1)));
vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c1);
- const int16x8_t vprod2x0123c1 = vmull_s8(vb0123c1, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 1)));
- vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c1);
- const int16x8_t vprod2x4567c1 = vmull_s8(vb4567c1, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 1)));
- vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c1);
if (k > 4 * sizeof(int8_t)) {
const int8x8_t vb0123c2 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
@@ -179,76 +145,57 @@
vacc1x0123 = vpadalq_s16(vacc1x0123, vprod1x0123c2);
const int16x8_t vprod1x4567c2 = vmull_s8(vb4567c2, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va1), 2)));
vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c2);
- const int16x8_t vprod2x0123c2 = vmull_s8(vb0123c2, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 2)));
- vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c2);
- const int16x8_t vprod2x4567c2 = vmull_s8(vb4567c2, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 2)));
- vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c2);
}
}
}
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc2x0123 = vqrdmulhq_s32(vacc2x0123, vmultiplier);
- vacc2x4567 = vqrdmulhq_s32(vacc2x4567, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc2x0123 = vsraq_n_s32(vacc2x0123, vbicq_s32(vacc2x0123, vzero_shift_mask), 31);
- vacc2x4567 = vsraq_n_s32(vacc2x4567, vbicq_s32(vacc2x4567, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc2x0123 = vrshlq_s32(vacc2x0123, vright_shift);
- vacc2x4567 = vrshlq_s32(vacc2x4567, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc1x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc1x0123), vacc1x4567), voutput_zero_point);
- const int16x8_t vacc2x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc2x0123), vacc2x4567), voutput_zero_point);
int8x16_t vout0x01234567_1x01234567 = vqmovn_high_s16(vqmovn_s16(vacc0x01234567), vacc1x01234567);
- int8x8_t vout2x01234567 = vqmovn_s16(vacc2x01234567);
#else
const int16x8_t vacc0x01234567 = vqaddq_s16(vcombine_s16(vqmovn_s32(vacc0x0123), vqmovn_s32(vacc0x4567)), voutput_zero_point);
const int16x8_t vacc1x01234567 = vqaddq_s16(vcombine_s16(vqmovn_s32(vacc1x0123), vqmovn_s32(vacc1x4567)), voutput_zero_point);
- const int16x8_t vacc2x01234567 = vqaddq_s16(vcombine_s16(vqmovn_s32(vacc2x0123), vqmovn_s32(vacc2x4567)), voutput_zero_point);
int8x16_t vout0x01234567_1x01234567 = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc1x01234567));
- int8x8_t vout2x01234567 = vqmovn_s16(vacc2x01234567);
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout0x01234567_1x01234567 = vmaxq_s8(vout0x01234567_1x01234567, voutput_min);
- vout2x01234567 = vmax_s8(vout2x01234567, vget_low_s8(voutput_min));
vout0x01234567_1x01234567 = vminq_s8(vout0x01234567_1x01234567, voutput_max);
- vout2x01234567 = vmin_s8(vout2x01234567, vget_low_s8(voutput_max));
if (nc >= 8) {
vst1_s8(c0 + 0, vget_low_s8(vout0x01234567_1x01234567));
vst1_s8(c1 + 0, vget_high_s8(vout0x01234567_1x01234567));
- vst1_s8(c2 + 0, vout2x01234567);
c0 = (int8_t*) ((uintptr_t) c0 + cn_stride);
c1 = (int8_t*) ((uintptr_t) c1 + cn_stride);
- c2 = (int8_t*) ((uintptr_t) c2 + cn_stride);
a0 = (const int8_t*) ((uintptr_t) a0 - kc);
a1 = (const int8_t*) ((uintptr_t) a1 - kc);
- a2 = (const int8_t*) ((uintptr_t) a2 - kc);
nc -= 8;
} else {
@@ -256,21 +203,16 @@
if (nc & 4) {
vst1q_lane_u32(__builtin_assume_aligned(c0, 1), vreinterpretq_u32_s8(vout0x01234567_1x01234567), 0); c0 += 4;
vst1q_lane_u32(__builtin_assume_aligned(c1, 1), vreinterpretq_u32_s8(vout0x01234567_1x01234567), 2); c1 += 4;
- vst1_lane_u32(__builtin_assume_aligned(c2, 1), vreinterpret_u32_s8(vout2x01234567), 0); c2 += 4;
vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);
- vout2x01234567 = vext_s8(vout2x01234567, vout2x01234567, 4);
}
if (nc & 2) {
vst1q_lane_u16(__builtin_assume_aligned(c0, 1), vreinterpretq_u16_s8(vout0x01234567_1x01234567), 0); c0 += 2;
vst1q_lane_u16(__builtin_assume_aligned(c1, 1), vreinterpretq_u16_s8(vout0x01234567_1x01234567), 4); c1 += 2;
- vst1_lane_u16(__builtin_assume_aligned(c2, 1), vreinterpret_u16_s8(vout2x01234567), 0); c2 += 2;
vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);
- vout2x01234567 = vext_s8(vout2x01234567, vout2x01234567, 2);
}
if (nc & 1) {
vst1q_lane_s8(c0, vout0x01234567_1x01234567, 0);
vst1q_lane_s8(c1, vout0x01234567_1x01234567, 8);
- vst1_lane_s8(c2, vout2x01234567, 0);
}
nc = 0;
diff --git a/src/qs8-gemm/gen/3x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c b/src/qs8-gemm/gen/3x16c2-minmax-rndnu-neon-mlal-padal-dup.c
similarity index 93%
rename from src/qs8-gemm/gen/3x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
rename to src/qs8-gemm/gen/3x16c2-minmax-rndnu-neon-mlal-padal-dup.c
index 4554475..a259a0e 100644
--- a/src/qs8-gemm/gen/3x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
+++ b/src/qs8-gemm/gen/3x16c2-minmax-rndnu-neon-mlal-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup(
+void xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -479,50 +479,50 @@
}
}
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc0x89AB = vqrdmulhq_s32(vacc0x89AB, vmultiplier);
- vacc0xCDEF = vqrdmulhq_s32(vacc0xCDEF, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc1x89AB = vqrdmulhq_s32(vacc1x89AB, vmultiplier);
- vacc1xCDEF = vqrdmulhq_s32(vacc1xCDEF, vmultiplier);
- vacc2x0123 = vqrdmulhq_s32(vacc2x0123, vmultiplier);
- vacc2x4567 = vqrdmulhq_s32(vacc2x4567, vmultiplier);
- vacc2x89AB = vqrdmulhq_s32(vacc2x89AB, vmultiplier);
- vacc2xCDEF = vqrdmulhq_s32(vacc2xCDEF, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc0x89AB = vshlq_s32(vacc0x89AB, vright_pre_shift);
+ vacc0xCDEF = vshlq_s32(vacc0xCDEF, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc1x89AB = vshlq_s32(vacc1x89AB, vright_pre_shift);
+ vacc1xCDEF = vshlq_s32(vacc1xCDEF, vright_pre_shift);
+ vacc2x0123 = vshlq_s32(vacc2x0123, vright_pre_shift);
+ vacc2x4567 = vshlq_s32(vacc2x4567, vright_pre_shift);
+ vacc2x89AB = vshlq_s32(vacc2x89AB, vright_pre_shift);
+ vacc2xCDEF = vshlq_s32(vacc2xCDEF, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc0x89AB = vsraq_n_s32(vacc0x89AB, vbicq_s32(vacc0x89AB, vzero_shift_mask), 31);
- vacc0xCDEF = vsraq_n_s32(vacc0xCDEF, vbicq_s32(vacc0xCDEF, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc1x89AB = vsraq_n_s32(vacc1x89AB, vbicq_s32(vacc1x89AB, vzero_shift_mask), 31);
- vacc1xCDEF = vsraq_n_s32(vacc1xCDEF, vbicq_s32(vacc1xCDEF, vzero_shift_mask), 31);
- vacc2x0123 = vsraq_n_s32(vacc2x0123, vbicq_s32(vacc2x0123, vzero_shift_mask), 31);
- vacc2x4567 = vsraq_n_s32(vacc2x4567, vbicq_s32(vacc2x4567, vzero_shift_mask), 31);
- vacc2x89AB = vsraq_n_s32(vacc2x89AB, vbicq_s32(vacc2x89AB, vzero_shift_mask), 31);
- vacc2xCDEF = vsraq_n_s32(vacc2xCDEF, vbicq_s32(vacc2xCDEF, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc0x89AB = vqdmulhq_s32(vacc0x89AB, vmultiplier);
+ vacc0xCDEF = vqdmulhq_s32(vacc0xCDEF, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc1x89AB = vqdmulhq_s32(vacc1x89AB, vmultiplier);
+ vacc1xCDEF = vqdmulhq_s32(vacc1xCDEF, vmultiplier);
+ vacc2x0123 = vqdmulhq_s32(vacc2x0123, vmultiplier);
+ vacc2x4567 = vqdmulhq_s32(vacc2x4567, vmultiplier);
+ vacc2x89AB = vqdmulhq_s32(vacc2x89AB, vmultiplier);
+ vacc2xCDEF = vqdmulhq_s32(vacc2xCDEF, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_shift);
- vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_shift);
- vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_shift);
- vacc2x0123 = vrshlq_s32(vacc2x0123, vright_shift);
- vacc2x4567 = vrshlq_s32(vacc2x4567, vright_shift);
- vacc2x89AB = vrshlq_s32(vacc2x89AB, vright_shift);
- vacc2xCDEF = vrshlq_s32(vacc2xCDEF, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_post_shift);
+ vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_post_shift);
+ vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_post_shift);
+ vacc2x0123 = vrshlq_s32(vacc2x0123, vright_post_shift);
+ vacc2x4567 = vrshlq_s32(vacc2x4567, vright_post_shift);
+ vacc2x89AB = vrshlq_s32(vacc2x89AB, vright_post_shift);
+ vacc2xCDEF = vrshlq_s32(vacc2xCDEF, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc0x89ABCDEF = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x89AB), vacc0xCDEF), voutput_zero_point);
@@ -546,8 +546,8 @@
int8x16_t vout1x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc1x01234567), vqmovn_s16(vacc1x89ABCDEF));
int8x16_t vout2x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc2x01234567), vqmovn_s16(vacc2x89ABCDEF));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout0x0123456789ABCDEF = vmaxq_s8(vout0x0123456789ABCDEF, voutput_min);
vout1x0123456789ABCDEF = vmaxq_s8(vout1x0123456789ABCDEF, voutput_min);
diff --git a/src/qs8-gemm/gen/3x16c2-minmax-gemmlowp-neon-mull-padal-dup.c b/src/qs8-gemm/gen/3x16c2-minmax-rndnu-neon-mull-padal-dup.c
similarity index 89%
rename from src/qs8-gemm/gen/3x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
rename to src/qs8-gemm/gen/3x16c2-minmax-rndnu-neon-mull-padal-dup.c
index 1853ee5..b94944f 100644
--- a/src/qs8-gemm/gen/3x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
+++ b/src/qs8-gemm/gen/3x16c2-minmax-rndnu-neon-mull-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup(
+void xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -291,50 +291,50 @@
}
}
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc0x89AB = vqrdmulhq_s32(vacc0x89AB, vmultiplier);
- vacc0xCDEF = vqrdmulhq_s32(vacc0xCDEF, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc1x89AB = vqrdmulhq_s32(vacc1x89AB, vmultiplier);
- vacc1xCDEF = vqrdmulhq_s32(vacc1xCDEF, vmultiplier);
- vacc2x0123 = vqrdmulhq_s32(vacc2x0123, vmultiplier);
- vacc2x4567 = vqrdmulhq_s32(vacc2x4567, vmultiplier);
- vacc2x89AB = vqrdmulhq_s32(vacc2x89AB, vmultiplier);
- vacc2xCDEF = vqrdmulhq_s32(vacc2xCDEF, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc0x89AB = vshlq_s32(vacc0x89AB, vright_pre_shift);
+ vacc0xCDEF = vshlq_s32(vacc0xCDEF, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc1x89AB = vshlq_s32(vacc1x89AB, vright_pre_shift);
+ vacc1xCDEF = vshlq_s32(vacc1xCDEF, vright_pre_shift);
+ vacc2x0123 = vshlq_s32(vacc2x0123, vright_pre_shift);
+ vacc2x4567 = vshlq_s32(vacc2x4567, vright_pre_shift);
+ vacc2x89AB = vshlq_s32(vacc2x89AB, vright_pre_shift);
+ vacc2xCDEF = vshlq_s32(vacc2xCDEF, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc0x89AB = vsraq_n_s32(vacc0x89AB, vbicq_s32(vacc0x89AB, vzero_shift_mask), 31);
- vacc0xCDEF = vsraq_n_s32(vacc0xCDEF, vbicq_s32(vacc0xCDEF, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc1x89AB = vsraq_n_s32(vacc1x89AB, vbicq_s32(vacc1x89AB, vzero_shift_mask), 31);
- vacc1xCDEF = vsraq_n_s32(vacc1xCDEF, vbicq_s32(vacc1xCDEF, vzero_shift_mask), 31);
- vacc2x0123 = vsraq_n_s32(vacc2x0123, vbicq_s32(vacc2x0123, vzero_shift_mask), 31);
- vacc2x4567 = vsraq_n_s32(vacc2x4567, vbicq_s32(vacc2x4567, vzero_shift_mask), 31);
- vacc2x89AB = vsraq_n_s32(vacc2x89AB, vbicq_s32(vacc2x89AB, vzero_shift_mask), 31);
- vacc2xCDEF = vsraq_n_s32(vacc2xCDEF, vbicq_s32(vacc2xCDEF, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc0x89AB = vqdmulhq_s32(vacc0x89AB, vmultiplier);
+ vacc0xCDEF = vqdmulhq_s32(vacc0xCDEF, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc1x89AB = vqdmulhq_s32(vacc1x89AB, vmultiplier);
+ vacc1xCDEF = vqdmulhq_s32(vacc1xCDEF, vmultiplier);
+ vacc2x0123 = vqdmulhq_s32(vacc2x0123, vmultiplier);
+ vacc2x4567 = vqdmulhq_s32(vacc2x4567, vmultiplier);
+ vacc2x89AB = vqdmulhq_s32(vacc2x89AB, vmultiplier);
+ vacc2xCDEF = vqdmulhq_s32(vacc2xCDEF, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_shift);
- vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_shift);
- vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_shift);
- vacc2x0123 = vrshlq_s32(vacc2x0123, vright_shift);
- vacc2x4567 = vrshlq_s32(vacc2x4567, vright_shift);
- vacc2x89AB = vrshlq_s32(vacc2x89AB, vright_shift);
- vacc2xCDEF = vrshlq_s32(vacc2xCDEF, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_post_shift);
+ vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_post_shift);
+ vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_post_shift);
+ vacc2x0123 = vrshlq_s32(vacc2x0123, vright_post_shift);
+ vacc2x4567 = vrshlq_s32(vacc2x4567, vright_post_shift);
+ vacc2x89AB = vrshlq_s32(vacc2x89AB, vright_post_shift);
+ vacc2xCDEF = vrshlq_s32(vacc2xCDEF, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc0x89ABCDEF = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x89AB), vacc0xCDEF), voutput_zero_point);
@@ -358,8 +358,8 @@
int8x16_t vout1x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc1x01234567), vqmovn_s16(vacc1x89ABCDEF));
int8x16_t vout2x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc2x01234567), vqmovn_s16(vacc2x89ABCDEF));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout0x0123456789ABCDEF = vmaxq_s8(vout0x0123456789ABCDEF, voutput_min);
vout1x0123456789ABCDEF = vmaxq_s8(vout1x0123456789ABCDEF, voutput_min);
diff --git a/src/qs8-gemm/gen/3x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c b/src/qs8-gemm/gen/3x8c2-minmax-rndnu-neon-mlal-padal-dup.c
similarity index 93%
rename from src/qs8-gemm/gen/3x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c
rename to src/qs8-gemm/gen/3x8c2-minmax-rndnu-neon-mlal-padal-dup.c
index 670d96e..c09a57b 100644
--- a/src/qs8-gemm/gen/3x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c
+++ b/src/qs8-gemm/gen/3x8c2-minmax-rndnu-neon-mlal-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup(
+void xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -287,32 +287,32 @@
}
}
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc2x0123 = vqrdmulhq_s32(vacc2x0123, vmultiplier);
- vacc2x4567 = vqrdmulhq_s32(vacc2x4567, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc2x0123 = vshlq_s32(vacc2x0123, vright_pre_shift);
+ vacc2x4567 = vshlq_s32(vacc2x4567, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc2x0123 = vsraq_n_s32(vacc2x0123, vbicq_s32(vacc2x0123, vzero_shift_mask), 31);
- vacc2x4567 = vsraq_n_s32(vacc2x4567, vbicq_s32(vacc2x4567, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc2x0123 = vqdmulhq_s32(vacc2x0123, vmultiplier);
+ vacc2x4567 = vqdmulhq_s32(vacc2x4567, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc2x0123 = vrshlq_s32(vacc2x0123, vright_shift);
- vacc2x4567 = vrshlq_s32(vacc2x4567, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc2x0123 = vrshlq_s32(vacc2x0123, vright_post_shift);
+ vacc2x4567 = vrshlq_s32(vacc2x4567, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc1x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc1x0123), vacc1x4567), voutput_zero_point);
@@ -328,8 +328,8 @@
int8x16_t vout0x01234567_1x01234567 = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc1x01234567));
int8x8_t vout2x01234567 = vqmovn_s16(vacc2x01234567);
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout0x01234567_1x01234567 = vmaxq_s8(vout0x01234567_1x01234567, voutput_min);
vout2x01234567 = vmax_s8(vout2x01234567, vget_low_s8(voutput_min));
diff --git a/src/qs8-gemm/gen/3x8c2-minmax-gemmlowp-neon-mull-padal-dup.c b/src/qs8-gemm/gen/3x8c2-minmax-rndnu-neon-mull-padal-dup.c
similarity index 89%
rename from src/qs8-gemm/gen/3x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
rename to src/qs8-gemm/gen/3x8c2-minmax-rndnu-neon-mull-padal-dup.c
index 70ccc2c..9d9a129 100644
--- a/src/qs8-gemm/gen/3x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
+++ b/src/qs8-gemm/gen/3x8c2-minmax-rndnu-neon-mull-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup(
+void xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -187,32 +187,32 @@
}
}
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc2x0123 = vqrdmulhq_s32(vacc2x0123, vmultiplier);
- vacc2x4567 = vqrdmulhq_s32(vacc2x4567, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc2x0123 = vshlq_s32(vacc2x0123, vright_pre_shift);
+ vacc2x4567 = vshlq_s32(vacc2x4567, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc2x0123 = vsraq_n_s32(vacc2x0123, vbicq_s32(vacc2x0123, vzero_shift_mask), 31);
- vacc2x4567 = vsraq_n_s32(vacc2x4567, vbicq_s32(vacc2x4567, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc2x0123 = vqdmulhq_s32(vacc2x0123, vmultiplier);
+ vacc2x4567 = vqdmulhq_s32(vacc2x4567, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc2x0123 = vrshlq_s32(vacc2x0123, vright_shift);
- vacc2x4567 = vrshlq_s32(vacc2x4567, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc2x0123 = vrshlq_s32(vacc2x0123, vright_post_shift);
+ vacc2x4567 = vrshlq_s32(vacc2x4567, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc1x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc1x0123), vacc1x4567), voutput_zero_point);
@@ -228,8 +228,8 @@
int8x16_t vout0x01234567_1x01234567 = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc1x01234567));
int8x8_t vout2x01234567 = vqmovn_s16(vacc2x01234567);
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout0x01234567_1x01234567 = vmaxq_s8(vout0x01234567_1x01234567, voutput_min);
vout2x01234567 = vmax_s8(vout2x01234567, vget_low_s8(voutput_min));
diff --git a/src/qs8-gemm/gen/4x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c b/src/qs8-gemm/gen/4x16c2-minmax-rndnu-neon-mlal-padal-dup.c
similarity index 93%
rename from src/qs8-gemm/gen/4x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
rename to src/qs8-gemm/gen/4x16c2-minmax-rndnu-neon-mlal-padal-dup.c
index ffb826e..9d1eb79 100644
--- a/src/qs8-gemm/gen/4x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
+++ b/src/qs8-gemm/gen/4x16c2-minmax-rndnu-neon-mlal-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup(
+void xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -597,62 +597,62 @@
}
}
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc0x89AB = vqrdmulhq_s32(vacc0x89AB, vmultiplier);
- vacc0xCDEF = vqrdmulhq_s32(vacc0xCDEF, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc1x89AB = vqrdmulhq_s32(vacc1x89AB, vmultiplier);
- vacc1xCDEF = vqrdmulhq_s32(vacc1xCDEF, vmultiplier);
- vacc2x0123 = vqrdmulhq_s32(vacc2x0123, vmultiplier);
- vacc2x4567 = vqrdmulhq_s32(vacc2x4567, vmultiplier);
- vacc2x89AB = vqrdmulhq_s32(vacc2x89AB, vmultiplier);
- vacc2xCDEF = vqrdmulhq_s32(vacc2xCDEF, vmultiplier);
- vacc3x0123 = vqrdmulhq_s32(vacc3x0123, vmultiplier);
- vacc3x4567 = vqrdmulhq_s32(vacc3x4567, vmultiplier);
- vacc3x89AB = vqrdmulhq_s32(vacc3x89AB, vmultiplier);
- vacc3xCDEF = vqrdmulhq_s32(vacc3xCDEF, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc0x89AB = vshlq_s32(vacc0x89AB, vright_pre_shift);
+ vacc0xCDEF = vshlq_s32(vacc0xCDEF, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc1x89AB = vshlq_s32(vacc1x89AB, vright_pre_shift);
+ vacc1xCDEF = vshlq_s32(vacc1xCDEF, vright_pre_shift);
+ vacc2x0123 = vshlq_s32(vacc2x0123, vright_pre_shift);
+ vacc2x4567 = vshlq_s32(vacc2x4567, vright_pre_shift);
+ vacc2x89AB = vshlq_s32(vacc2x89AB, vright_pre_shift);
+ vacc2xCDEF = vshlq_s32(vacc2xCDEF, vright_pre_shift);
+ vacc3x0123 = vshlq_s32(vacc3x0123, vright_pre_shift);
+ vacc3x4567 = vshlq_s32(vacc3x4567, vright_pre_shift);
+ vacc3x89AB = vshlq_s32(vacc3x89AB, vright_pre_shift);
+ vacc3xCDEF = vshlq_s32(vacc3xCDEF, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc0x89AB = vsraq_n_s32(vacc0x89AB, vbicq_s32(vacc0x89AB, vzero_shift_mask), 31);
- vacc0xCDEF = vsraq_n_s32(vacc0xCDEF, vbicq_s32(vacc0xCDEF, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc1x89AB = vsraq_n_s32(vacc1x89AB, vbicq_s32(vacc1x89AB, vzero_shift_mask), 31);
- vacc1xCDEF = vsraq_n_s32(vacc1xCDEF, vbicq_s32(vacc1xCDEF, vzero_shift_mask), 31);
- vacc2x0123 = vsraq_n_s32(vacc2x0123, vbicq_s32(vacc2x0123, vzero_shift_mask), 31);
- vacc2x4567 = vsraq_n_s32(vacc2x4567, vbicq_s32(vacc2x4567, vzero_shift_mask), 31);
- vacc2x89AB = vsraq_n_s32(vacc2x89AB, vbicq_s32(vacc2x89AB, vzero_shift_mask), 31);
- vacc2xCDEF = vsraq_n_s32(vacc2xCDEF, vbicq_s32(vacc2xCDEF, vzero_shift_mask), 31);
- vacc3x0123 = vsraq_n_s32(vacc3x0123, vbicq_s32(vacc3x0123, vzero_shift_mask), 31);
- vacc3x4567 = vsraq_n_s32(vacc3x4567, vbicq_s32(vacc3x4567, vzero_shift_mask), 31);
- vacc3x89AB = vsraq_n_s32(vacc3x89AB, vbicq_s32(vacc3x89AB, vzero_shift_mask), 31);
- vacc3xCDEF = vsraq_n_s32(vacc3xCDEF, vbicq_s32(vacc3xCDEF, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc0x89AB = vqdmulhq_s32(vacc0x89AB, vmultiplier);
+ vacc0xCDEF = vqdmulhq_s32(vacc0xCDEF, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc1x89AB = vqdmulhq_s32(vacc1x89AB, vmultiplier);
+ vacc1xCDEF = vqdmulhq_s32(vacc1xCDEF, vmultiplier);
+ vacc2x0123 = vqdmulhq_s32(vacc2x0123, vmultiplier);
+ vacc2x4567 = vqdmulhq_s32(vacc2x4567, vmultiplier);
+ vacc2x89AB = vqdmulhq_s32(vacc2x89AB, vmultiplier);
+ vacc2xCDEF = vqdmulhq_s32(vacc2xCDEF, vmultiplier);
+ vacc3x0123 = vqdmulhq_s32(vacc3x0123, vmultiplier);
+ vacc3x4567 = vqdmulhq_s32(vacc3x4567, vmultiplier);
+ vacc3x89AB = vqdmulhq_s32(vacc3x89AB, vmultiplier);
+ vacc3xCDEF = vqdmulhq_s32(vacc3xCDEF, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_shift);
- vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_shift);
- vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_shift);
- vacc2x0123 = vrshlq_s32(vacc2x0123, vright_shift);
- vacc2x4567 = vrshlq_s32(vacc2x4567, vright_shift);
- vacc2x89AB = vrshlq_s32(vacc2x89AB, vright_shift);
- vacc2xCDEF = vrshlq_s32(vacc2xCDEF, vright_shift);
- vacc3x0123 = vrshlq_s32(vacc3x0123, vright_shift);
- vacc3x4567 = vrshlq_s32(vacc3x4567, vright_shift);
- vacc3x89AB = vrshlq_s32(vacc3x89AB, vright_shift);
- vacc3xCDEF = vrshlq_s32(vacc3xCDEF, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_post_shift);
+ vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_post_shift);
+ vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_post_shift);
+ vacc2x0123 = vrshlq_s32(vacc2x0123, vright_post_shift);
+ vacc2x4567 = vrshlq_s32(vacc2x4567, vright_post_shift);
+ vacc2x89AB = vrshlq_s32(vacc2x89AB, vright_post_shift);
+ vacc2xCDEF = vrshlq_s32(vacc2xCDEF, vright_post_shift);
+ vacc3x0123 = vrshlq_s32(vacc3x0123, vright_post_shift);
+ vacc3x4567 = vrshlq_s32(vacc3x4567, vright_post_shift);
+ vacc3x89AB = vrshlq_s32(vacc3x89AB, vright_post_shift);
+ vacc3xCDEF = vrshlq_s32(vacc3xCDEF, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc0x89ABCDEF = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x89AB), vacc0xCDEF), voutput_zero_point);
@@ -682,8 +682,8 @@
int8x16_t vout2x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc2x01234567), vqmovn_s16(vacc2x89ABCDEF));
int8x16_t vout3x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc3x01234567), vqmovn_s16(vacc3x89ABCDEF));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout0x0123456789ABCDEF = vmaxq_s8(vout0x0123456789ABCDEF, voutput_min);
vout1x0123456789ABCDEF = vmaxq_s8(vout1x0123456789ABCDEF, voutput_min);
diff --git a/src/qs8-gemm/gen/4x16c2-minmax-gemmlowp-neon-mull-padal-dup.c b/src/qs8-gemm/gen/4x16c2-minmax-rndnu-neon-mull-padal-dup.c
similarity index 89%
rename from src/qs8-gemm/gen/4x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
rename to src/qs8-gemm/gen/4x16c2-minmax-rndnu-neon-mull-padal-dup.c
index ea35f86..0396c36 100644
--- a/src/qs8-gemm/gen/4x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
+++ b/src/qs8-gemm/gen/4x16c2-minmax-rndnu-neon-mull-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup(
+void xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -359,62 +359,62 @@
}
}
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc0x89AB = vqrdmulhq_s32(vacc0x89AB, vmultiplier);
- vacc0xCDEF = vqrdmulhq_s32(vacc0xCDEF, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc1x89AB = vqrdmulhq_s32(vacc1x89AB, vmultiplier);
- vacc1xCDEF = vqrdmulhq_s32(vacc1xCDEF, vmultiplier);
- vacc2x0123 = vqrdmulhq_s32(vacc2x0123, vmultiplier);
- vacc2x4567 = vqrdmulhq_s32(vacc2x4567, vmultiplier);
- vacc2x89AB = vqrdmulhq_s32(vacc2x89AB, vmultiplier);
- vacc2xCDEF = vqrdmulhq_s32(vacc2xCDEF, vmultiplier);
- vacc3x0123 = vqrdmulhq_s32(vacc3x0123, vmultiplier);
- vacc3x4567 = vqrdmulhq_s32(vacc3x4567, vmultiplier);
- vacc3x89AB = vqrdmulhq_s32(vacc3x89AB, vmultiplier);
- vacc3xCDEF = vqrdmulhq_s32(vacc3xCDEF, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc0x89AB = vshlq_s32(vacc0x89AB, vright_pre_shift);
+ vacc0xCDEF = vshlq_s32(vacc0xCDEF, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc1x89AB = vshlq_s32(vacc1x89AB, vright_pre_shift);
+ vacc1xCDEF = vshlq_s32(vacc1xCDEF, vright_pre_shift);
+ vacc2x0123 = vshlq_s32(vacc2x0123, vright_pre_shift);
+ vacc2x4567 = vshlq_s32(vacc2x4567, vright_pre_shift);
+ vacc2x89AB = vshlq_s32(vacc2x89AB, vright_pre_shift);
+ vacc2xCDEF = vshlq_s32(vacc2xCDEF, vright_pre_shift);
+ vacc3x0123 = vshlq_s32(vacc3x0123, vright_pre_shift);
+ vacc3x4567 = vshlq_s32(vacc3x4567, vright_pre_shift);
+ vacc3x89AB = vshlq_s32(vacc3x89AB, vright_pre_shift);
+ vacc3xCDEF = vshlq_s32(vacc3xCDEF, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc0x89AB = vsraq_n_s32(vacc0x89AB, vbicq_s32(vacc0x89AB, vzero_shift_mask), 31);
- vacc0xCDEF = vsraq_n_s32(vacc0xCDEF, vbicq_s32(vacc0xCDEF, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc1x89AB = vsraq_n_s32(vacc1x89AB, vbicq_s32(vacc1x89AB, vzero_shift_mask), 31);
- vacc1xCDEF = vsraq_n_s32(vacc1xCDEF, vbicq_s32(vacc1xCDEF, vzero_shift_mask), 31);
- vacc2x0123 = vsraq_n_s32(vacc2x0123, vbicq_s32(vacc2x0123, vzero_shift_mask), 31);
- vacc2x4567 = vsraq_n_s32(vacc2x4567, vbicq_s32(vacc2x4567, vzero_shift_mask), 31);
- vacc2x89AB = vsraq_n_s32(vacc2x89AB, vbicq_s32(vacc2x89AB, vzero_shift_mask), 31);
- vacc2xCDEF = vsraq_n_s32(vacc2xCDEF, vbicq_s32(vacc2xCDEF, vzero_shift_mask), 31);
- vacc3x0123 = vsraq_n_s32(vacc3x0123, vbicq_s32(vacc3x0123, vzero_shift_mask), 31);
- vacc3x4567 = vsraq_n_s32(vacc3x4567, vbicq_s32(vacc3x4567, vzero_shift_mask), 31);
- vacc3x89AB = vsraq_n_s32(vacc3x89AB, vbicq_s32(vacc3x89AB, vzero_shift_mask), 31);
- vacc3xCDEF = vsraq_n_s32(vacc3xCDEF, vbicq_s32(vacc3xCDEF, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc0x89AB = vqdmulhq_s32(vacc0x89AB, vmultiplier);
+ vacc0xCDEF = vqdmulhq_s32(vacc0xCDEF, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc1x89AB = vqdmulhq_s32(vacc1x89AB, vmultiplier);
+ vacc1xCDEF = vqdmulhq_s32(vacc1xCDEF, vmultiplier);
+ vacc2x0123 = vqdmulhq_s32(vacc2x0123, vmultiplier);
+ vacc2x4567 = vqdmulhq_s32(vacc2x4567, vmultiplier);
+ vacc2x89AB = vqdmulhq_s32(vacc2x89AB, vmultiplier);
+ vacc2xCDEF = vqdmulhq_s32(vacc2xCDEF, vmultiplier);
+ vacc3x0123 = vqdmulhq_s32(vacc3x0123, vmultiplier);
+ vacc3x4567 = vqdmulhq_s32(vacc3x4567, vmultiplier);
+ vacc3x89AB = vqdmulhq_s32(vacc3x89AB, vmultiplier);
+ vacc3xCDEF = vqdmulhq_s32(vacc3xCDEF, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_shift);
- vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_shift);
- vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_shift);
- vacc2x0123 = vrshlq_s32(vacc2x0123, vright_shift);
- vacc2x4567 = vrshlq_s32(vacc2x4567, vright_shift);
- vacc2x89AB = vrshlq_s32(vacc2x89AB, vright_shift);
- vacc2xCDEF = vrshlq_s32(vacc2xCDEF, vright_shift);
- vacc3x0123 = vrshlq_s32(vacc3x0123, vright_shift);
- vacc3x4567 = vrshlq_s32(vacc3x4567, vright_shift);
- vacc3x89AB = vrshlq_s32(vacc3x89AB, vright_shift);
- vacc3xCDEF = vrshlq_s32(vacc3xCDEF, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_post_shift);
+ vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_post_shift);
+ vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_post_shift);
+ vacc2x0123 = vrshlq_s32(vacc2x0123, vright_post_shift);
+ vacc2x4567 = vrshlq_s32(vacc2x4567, vright_post_shift);
+ vacc2x89AB = vrshlq_s32(vacc2x89AB, vright_post_shift);
+ vacc2xCDEF = vrshlq_s32(vacc2xCDEF, vright_post_shift);
+ vacc3x0123 = vrshlq_s32(vacc3x0123, vright_post_shift);
+ vacc3x4567 = vrshlq_s32(vacc3x4567, vright_post_shift);
+ vacc3x89AB = vrshlq_s32(vacc3x89AB, vright_post_shift);
+ vacc3xCDEF = vrshlq_s32(vacc3xCDEF, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc0x89ABCDEF = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x89AB), vacc0xCDEF), voutput_zero_point);
@@ -444,8 +444,8 @@
int8x16_t vout2x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc2x01234567), vqmovn_s16(vacc2x89ABCDEF));
int8x16_t vout3x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc3x01234567), vqmovn_s16(vacc3x89ABCDEF));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout0x0123456789ABCDEF = vmaxq_s8(vout0x0123456789ABCDEF, voutput_min);
vout1x0123456789ABCDEF = vmaxq_s8(vout1x0123456789ABCDEF, voutput_min);
diff --git a/src/qs8-gemm/gen/4x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c b/src/qs8-gemm/gen/4x8c2-minmax-rndnu-neon-mlal-padal-dup.c
similarity index 93%
rename from src/qs8-gemm/gen/4x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c
rename to src/qs8-gemm/gen/4x8c2-minmax-rndnu-neon-mlal-padal-dup.c
index 8b143e4..72b9048 100644
--- a/src/qs8-gemm/gen/4x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c
+++ b/src/qs8-gemm/gen/4x8c2-minmax-rndnu-neon-mlal-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup(
+void xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -351,38 +351,38 @@
}
}
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc2x0123 = vqrdmulhq_s32(vacc2x0123, vmultiplier);
- vacc2x4567 = vqrdmulhq_s32(vacc2x4567, vmultiplier);
- vacc3x0123 = vqrdmulhq_s32(vacc3x0123, vmultiplier);
- vacc3x4567 = vqrdmulhq_s32(vacc3x4567, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc2x0123 = vshlq_s32(vacc2x0123, vright_pre_shift);
+ vacc2x4567 = vshlq_s32(vacc2x4567, vright_pre_shift);
+ vacc3x0123 = vshlq_s32(vacc3x0123, vright_pre_shift);
+ vacc3x4567 = vshlq_s32(vacc3x4567, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc2x0123 = vsraq_n_s32(vacc2x0123, vbicq_s32(vacc2x0123, vzero_shift_mask), 31);
- vacc2x4567 = vsraq_n_s32(vacc2x4567, vbicq_s32(vacc2x4567, vzero_shift_mask), 31);
- vacc3x0123 = vsraq_n_s32(vacc3x0123, vbicq_s32(vacc3x0123, vzero_shift_mask), 31);
- vacc3x4567 = vsraq_n_s32(vacc3x4567, vbicq_s32(vacc3x4567, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc2x0123 = vqdmulhq_s32(vacc2x0123, vmultiplier);
+ vacc2x4567 = vqdmulhq_s32(vacc2x4567, vmultiplier);
+ vacc3x0123 = vqdmulhq_s32(vacc3x0123, vmultiplier);
+ vacc3x4567 = vqdmulhq_s32(vacc3x4567, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc2x0123 = vrshlq_s32(vacc2x0123, vright_shift);
- vacc2x4567 = vrshlq_s32(vacc2x4567, vright_shift);
- vacc3x0123 = vrshlq_s32(vacc3x0123, vright_shift);
- vacc3x4567 = vrshlq_s32(vacc3x4567, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc2x0123 = vrshlq_s32(vacc2x0123, vright_post_shift);
+ vacc2x4567 = vrshlq_s32(vacc2x4567, vright_post_shift);
+ vacc3x0123 = vrshlq_s32(vacc3x0123, vright_post_shift);
+ vacc3x4567 = vrshlq_s32(vacc3x4567, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc1x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc1x0123), vacc1x4567), voutput_zero_point);
@@ -400,8 +400,8 @@
int8x16_t vout0x01234567_1x01234567 = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc1x01234567));
int8x16_t vout2x01234567_3x01234567 = vcombine_s8(vqmovn_s16(vacc2x01234567), vqmovn_s16(vacc3x01234567));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout0x01234567_1x01234567 = vmaxq_s8(vout0x01234567_1x01234567, voutput_min);
vout2x01234567_3x01234567 = vmaxq_s8(vout2x01234567_3x01234567, voutput_min);
diff --git a/src/qs8-gemm/gen/4x8c2-minmax-gemmlowp-neon-mull-padal-dup.c b/src/qs8-gemm/gen/4x8c2-minmax-rndnu-neon-mull-padal-dup.c
similarity index 89%
rename from src/qs8-gemm/gen/4x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
rename to src/qs8-gemm/gen/4x8c2-minmax-rndnu-neon-mull-padal-dup.c
index 78d4241..1ce9959 100644
--- a/src/qs8-gemm/gen/4x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
+++ b/src/qs8-gemm/gen/4x8c2-minmax-rndnu-neon-mull-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup(
+void xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -225,38 +225,38 @@
}
}
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc2x0123 = vqrdmulhq_s32(vacc2x0123, vmultiplier);
- vacc2x4567 = vqrdmulhq_s32(vacc2x4567, vmultiplier);
- vacc3x0123 = vqrdmulhq_s32(vacc3x0123, vmultiplier);
- vacc3x4567 = vqrdmulhq_s32(vacc3x4567, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc2x0123 = vshlq_s32(vacc2x0123, vright_pre_shift);
+ vacc2x4567 = vshlq_s32(vacc2x4567, vright_pre_shift);
+ vacc3x0123 = vshlq_s32(vacc3x0123, vright_pre_shift);
+ vacc3x4567 = vshlq_s32(vacc3x4567, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc2x0123 = vsraq_n_s32(vacc2x0123, vbicq_s32(vacc2x0123, vzero_shift_mask), 31);
- vacc2x4567 = vsraq_n_s32(vacc2x4567, vbicq_s32(vacc2x4567, vzero_shift_mask), 31);
- vacc3x0123 = vsraq_n_s32(vacc3x0123, vbicq_s32(vacc3x0123, vzero_shift_mask), 31);
- vacc3x4567 = vsraq_n_s32(vacc3x4567, vbicq_s32(vacc3x4567, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc2x0123 = vqdmulhq_s32(vacc2x0123, vmultiplier);
+ vacc2x4567 = vqdmulhq_s32(vacc2x4567, vmultiplier);
+ vacc3x0123 = vqdmulhq_s32(vacc3x0123, vmultiplier);
+ vacc3x4567 = vqdmulhq_s32(vacc3x4567, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc2x0123 = vrshlq_s32(vacc2x0123, vright_shift);
- vacc2x4567 = vrshlq_s32(vacc2x4567, vright_shift);
- vacc3x0123 = vrshlq_s32(vacc3x0123, vright_shift);
- vacc3x4567 = vrshlq_s32(vacc3x4567, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc2x0123 = vrshlq_s32(vacc2x0123, vright_post_shift);
+ vacc2x4567 = vrshlq_s32(vacc2x4567, vright_post_shift);
+ vacc3x0123 = vrshlq_s32(vacc3x0123, vright_post_shift);
+ vacc3x4567 = vrshlq_s32(vacc3x4567, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc1x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc1x0123), vacc1x4567), voutput_zero_point);
@@ -274,8 +274,8 @@
int8x16_t vout0x01234567_1x01234567 = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc1x01234567));
int8x16_t vout2x01234567_3x01234567 = vcombine_s8(vqmovn_s16(vacc2x01234567), vqmovn_s16(vacc3x01234567));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout0x01234567_1x01234567 = vmaxq_s8(vout0x01234567_1x01234567, voutput_min);
vout2x01234567_3x01234567 = vmaxq_s8(vout2x01234567_3x01234567, voutput_min);
diff --git a/src/qs8-igemm/gen/1x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c b/src/qs8-igemm/gen/1x16c2-minmax-rndnu-neon-mlal-padal-dup.c
similarity index 93%
rename from src/qs8-igemm/gen/1x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
rename to src/qs8-igemm/gen/1x16c2-minmax-rndnu-neon-mlal-padal-dup.c
index 226bca2..be3effe 100644
--- a/src/qs8-igemm/gen/1x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
+++ b/src/qs8-igemm/gen/1x16c2-minmax-rndnu-neon-mlal-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup(
+void xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -256,26 +256,26 @@
p -= 1 * sizeof(void*);
} while (p != 0);
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc0x89AB = vqrdmulhq_s32(vacc0x89AB, vmultiplier);
- vacc0xCDEF = vqrdmulhq_s32(vacc0xCDEF, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc0x89AB = vshlq_s32(vacc0x89AB, vright_pre_shift);
+ vacc0xCDEF = vshlq_s32(vacc0xCDEF, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc0x89AB = vsraq_n_s32(vacc0x89AB, vbicq_s32(vacc0x89AB, vzero_shift_mask), 31);
- vacc0xCDEF = vsraq_n_s32(vacc0xCDEF, vbicq_s32(vacc0xCDEF, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc0x89AB = vqdmulhq_s32(vacc0x89AB, vmultiplier);
+ vacc0xCDEF = vqdmulhq_s32(vacc0xCDEF, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_shift);
- vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_post_shift);
+ vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc0x89ABCDEF = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x89AB), vacc0xCDEF), voutput_zero_point);
@@ -287,8 +287,8 @@
int8x16_t vout0x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc0x89ABCDEF));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout0x0123456789ABCDEF = vmaxq_s8(vout0x0123456789ABCDEF, voutput_min);
diff --git a/src/qs8-igemm/gen/1x16c2-minmax-gemmlowp-neon-mull-padal-dup.c b/src/qs8-igemm/gen/1x16c2-minmax-rndnu-neon-mull-padal-dup.c
similarity index 89%
rename from src/qs8-igemm/gen/1x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
rename to src/qs8-igemm/gen/1x16c2-minmax-rndnu-neon-mull-padal-dup.c
index b6a30da..a1426c1 100644
--- a/src/qs8-igemm/gen/1x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
+++ b/src/qs8-igemm/gen/1x16c2-minmax-rndnu-neon-mull-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup(
+void xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -168,26 +168,26 @@
p -= 1 * sizeof(void*);
} while (p != 0);
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc0x89AB = vqrdmulhq_s32(vacc0x89AB, vmultiplier);
- vacc0xCDEF = vqrdmulhq_s32(vacc0xCDEF, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc0x89AB = vshlq_s32(vacc0x89AB, vright_pre_shift);
+ vacc0xCDEF = vshlq_s32(vacc0xCDEF, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc0x89AB = vsraq_n_s32(vacc0x89AB, vbicq_s32(vacc0x89AB, vzero_shift_mask), 31);
- vacc0xCDEF = vsraq_n_s32(vacc0xCDEF, vbicq_s32(vacc0xCDEF, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc0x89AB = vqdmulhq_s32(vacc0x89AB, vmultiplier);
+ vacc0xCDEF = vqdmulhq_s32(vacc0xCDEF, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_shift);
- vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_post_shift);
+ vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc0x89ABCDEF = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x89AB), vacc0xCDEF), voutput_zero_point);
@@ -199,8 +199,8 @@
int8x16_t vout0x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc0x89ABCDEF));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout0x0123456789ABCDEF = vmaxq_s8(vout0x0123456789ABCDEF, voutput_min);
diff --git a/src/qs8-igemm/gen/1x8c2-minmax-rndnu-neon-mull-padal-dup.c b/src/qs8-igemm/gen/1x8c2-minmax-rndnu-neon-mull-padal-dup.c
new file mode 100644
index 0000000..983074f
--- /dev/null
+++ b/src/qs8-igemm/gen/1x8c2-minmax-rndnu-neon-mull-padal-dup.c
@@ -0,0 +1,181 @@
+// Auto-generated file. Do not edit!
+// Template: src/qs8-igemm/c2-neon-mull-padal-dup.c.in
+// Generator: tools/xngen
+//
+// Copyright 2021 Google LLC
+//
+// This source code is licensed under the BSD-style license found in the
+// LICENSE file in the root directory of this source tree.
+
+#include <assert.h>
+
+#include <arm_neon.h>
+
+#include <xnnpack/gemm.h>
+#include <xnnpack/math.h>
+
+
+void xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup(
+ size_t mr,
+ size_t nc,
+ size_t kc,
+ size_t ks,
+ const int8_t** restrict a,
+ const void* restrict w,
+ int8_t* restrict c,
+ size_t cm_stride,
+ size_t cn_stride,
+ size_t a_offset,
+ const int8_t* zero,
+ const union xnn_qs8_conv_minmax_params params[restrict XNN_MIN_ELEMENTS(1)]) XNN_DISABLE_TSAN XNN_DISABLE_MSAN
+{
+ assert(mr != 0);
+ assert(mr <= 1);
+ assert(nc != 0);
+ assert(kc != 0);
+ assert(ks != 0);
+ assert(ks % (1 * sizeof(void*)) == 0);
+ assert(a_offset % sizeof(int8_t) == 0);
+ assert(a != NULL);
+ assert(w != NULL);
+ assert(c != NULL);
+
+ kc = round_up_po2(kc, 2 * sizeof(int8_t));
+ int8_t* c0 = c;
+
+ do {
+ int32x4_t vacc0x0123 = vld1q_s32(w); w = (const void*) ((uintptr_t) w + 4 * sizeof(int32_t));
+ int32x4_t vacc0x4567 = vld1q_s32(w); w = (const void*) ((uintptr_t) w + 4 * sizeof(int32_t));
+
+ size_t p = ks;
+ do {
+ const int8_t* restrict a0 = a[0];
+ if XNN_UNPREDICTABLE(a0 != zero) {
+ a0 = (const int8_t*) ((uintptr_t) a0 + a_offset);
+ }
+ a += 1;
+
+ size_t k = kc;
+
+
+ while (k >= 8 * sizeof(int8_t)) {
+ const int8x8_t va0 = vld1_s8(a0); a0 += 8;
+
+ const int8x8_t vb0123c0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb4567c0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb0123c1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb4567c1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb0123c2 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb4567c2 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb0123c3 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb4567c3 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+
+ const int16x8_t vprod0x0123c0 = vmull_s8(vb0123c0, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 0)));
+ const int16x8_t vprod0x0123c1 = vmull_s8(vb0123c1, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 1)));
+ const int16x8_t vprod0x0123c2 = vmull_s8(vb0123c2, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 2)));
+ const int16x8_t vprod0x0123c3 = vmull_s8(vb0123c3, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 3)));
+ vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c0);
+ vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c1);
+ vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c2);
+ vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c3);
+ const int16x8_t vprod0x4567c0 = vmull_s8(vb4567c0, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 0)));
+ const int16x8_t vprod0x4567c1 = vmull_s8(vb4567c1, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 1)));
+ const int16x8_t vprod0x4567c2 = vmull_s8(vb4567c2, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 2)));
+ const int16x8_t vprod0x4567c3 = vmull_s8(vb4567c3, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 3)));
+ vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c0);
+ vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c1);
+ vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c2);
+ vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c3);
+
+ k -= 8 * sizeof(int8_t);
+ }
+
+ if XNN_UNLIKELY(k != 0) {
+ const int8x8_t va0 = vld1_s8(a0); a0 = (const int8_t*) ((uintptr_t) a0 + k);
+
+ const int8x8_t vb0123c0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb4567c0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+
+ const int16x8_t vprod0x0123c0 = vmull_s8(vb0123c0, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 0)));
+ vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c0);
+ const int16x8_t vprod0x4567c0 = vmull_s8(vb4567c0, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 0)));
+ vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c0);
+
+ if (k > 2 * sizeof(int8_t)) {
+ const int8x8_t vb0123c1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb4567c1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+
+ const int16x8_t vprod0x0123c1 = vmull_s8(vb0123c1, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 1)));
+ vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c1);
+ const int16x8_t vprod0x4567c1 = vmull_s8(vb4567c1, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 1)));
+ vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c1);
+
+ if (k > 4 * sizeof(int8_t)) {
+ const int8x8_t vb0123c2 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+ const int8x8_t vb4567c2 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
+
+ const int16x8_t vprod0x0123c2 = vmull_s8(vb0123c2, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 2)));
+ vacc0x0123 = vpadalq_s16(vacc0x0123, vprod0x0123c2);
+ const int16x8_t vprod0x4567c2 = vmull_s8(vb4567c2, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va0), 2)));
+ vacc0x4567 = vpadalq_s16(vacc0x4567, vprod0x4567c2);
+ }
+ }
+ }
+ p -= 1 * sizeof(void*);
+ } while (p != 0);
+
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
+
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
+#if XNN_ARCH_ARM64
+ const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
+
+ int8x8_t vout0x01234567 = vqmovn_s16(vacc0x01234567);
+#else
+ const int16x8_t vacc0x01234567 = vqaddq_s16(vcombine_s16(vqmovn_s32(vacc0x0123), vqmovn_s32(vacc0x4567)), voutput_zero_point);
+
+ int8x8_t vout0x01234567 = vqmovn_s16(vacc0x01234567);
+#endif
+ const int8x8_t voutput_min = vld1_dup_s8(&params->rndnu_neon.output_min);
+ const int8x8_t voutput_max = vld1_dup_s8(&params->rndnu_neon.output_max);
+
+ vout0x01234567 = vmax_s8(vout0x01234567, voutput_min);
+
+ vout0x01234567 = vmin_s8(vout0x01234567, voutput_max);
+
+ if (nc >= 8) {
+ vst1_s8(c0 + 0, vout0x01234567);
+
+ c0 = (int8_t*) ((uintptr_t) c0 + cn_stride);
+
+ a = (const int8_t**restrict) ((uintptr_t) a - ks);
+
+ nc -= 8;
+ } else {
+ if (nc & 4) {
+ vst1_lane_u32(__builtin_assume_aligned(c0, 1), vreinterpret_u32_s8(vout0x01234567), 0); c0 += 4;
+ vout0x01234567 = vext_s8(vout0x01234567, vout0x01234567, 4);
+ }
+ if (nc & 2) {
+ vst1_lane_u16(__builtin_assume_aligned(c0, 1), vreinterpret_u16_s8(vout0x01234567), 0); c0 += 2;
+ vout0x01234567 = vext_s8(vout0x01234567, vout0x01234567, 2);
+ }
+ if (nc & 1) {
+ vst1_lane_s8(c0, vout0x01234567, 0);
+ }
+
+ nc = 0;
+ }
+ } while (nc != 0);
+}
diff --git a/src/qs8-igemm/gen/2x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c b/src/qs8-igemm/gen/2x16c2-minmax-rndnu-neon-mlal-padal-dup.c
similarity index 93%
rename from src/qs8-igemm/gen/2x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
rename to src/qs8-igemm/gen/2x16c2-minmax-rndnu-neon-mlal-padal-dup.c
index ffa1bdb..92265f8 100644
--- a/src/qs8-igemm/gen/2x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
+++ b/src/qs8-igemm/gen/2x16c2-minmax-rndnu-neon-mlal-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup(
+void xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -376,38 +376,38 @@
p -= 2 * sizeof(void*);
} while (p != 0);
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc0x89AB = vqrdmulhq_s32(vacc0x89AB, vmultiplier);
- vacc0xCDEF = vqrdmulhq_s32(vacc0xCDEF, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc1x89AB = vqrdmulhq_s32(vacc1x89AB, vmultiplier);
- vacc1xCDEF = vqrdmulhq_s32(vacc1xCDEF, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc0x89AB = vshlq_s32(vacc0x89AB, vright_pre_shift);
+ vacc0xCDEF = vshlq_s32(vacc0xCDEF, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc1x89AB = vshlq_s32(vacc1x89AB, vright_pre_shift);
+ vacc1xCDEF = vshlq_s32(vacc1xCDEF, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc0x89AB = vsraq_n_s32(vacc0x89AB, vbicq_s32(vacc0x89AB, vzero_shift_mask), 31);
- vacc0xCDEF = vsraq_n_s32(vacc0xCDEF, vbicq_s32(vacc0xCDEF, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc1x89AB = vsraq_n_s32(vacc1x89AB, vbicq_s32(vacc1x89AB, vzero_shift_mask), 31);
- vacc1xCDEF = vsraq_n_s32(vacc1xCDEF, vbicq_s32(vacc1xCDEF, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc0x89AB = vqdmulhq_s32(vacc0x89AB, vmultiplier);
+ vacc0xCDEF = vqdmulhq_s32(vacc0xCDEF, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc1x89AB = vqdmulhq_s32(vacc1x89AB, vmultiplier);
+ vacc1xCDEF = vqdmulhq_s32(vacc1xCDEF, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_shift);
- vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_shift);
- vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_post_shift);
+ vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_post_shift);
+ vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc0x89ABCDEF = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x89AB), vacc0xCDEF), voutput_zero_point);
@@ -425,8 +425,8 @@
int8x16_t vout0x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc0x89ABCDEF));
int8x16_t vout1x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc1x01234567), vqmovn_s16(vacc1x89ABCDEF));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout1x0123456789ABCDEF = vmaxq_s8(vout1x0123456789ABCDEF, voutput_min);
vout0x0123456789ABCDEF = vmaxq_s8(vout0x0123456789ABCDEF, voutput_min);
diff --git a/src/qs8-igemm/gen/2x16c2-minmax-gemmlowp-neon-mull-padal-dup.c b/src/qs8-igemm/gen/2x16c2-minmax-rndnu-neon-mull-padal-dup.c
similarity index 89%
rename from src/qs8-igemm/gen/2x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
rename to src/qs8-igemm/gen/2x16c2-minmax-rndnu-neon-mull-padal-dup.c
index dfe11c9..a6bee90 100644
--- a/src/qs8-igemm/gen/2x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
+++ b/src/qs8-igemm/gen/2x16c2-minmax-rndnu-neon-mull-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup(
+void xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -238,38 +238,38 @@
p -= 2 * sizeof(void*);
} while (p != 0);
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc0x89AB = vqrdmulhq_s32(vacc0x89AB, vmultiplier);
- vacc0xCDEF = vqrdmulhq_s32(vacc0xCDEF, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc1x89AB = vqrdmulhq_s32(vacc1x89AB, vmultiplier);
- vacc1xCDEF = vqrdmulhq_s32(vacc1xCDEF, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc0x89AB = vshlq_s32(vacc0x89AB, vright_pre_shift);
+ vacc0xCDEF = vshlq_s32(vacc0xCDEF, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc1x89AB = vshlq_s32(vacc1x89AB, vright_pre_shift);
+ vacc1xCDEF = vshlq_s32(vacc1xCDEF, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc0x89AB = vsraq_n_s32(vacc0x89AB, vbicq_s32(vacc0x89AB, vzero_shift_mask), 31);
- vacc0xCDEF = vsraq_n_s32(vacc0xCDEF, vbicq_s32(vacc0xCDEF, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc1x89AB = vsraq_n_s32(vacc1x89AB, vbicq_s32(vacc1x89AB, vzero_shift_mask), 31);
- vacc1xCDEF = vsraq_n_s32(vacc1xCDEF, vbicq_s32(vacc1xCDEF, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc0x89AB = vqdmulhq_s32(vacc0x89AB, vmultiplier);
+ vacc0xCDEF = vqdmulhq_s32(vacc0xCDEF, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc1x89AB = vqdmulhq_s32(vacc1x89AB, vmultiplier);
+ vacc1xCDEF = vqdmulhq_s32(vacc1xCDEF, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_shift);
- vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_shift);
- vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_post_shift);
+ vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_post_shift);
+ vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc0x89ABCDEF = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x89AB), vacc0xCDEF), voutput_zero_point);
@@ -287,8 +287,8 @@
int8x16_t vout0x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc0x89ABCDEF));
int8x16_t vout1x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc1x01234567), vqmovn_s16(vacc1x89ABCDEF));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout1x0123456789ABCDEF = vmaxq_s8(vout1x0123456789ABCDEF, voutput_min);
vout0x0123456789ABCDEF = vmaxq_s8(vout0x0123456789ABCDEF, voutput_min);
diff --git a/src/qs8-igemm/gen/3x8c2-minmax-gemmlowp-neon-mull-padal-dup.c b/src/qs8-igemm/gen/2x8c2-minmax-rndnu-neon-mull-padal-dup.c
similarity index 65%
copy from src/qs8-igemm/gen/3x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
copy to src/qs8-igemm/gen/2x8c2-minmax-rndnu-neon-mull-padal-dup.c
index 71f6cb1..7cf1995 100644
--- a/src/qs8-igemm/gen/3x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
+++ b/src/qs8-igemm/gen/2x8c2-minmax-rndnu-neon-mull-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup(
+void xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -30,11 +30,11 @@
const union xnn_qs8_conv_minmax_params params[restrict XNN_MIN_ELEMENTS(1)]) XNN_DISABLE_TSAN XNN_DISABLE_MSAN
{
assert(mr != 0);
- assert(mr <= 3);
+ assert(mr <= 2);
assert(nc != 0);
assert(kc != 0);
assert(ks != 0);
- assert(ks % (3 * sizeof(void*)) == 0);
+ assert(ks % (2 * sizeof(void*)) == 0);
assert(a_offset % sizeof(int8_t) == 0);
assert(a != NULL);
assert(w != NULL);
@@ -43,21 +43,15 @@
kc = round_up_po2(kc, 2 * sizeof(int8_t));
int8_t* c0 = c;
int8_t* c1 = (int8_t*) ((uintptr_t) c0 + cm_stride);
- if XNN_UNPREDICTABLE(mr < 2) {
+ if XNN_UNPREDICTABLE(mr != 2) {
c1 = c0;
}
- int8_t* c2 = (int8_t*) ((uintptr_t) c1 + cm_stride);
- if XNN_UNPREDICTABLE(mr <= 2) {
- c2 = c1;
- }
do {
int32x4_t vacc0x0123 = vld1q_s32(w); w = (const void*) ((uintptr_t) w + 4 * sizeof(int32_t));
int32x4_t vacc0x4567 = vld1q_s32(w); w = (const void*) ((uintptr_t) w + 4 * sizeof(int32_t));
int32x4_t vacc1x0123 = vacc0x0123;
int32x4_t vacc1x4567 = vacc0x4567;
- int32x4_t vacc2x0123 = vacc0x0123;
- int32x4_t vacc2x4567 = vacc0x4567;
size_t p = ks;
do {
@@ -69,11 +63,7 @@
if XNN_UNPREDICTABLE(a1 != zero) {
a1 = (const int8_t*) ((uintptr_t) a1 + a_offset);
}
- const int8_t* restrict a2 = a[2];
- if XNN_UNPREDICTABLE(a2 != zero) {
- a2 = (const int8_t*) ((uintptr_t) a2 + a_offset);
- }
- a += 3;
+ a += 2;
size_t k = kc;
@@ -81,7 +71,6 @@
while (k >= 8 * sizeof(int8_t)) {
const int8x8_t va0 = vld1_s8(a0); a0 += 8;
const int8x8_t va1 = vld1_s8(a1); a1 += 8;
- const int8x8_t va2 = vld1_s8(a2); a2 += 8;
const int8x8_t vb0123c0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
const int8x8_t vb4567c0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
@@ -124,22 +113,6 @@
vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c1);
vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c2);
vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c3);
- const int16x8_t vprod2x0123c0 = vmull_s8(vb0123c0, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 0)));
- const int16x8_t vprod2x0123c1 = vmull_s8(vb0123c1, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 1)));
- const int16x8_t vprod2x0123c2 = vmull_s8(vb0123c2, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 2)));
- const int16x8_t vprod2x0123c3 = vmull_s8(vb0123c3, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 3)));
- vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c0);
- vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c1);
- vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c2);
- vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c3);
- const int16x8_t vprod2x4567c0 = vmull_s8(vb4567c0, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 0)));
- const int16x8_t vprod2x4567c1 = vmull_s8(vb4567c1, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 1)));
- const int16x8_t vprod2x4567c2 = vmull_s8(vb4567c2, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 2)));
- const int16x8_t vprod2x4567c3 = vmull_s8(vb4567c3, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 3)));
- vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c0);
- vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c1);
- vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c2);
- vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c3);
k -= 8 * sizeof(int8_t);
}
@@ -147,7 +120,6 @@
if XNN_UNLIKELY(k != 0) {
const int8x8_t va0 = vld1_s8(a0); a0 = (const int8_t*) ((uintptr_t) a0 + k);
const int8x8_t va1 = vld1_s8(a1); a1 = (const int8_t*) ((uintptr_t) a1 + k);
- const int8x8_t va2 = vld1_s8(a2); a2 = (const int8_t*) ((uintptr_t) a2 + k);
const int8x8_t vb0123c0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
const int8x8_t vb4567c0 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
@@ -160,10 +132,6 @@
vacc1x0123 = vpadalq_s16(vacc1x0123, vprod1x0123c0);
const int16x8_t vprod1x4567c0 = vmull_s8(vb4567c0, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va1), 0)));
vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c0);
- const int16x8_t vprod2x0123c0 = vmull_s8(vb0123c0, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 0)));
- vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c0);
- const int16x8_t vprod2x4567c0 = vmull_s8(vb4567c0, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 0)));
- vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c0);
if (k > 2 * sizeof(int8_t)) {
const int8x8_t vb0123c1 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
@@ -177,10 +145,6 @@
vacc1x0123 = vpadalq_s16(vacc1x0123, vprod1x0123c1);
const int16x8_t vprod1x4567c1 = vmull_s8(vb4567c1, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va1), 1)));
vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c1);
- const int16x8_t vprod2x0123c1 = vmull_s8(vb0123c1, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 1)));
- vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c1);
- const int16x8_t vprod2x4567c1 = vmull_s8(vb4567c1, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 1)));
- vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c1);
if (k > 4 * sizeof(int8_t)) {
const int8x8_t vb0123c2 = vld1_s8(w); w = (const void*) ((uintptr_t) w + 8 * sizeof(int8_t));
@@ -194,72 +158,54 @@
vacc1x0123 = vpadalq_s16(vacc1x0123, vprod1x0123c2);
const int16x8_t vprod1x4567c2 = vmull_s8(vb4567c2, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va1), 2)));
vacc1x4567 = vpadalq_s16(vacc1x4567, vprod1x4567c2);
- const int16x8_t vprod2x0123c2 = vmull_s8(vb0123c2, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 2)));
- vacc2x0123 = vpadalq_s16(vacc2x0123, vprod2x0123c2);
- const int16x8_t vprod2x4567c2 = vmull_s8(vb4567c2, vreinterpret_s8_s16(vdup_lane_s16(vreinterpret_s16_s8(va2), 2)));
- vacc2x4567 = vpadalq_s16(vacc2x4567, vprod2x4567c2);
}
}
}
- p -= 3 * sizeof(void*);
+ p -= 2 * sizeof(void*);
} while (p != 0);
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc2x0123 = vqrdmulhq_s32(vacc2x0123, vmultiplier);
- vacc2x4567 = vqrdmulhq_s32(vacc2x4567, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc2x0123 = vsraq_n_s32(vacc2x0123, vbicq_s32(vacc2x0123, vzero_shift_mask), 31);
- vacc2x4567 = vsraq_n_s32(vacc2x4567, vbicq_s32(vacc2x4567, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc2x0123 = vrshlq_s32(vacc2x0123, vright_shift);
- vacc2x4567 = vrshlq_s32(vacc2x4567, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc1x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc1x0123), vacc1x4567), voutput_zero_point);
- const int16x8_t vacc2x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc2x0123), vacc2x4567), voutput_zero_point);
int8x16_t vout0x01234567_1x01234567 = vqmovn_high_s16(vqmovn_s16(vacc0x01234567), vacc1x01234567);
- int8x8_t vout2x01234567 = vqmovn_s16(vacc2x01234567);
#else
const int16x8_t vacc0x01234567 = vqaddq_s16(vcombine_s16(vqmovn_s32(vacc0x0123), vqmovn_s32(vacc0x4567)), voutput_zero_point);
const int16x8_t vacc1x01234567 = vqaddq_s16(vcombine_s16(vqmovn_s32(vacc1x0123), vqmovn_s32(vacc1x4567)), voutput_zero_point);
- const int16x8_t vacc2x01234567 = vqaddq_s16(vcombine_s16(vqmovn_s32(vacc2x0123), vqmovn_s32(vacc2x4567)), voutput_zero_point);
int8x16_t vout0x01234567_1x01234567 = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc1x01234567));
- int8x8_t vout2x01234567 = vqmovn_s16(vacc2x01234567);
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
- vout2x01234567 = vmax_s8(vout2x01234567, vget_low_s8(voutput_min));
vout0x01234567_1x01234567 = vmaxq_s8(vout0x01234567_1x01234567, voutput_min);
- vout2x01234567 = vmin_s8(vout2x01234567, vget_low_s8(voutput_max));
vout0x01234567_1x01234567 = vminq_s8(vout0x01234567_1x01234567, voutput_max);
if (nc >= 8) {
- vst1_s8(c2 + 0, vout2x01234567);
vst1_s8(c1 + 0, vget_high_s8(vout0x01234567_1x01234567));
vst1_s8(c0 + 0, vget_low_s8(vout0x01234567_1x01234567));
- c2 = (int8_t*) ((uintptr_t) c2 + cn_stride);
c1 = (int8_t*) ((uintptr_t) c1 + cn_stride);
c0 = (int8_t*) ((uintptr_t) c0 + cn_stride);
@@ -268,21 +214,16 @@
nc -= 8;
} else {
if (nc & 4) {
- vst1_lane_u32(__builtin_assume_aligned(c2, 1), vreinterpret_u32_s8(vout2x01234567), 0); c2 += 4;
vst1q_lane_u32(__builtin_assume_aligned(c1, 1), vreinterpretq_u32_s8(vout0x01234567_1x01234567), 2); c1 += 4;
vst1q_lane_u32(__builtin_assume_aligned(c0, 1), vreinterpretq_u32_s8(vout0x01234567_1x01234567), 0); c0 += 4;
- vout2x01234567 = vext_s8(vout2x01234567, vout2x01234567, 4);
vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 4);
}
if (nc & 2) {
- vst1_lane_u16(__builtin_assume_aligned(c2, 1), vreinterpret_u16_s8(vout2x01234567), 0); c2 += 2;
vst1q_lane_u16(__builtin_assume_aligned(c1, 1), vreinterpretq_u16_s8(vout0x01234567_1x01234567), 4); c1 += 2;
vst1q_lane_u16(__builtin_assume_aligned(c0, 1), vreinterpretq_u16_s8(vout0x01234567_1x01234567), 0); c0 += 2;
- vout2x01234567 = vext_s8(vout2x01234567, vout2x01234567, 2);
vout0x01234567_1x01234567 = vextq_s8(vout0x01234567_1x01234567, vout0x01234567_1x01234567, 2);
}
if (nc & 1) {
- vst1_lane_s8(c2, vout2x01234567, 0);
vst1q_lane_s8(c1, vout0x01234567_1x01234567, 8);
vst1q_lane_s8(c0, vout0x01234567_1x01234567, 0);
}
diff --git a/src/qs8-igemm/gen/3x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c b/src/qs8-igemm/gen/3x16c2-minmax-rndnu-neon-mlal-padal-dup.c
similarity index 93%
rename from src/qs8-igemm/gen/3x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
rename to src/qs8-igemm/gen/3x16c2-minmax-rndnu-neon-mlal-padal-dup.c
index 88934d9..7f898de 100644
--- a/src/qs8-igemm/gen/3x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
+++ b/src/qs8-igemm/gen/3x16c2-minmax-rndnu-neon-mlal-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup(
+void xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -496,50 +496,50 @@
p -= 3 * sizeof(void*);
} while (p != 0);
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc0x89AB = vqrdmulhq_s32(vacc0x89AB, vmultiplier);
- vacc0xCDEF = vqrdmulhq_s32(vacc0xCDEF, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc1x89AB = vqrdmulhq_s32(vacc1x89AB, vmultiplier);
- vacc1xCDEF = vqrdmulhq_s32(vacc1xCDEF, vmultiplier);
- vacc2x0123 = vqrdmulhq_s32(vacc2x0123, vmultiplier);
- vacc2x4567 = vqrdmulhq_s32(vacc2x4567, vmultiplier);
- vacc2x89AB = vqrdmulhq_s32(vacc2x89AB, vmultiplier);
- vacc2xCDEF = vqrdmulhq_s32(vacc2xCDEF, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc0x89AB = vshlq_s32(vacc0x89AB, vright_pre_shift);
+ vacc0xCDEF = vshlq_s32(vacc0xCDEF, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc1x89AB = vshlq_s32(vacc1x89AB, vright_pre_shift);
+ vacc1xCDEF = vshlq_s32(vacc1xCDEF, vright_pre_shift);
+ vacc2x0123 = vshlq_s32(vacc2x0123, vright_pre_shift);
+ vacc2x4567 = vshlq_s32(vacc2x4567, vright_pre_shift);
+ vacc2x89AB = vshlq_s32(vacc2x89AB, vright_pre_shift);
+ vacc2xCDEF = vshlq_s32(vacc2xCDEF, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc0x89AB = vsraq_n_s32(vacc0x89AB, vbicq_s32(vacc0x89AB, vzero_shift_mask), 31);
- vacc0xCDEF = vsraq_n_s32(vacc0xCDEF, vbicq_s32(vacc0xCDEF, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc1x89AB = vsraq_n_s32(vacc1x89AB, vbicq_s32(vacc1x89AB, vzero_shift_mask), 31);
- vacc1xCDEF = vsraq_n_s32(vacc1xCDEF, vbicq_s32(vacc1xCDEF, vzero_shift_mask), 31);
- vacc2x0123 = vsraq_n_s32(vacc2x0123, vbicq_s32(vacc2x0123, vzero_shift_mask), 31);
- vacc2x4567 = vsraq_n_s32(vacc2x4567, vbicq_s32(vacc2x4567, vzero_shift_mask), 31);
- vacc2x89AB = vsraq_n_s32(vacc2x89AB, vbicq_s32(vacc2x89AB, vzero_shift_mask), 31);
- vacc2xCDEF = vsraq_n_s32(vacc2xCDEF, vbicq_s32(vacc2xCDEF, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc0x89AB = vqdmulhq_s32(vacc0x89AB, vmultiplier);
+ vacc0xCDEF = vqdmulhq_s32(vacc0xCDEF, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc1x89AB = vqdmulhq_s32(vacc1x89AB, vmultiplier);
+ vacc1xCDEF = vqdmulhq_s32(vacc1xCDEF, vmultiplier);
+ vacc2x0123 = vqdmulhq_s32(vacc2x0123, vmultiplier);
+ vacc2x4567 = vqdmulhq_s32(vacc2x4567, vmultiplier);
+ vacc2x89AB = vqdmulhq_s32(vacc2x89AB, vmultiplier);
+ vacc2xCDEF = vqdmulhq_s32(vacc2xCDEF, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_shift);
- vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_shift);
- vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_shift);
- vacc2x0123 = vrshlq_s32(vacc2x0123, vright_shift);
- vacc2x4567 = vrshlq_s32(vacc2x4567, vright_shift);
- vacc2x89AB = vrshlq_s32(vacc2x89AB, vright_shift);
- vacc2xCDEF = vrshlq_s32(vacc2xCDEF, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_post_shift);
+ vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_post_shift);
+ vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_post_shift);
+ vacc2x0123 = vrshlq_s32(vacc2x0123, vright_post_shift);
+ vacc2x4567 = vrshlq_s32(vacc2x4567, vright_post_shift);
+ vacc2x89AB = vrshlq_s32(vacc2x89AB, vright_post_shift);
+ vacc2xCDEF = vrshlq_s32(vacc2xCDEF, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc0x89ABCDEF = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x89AB), vacc0xCDEF), voutput_zero_point);
@@ -563,8 +563,8 @@
int8x16_t vout1x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc1x01234567), vqmovn_s16(vacc1x89ABCDEF));
int8x16_t vout2x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc2x01234567), vqmovn_s16(vacc2x89ABCDEF));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout2x0123456789ABCDEF = vmaxq_s8(vout2x0123456789ABCDEF, voutput_min);
vout1x0123456789ABCDEF = vmaxq_s8(vout1x0123456789ABCDEF, voutput_min);
diff --git a/src/qs8-igemm/gen/3x16c2-minmax-gemmlowp-neon-mull-padal-dup.c b/src/qs8-igemm/gen/3x16c2-minmax-rndnu-neon-mull-padal-dup.c
similarity index 89%
rename from src/qs8-igemm/gen/3x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
rename to src/qs8-igemm/gen/3x16c2-minmax-rndnu-neon-mull-padal-dup.c
index 0497f77..e70a0d0 100644
--- a/src/qs8-igemm/gen/3x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
+++ b/src/qs8-igemm/gen/3x16c2-minmax-rndnu-neon-mull-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup(
+void xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -308,50 +308,50 @@
p -= 3 * sizeof(void*);
} while (p != 0);
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc0x89AB = vqrdmulhq_s32(vacc0x89AB, vmultiplier);
- vacc0xCDEF = vqrdmulhq_s32(vacc0xCDEF, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc1x89AB = vqrdmulhq_s32(vacc1x89AB, vmultiplier);
- vacc1xCDEF = vqrdmulhq_s32(vacc1xCDEF, vmultiplier);
- vacc2x0123 = vqrdmulhq_s32(vacc2x0123, vmultiplier);
- vacc2x4567 = vqrdmulhq_s32(vacc2x4567, vmultiplier);
- vacc2x89AB = vqrdmulhq_s32(vacc2x89AB, vmultiplier);
- vacc2xCDEF = vqrdmulhq_s32(vacc2xCDEF, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc0x89AB = vshlq_s32(vacc0x89AB, vright_pre_shift);
+ vacc0xCDEF = vshlq_s32(vacc0xCDEF, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc1x89AB = vshlq_s32(vacc1x89AB, vright_pre_shift);
+ vacc1xCDEF = vshlq_s32(vacc1xCDEF, vright_pre_shift);
+ vacc2x0123 = vshlq_s32(vacc2x0123, vright_pre_shift);
+ vacc2x4567 = vshlq_s32(vacc2x4567, vright_pre_shift);
+ vacc2x89AB = vshlq_s32(vacc2x89AB, vright_pre_shift);
+ vacc2xCDEF = vshlq_s32(vacc2xCDEF, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc0x89AB = vsraq_n_s32(vacc0x89AB, vbicq_s32(vacc0x89AB, vzero_shift_mask), 31);
- vacc0xCDEF = vsraq_n_s32(vacc0xCDEF, vbicq_s32(vacc0xCDEF, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc1x89AB = vsraq_n_s32(vacc1x89AB, vbicq_s32(vacc1x89AB, vzero_shift_mask), 31);
- vacc1xCDEF = vsraq_n_s32(vacc1xCDEF, vbicq_s32(vacc1xCDEF, vzero_shift_mask), 31);
- vacc2x0123 = vsraq_n_s32(vacc2x0123, vbicq_s32(vacc2x0123, vzero_shift_mask), 31);
- vacc2x4567 = vsraq_n_s32(vacc2x4567, vbicq_s32(vacc2x4567, vzero_shift_mask), 31);
- vacc2x89AB = vsraq_n_s32(vacc2x89AB, vbicq_s32(vacc2x89AB, vzero_shift_mask), 31);
- vacc2xCDEF = vsraq_n_s32(vacc2xCDEF, vbicq_s32(vacc2xCDEF, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc0x89AB = vqdmulhq_s32(vacc0x89AB, vmultiplier);
+ vacc0xCDEF = vqdmulhq_s32(vacc0xCDEF, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc1x89AB = vqdmulhq_s32(vacc1x89AB, vmultiplier);
+ vacc1xCDEF = vqdmulhq_s32(vacc1xCDEF, vmultiplier);
+ vacc2x0123 = vqdmulhq_s32(vacc2x0123, vmultiplier);
+ vacc2x4567 = vqdmulhq_s32(vacc2x4567, vmultiplier);
+ vacc2x89AB = vqdmulhq_s32(vacc2x89AB, vmultiplier);
+ vacc2xCDEF = vqdmulhq_s32(vacc2xCDEF, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_shift);
- vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_shift);
- vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_shift);
- vacc2x0123 = vrshlq_s32(vacc2x0123, vright_shift);
- vacc2x4567 = vrshlq_s32(vacc2x4567, vright_shift);
- vacc2x89AB = vrshlq_s32(vacc2x89AB, vright_shift);
- vacc2xCDEF = vrshlq_s32(vacc2xCDEF, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_post_shift);
+ vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_post_shift);
+ vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_post_shift);
+ vacc2x0123 = vrshlq_s32(vacc2x0123, vright_post_shift);
+ vacc2x4567 = vrshlq_s32(vacc2x4567, vright_post_shift);
+ vacc2x89AB = vrshlq_s32(vacc2x89AB, vright_post_shift);
+ vacc2xCDEF = vrshlq_s32(vacc2xCDEF, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc0x89ABCDEF = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x89AB), vacc0xCDEF), voutput_zero_point);
@@ -375,8 +375,8 @@
int8x16_t vout1x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc1x01234567), vqmovn_s16(vacc1x89ABCDEF));
int8x16_t vout2x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc2x01234567), vqmovn_s16(vacc2x89ABCDEF));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout2x0123456789ABCDEF = vmaxq_s8(vout2x0123456789ABCDEF, voutput_min);
vout1x0123456789ABCDEF = vmaxq_s8(vout1x0123456789ABCDEF, voutput_min);
diff --git a/src/qs8-igemm/gen/3x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c b/src/qs8-igemm/gen/3x8c2-minmax-rndnu-neon-mlal-padal-dup.c
similarity index 93%
rename from src/qs8-igemm/gen/3x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c
rename to src/qs8-igemm/gen/3x8c2-minmax-rndnu-neon-mlal-padal-dup.c
index f74242e..7ead281 100644
--- a/src/qs8-igemm/gen/3x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c
+++ b/src/qs8-igemm/gen/3x8c2-minmax-rndnu-neon-mlal-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup(
+void xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -304,32 +304,32 @@
p -= 3 * sizeof(void*);
} while (p != 0);
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc2x0123 = vqrdmulhq_s32(vacc2x0123, vmultiplier);
- vacc2x4567 = vqrdmulhq_s32(vacc2x4567, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc2x0123 = vshlq_s32(vacc2x0123, vright_pre_shift);
+ vacc2x4567 = vshlq_s32(vacc2x4567, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc2x0123 = vsraq_n_s32(vacc2x0123, vbicq_s32(vacc2x0123, vzero_shift_mask), 31);
- vacc2x4567 = vsraq_n_s32(vacc2x4567, vbicq_s32(vacc2x4567, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc2x0123 = vqdmulhq_s32(vacc2x0123, vmultiplier);
+ vacc2x4567 = vqdmulhq_s32(vacc2x4567, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc2x0123 = vrshlq_s32(vacc2x0123, vright_shift);
- vacc2x4567 = vrshlq_s32(vacc2x4567, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc2x0123 = vrshlq_s32(vacc2x0123, vright_post_shift);
+ vacc2x4567 = vrshlq_s32(vacc2x4567, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc1x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc1x0123), vacc1x4567), voutput_zero_point);
@@ -345,8 +345,8 @@
int8x16_t vout0x01234567_1x01234567 = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc1x01234567));
int8x8_t vout2x01234567 = vqmovn_s16(vacc2x01234567);
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout2x01234567 = vmax_s8(vout2x01234567, vget_low_s8(voutput_min));
vout0x01234567_1x01234567 = vmaxq_s8(vout0x01234567_1x01234567, voutput_min);
diff --git a/src/qs8-igemm/gen/3x8c2-minmax-gemmlowp-neon-mull-padal-dup.c b/src/qs8-igemm/gen/3x8c2-minmax-rndnu-neon-mull-padal-dup.c
similarity index 89%
rename from src/qs8-igemm/gen/3x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
rename to src/qs8-igemm/gen/3x8c2-minmax-rndnu-neon-mull-padal-dup.c
index 71f6cb1..de23e29 100644
--- a/src/qs8-igemm/gen/3x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
+++ b/src/qs8-igemm/gen/3x8c2-minmax-rndnu-neon-mull-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup(
+void xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -204,32 +204,32 @@
p -= 3 * sizeof(void*);
} while (p != 0);
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc2x0123 = vqrdmulhq_s32(vacc2x0123, vmultiplier);
- vacc2x4567 = vqrdmulhq_s32(vacc2x4567, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc2x0123 = vshlq_s32(vacc2x0123, vright_pre_shift);
+ vacc2x4567 = vshlq_s32(vacc2x4567, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc2x0123 = vsraq_n_s32(vacc2x0123, vbicq_s32(vacc2x0123, vzero_shift_mask), 31);
- vacc2x4567 = vsraq_n_s32(vacc2x4567, vbicq_s32(vacc2x4567, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc2x0123 = vqdmulhq_s32(vacc2x0123, vmultiplier);
+ vacc2x4567 = vqdmulhq_s32(vacc2x4567, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc2x0123 = vrshlq_s32(vacc2x0123, vright_shift);
- vacc2x4567 = vrshlq_s32(vacc2x4567, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc2x0123 = vrshlq_s32(vacc2x0123, vright_post_shift);
+ vacc2x4567 = vrshlq_s32(vacc2x4567, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc1x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc1x0123), vacc1x4567), voutput_zero_point);
@@ -245,8 +245,8 @@
int8x16_t vout0x01234567_1x01234567 = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc1x01234567));
int8x8_t vout2x01234567 = vqmovn_s16(vacc2x01234567);
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(&params->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(&params->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(&params->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(&params->rndnu_neon.output_max);
vout2x01234567 = vmax_s8(vout2x01234567, vget_low_s8(voutput_min));
vout0x01234567_1x01234567 = vmaxq_s8(vout0x01234567_1x01234567, voutput_min);
diff --git a/src/qs8-igemm/gen/4x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c b/src/qs8-igemm/gen/4x16c2-minmax-rndnu-neon-mlal-padal-dup.c
similarity index 93%
rename from src/qs8-igemm/gen/4x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
rename to src/qs8-igemm/gen/4x16c2-minmax-rndnu-neon-mlal-padal-dup.c
index 42826b1..1d7f210 100644
--- a/src/qs8-igemm/gen/4x16c2-minmax-gemmlowp-neon-mlal-padal-dup.c
+++ b/src/qs8-igemm/gen/4x16c2-minmax-rndnu-neon-mlal-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup(
+void xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -616,62 +616,62 @@
p -= 4 * sizeof(void*);
} while (p != 0);
- const int32x4_t vmultiplier = vld1q_dup_s32(&params->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(&params->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(&params->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(&params->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(&params->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc0x89AB = vqrdmulhq_s32(vacc0x89AB, vmultiplier);
- vacc0xCDEF = vqrdmulhq_s32(vacc0xCDEF, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc1x89AB = vqrdmulhq_s32(vacc1x89AB, vmultiplier);
- vacc1xCDEF = vqrdmulhq_s32(vacc1xCDEF, vmultiplier);
- vacc2x0123 = vqrdmulhq_s32(vacc2x0123, vmultiplier);
- vacc2x4567 = vqrdmulhq_s32(vacc2x4567, vmultiplier);
- vacc2x89AB = vqrdmulhq_s32(vacc2x89AB, vmultiplier);
- vacc2xCDEF = vqrdmulhq_s32(vacc2xCDEF, vmultiplier);
- vacc3x0123 = vqrdmulhq_s32(vacc3x0123, vmultiplier);
- vacc3x4567 = vqrdmulhq_s32(vacc3x4567, vmultiplier);
- vacc3x89AB = vqrdmulhq_s32(vacc3x89AB, vmultiplier);
- vacc3xCDEF = vqrdmulhq_s32(vacc3xCDEF, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc0x89AB = vshlq_s32(vacc0x89AB, vright_pre_shift);
+ vacc0xCDEF = vshlq_s32(vacc0xCDEF, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc1x89AB = vshlq_s32(vacc1x89AB, vright_pre_shift);
+ vacc1xCDEF = vshlq_s32(vacc1xCDEF, vright_pre_shift);
+ vacc2x0123 = vshlq_s32(vacc2x0123, vright_pre_shift);
+ vacc2x4567 = vshlq_s32(vacc2x4567, vright_pre_shift);
+ vacc2x89AB = vshlq_s32(vacc2x89AB, vright_pre_shift);
+ vacc2xCDEF = vshlq_s32(vacc2xCDEF, vright_pre_shift);
+ vacc3x0123 = vshlq_s32(vacc3x0123, vright_pre_shift);
+ vacc3x4567 = vshlq_s32(vacc3x4567, vright_pre_shift);
+ vacc3x89AB = vshlq_s32(vacc3x89AB, vright_pre_shift);
+ vacc3xCDEF = vshlq_s32(vacc3xCDEF, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc0x89AB = vsraq_n_s32(vacc0x89AB, vbicq_s32(vacc0x89AB, vzero_shift_mask), 31);
- vacc0xCDEF = vsraq_n_s32(vacc0xCDEF, vbicq_s32(vacc0xCDEF, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc1x89AB = vsraq_n_s32(vacc1x89AB, vbicq_s32(vacc1x89AB, vzero_shift_mask), 31);
- vacc1xCDEF = vsraq_n_s32(vacc1xCDEF, vbicq_s32(vacc1xCDEF, vzero_shift_mask), 31);
- vacc2x0123 = vsraq_n_s32(vacc2x0123, vbicq_s32(vacc2x0123, vzero_shift_mask), 31);
- vacc2x4567 = vsraq_n_s32(vacc2x4567, vbicq_s32(vacc2x4567, vzero_shift_mask), 31);
- vacc2x89AB = vsraq_n_s32(vacc2x89AB, vbicq_s32(vacc2x89AB, vzero_shift_mask), 31);
- vacc2xCDEF = vsraq_n_s32(vacc2xCDEF, vbicq_s32(vacc2xCDEF, vzero_shift_mask), 31);
- vacc3x0123 = vsraq_n_s32(vacc3x0123, vbicq_s32(vacc3x0123, vzero_shift_mask), 31);
- vacc3x4567 = vsraq_n_s32(vacc3x4567, vbicq_s32(vacc3x4567, vzero_shift_mask), 31);
- vacc3x89AB = vsraq_n_s32(vacc3x89AB, vbicq_s32(vacc3x89AB, vzero_shift_mask), 31);
- vacc3xCDEF = vsraq_n_s32(vacc3xCDEF, vbicq_s32(vacc3xCDEF, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc0x89AB = vqdmulhq_s32(vacc0x89AB, vmultiplier);
+ vacc0xCDEF = vqdmulhq_s32(vacc0xCDEF, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc1x89AB = vqdmulhq_s32(vacc1x89AB, vmultiplier);
+ vacc1xCDEF = vqdmulhq_s32(vacc1xCDEF, vmultiplier);
+ vacc2x0123 = vqdmulhq_s32(vacc2x0123, vmultiplier);
+ vacc2x4567 = vqdmulhq_s32(vacc2x4567, vmultiplier);
+ vacc2x89AB = vqdmulhq_s32(vacc2x89AB, vmultiplier);
+ vacc2xCDEF = vqdmulhq_s32(vacc2xCDEF, vmultiplier);
+ vacc3x0123 = vqdmulhq_s32(vacc3x0123, vmultiplier);
+ vacc3x4567 = vqdmulhq_s32(vacc3x4567, vmultiplier);
+ vacc3x89AB = vqdmulhq_s32(vacc3x89AB, vmultiplier);
+ vacc3xCDEF = vqdmulhq_s32(vacc3xCDEF, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_shift);
- vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_shift);
- vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_shift);
- vacc2x0123 = vrshlq_s32(vacc2x0123, vright_shift);
- vacc2x4567 = vrshlq_s32(vacc2x4567, vright_shift);
- vacc2x89AB = vrshlq_s32(vacc2x89AB, vright_shift);
- vacc2xCDEF = vrshlq_s32(vacc2xCDEF, vright_shift);
- vacc3x0123 = vrshlq_s32(vacc3x0123, vright_shift);
- vacc3x4567 = vrshlq_s32(vacc3x4567, vright_shift);
- vacc3x89AB = vrshlq_s32(vacc3x89AB, vright_shift);
- vacc3xCDEF = vrshlq_s32(vacc3xCDEF, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_post_shift);
+ vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_post_shift);
+ vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_post_shift);
+ vacc2x0123 = vrshlq_s32(vacc2x0123, vright_post_shift);
+ vacc2x4567 = vrshlq_s32(vacc2x4567, vright_post_shift);
+ vacc2x89AB = vrshlq_s32(vacc2x89AB, vright_post_shift);
+ vacc2xCDEF = vrshlq_s32(vacc2xCDEF, vright_post_shift);
+ vacc3x0123 = vrshlq_s32(vacc3x0123, vright_post_shift);
+ vacc3x4567 = vrshlq_s32(vacc3x4567, vright_post_shift);
+ vacc3x89AB = vrshlq_s32(vacc3x89AB, vright_post_shift);
+ vacc3xCDEF = vrshlq_s32(vacc3xCDEF, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(&params->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc0x89ABCDEF = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x89AB), vacc0xCDEF), voutput_zero_point);
@@ -701,8 +701,8 @@
int8x16_t vout2x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc2x01234567), vqmovn_s16(vacc2x89ABCDEF));
int8x16_t vout3x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc3x01234567), vqmovn_s16(vacc3x89ABCDEF));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(¶ms->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(¶ms->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(¶ms->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(¶ms->rndnu_neon.output_max);
vout3x0123456789ABCDEF = vmaxq_s8(vout3x0123456789ABCDEF, voutput_min);
vout2x0123456789ABCDEF = vmaxq_s8(vout2x0123456789ABCDEF, voutput_min);
diff --git a/src/qs8-igemm/gen/4x16c2-minmax-gemmlowp-neon-mull-padal-dup.c b/src/qs8-igemm/gen/4x16c2-minmax-rndnu-neon-mull-padal-dup.c
similarity index 90%
rename from src/qs8-igemm/gen/4x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
rename to src/qs8-igemm/gen/4x16c2-minmax-rndnu-neon-mull-padal-dup.c
index 0cb91c7..21c1541 100644
--- a/src/qs8-igemm/gen/4x16c2-minmax-gemmlowp-neon-mull-padal-dup.c
+++ b/src/qs8-igemm/gen/4x16c2-minmax-rndnu-neon-mull-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup(
+void xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -378,62 +378,62 @@
p -= 4 * sizeof(void*);
} while (p != 0);
- const int32x4_t vmultiplier = vld1q_dup_s32(¶ms->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(¶ms->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(¶ms->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(¶ms->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(¶ms->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc0x89AB = vqrdmulhq_s32(vacc0x89AB, vmultiplier);
- vacc0xCDEF = vqrdmulhq_s32(vacc0xCDEF, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc1x89AB = vqrdmulhq_s32(vacc1x89AB, vmultiplier);
- vacc1xCDEF = vqrdmulhq_s32(vacc1xCDEF, vmultiplier);
- vacc2x0123 = vqrdmulhq_s32(vacc2x0123, vmultiplier);
- vacc2x4567 = vqrdmulhq_s32(vacc2x4567, vmultiplier);
- vacc2x89AB = vqrdmulhq_s32(vacc2x89AB, vmultiplier);
- vacc2xCDEF = vqrdmulhq_s32(vacc2xCDEF, vmultiplier);
- vacc3x0123 = vqrdmulhq_s32(vacc3x0123, vmultiplier);
- vacc3x4567 = vqrdmulhq_s32(vacc3x4567, vmultiplier);
- vacc3x89AB = vqrdmulhq_s32(vacc3x89AB, vmultiplier);
- vacc3xCDEF = vqrdmulhq_s32(vacc3xCDEF, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc0x89AB = vshlq_s32(vacc0x89AB, vright_pre_shift);
+ vacc0xCDEF = vshlq_s32(vacc0xCDEF, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc1x89AB = vshlq_s32(vacc1x89AB, vright_pre_shift);
+ vacc1xCDEF = vshlq_s32(vacc1xCDEF, vright_pre_shift);
+ vacc2x0123 = vshlq_s32(vacc2x0123, vright_pre_shift);
+ vacc2x4567 = vshlq_s32(vacc2x4567, vright_pre_shift);
+ vacc2x89AB = vshlq_s32(vacc2x89AB, vright_pre_shift);
+ vacc2xCDEF = vshlq_s32(vacc2xCDEF, vright_pre_shift);
+ vacc3x0123 = vshlq_s32(vacc3x0123, vright_pre_shift);
+ vacc3x4567 = vshlq_s32(vacc3x4567, vright_pre_shift);
+ vacc3x89AB = vshlq_s32(vacc3x89AB, vright_pre_shift);
+ vacc3xCDEF = vshlq_s32(vacc3xCDEF, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc0x89AB = vsraq_n_s32(vacc0x89AB, vbicq_s32(vacc0x89AB, vzero_shift_mask), 31);
- vacc0xCDEF = vsraq_n_s32(vacc0xCDEF, vbicq_s32(vacc0xCDEF, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc1x89AB = vsraq_n_s32(vacc1x89AB, vbicq_s32(vacc1x89AB, vzero_shift_mask), 31);
- vacc1xCDEF = vsraq_n_s32(vacc1xCDEF, vbicq_s32(vacc1xCDEF, vzero_shift_mask), 31);
- vacc2x0123 = vsraq_n_s32(vacc2x0123, vbicq_s32(vacc2x0123, vzero_shift_mask), 31);
- vacc2x4567 = vsraq_n_s32(vacc2x4567, vbicq_s32(vacc2x4567, vzero_shift_mask), 31);
- vacc2x89AB = vsraq_n_s32(vacc2x89AB, vbicq_s32(vacc2x89AB, vzero_shift_mask), 31);
- vacc2xCDEF = vsraq_n_s32(vacc2xCDEF, vbicq_s32(vacc2xCDEF, vzero_shift_mask), 31);
- vacc3x0123 = vsraq_n_s32(vacc3x0123, vbicq_s32(vacc3x0123, vzero_shift_mask), 31);
- vacc3x4567 = vsraq_n_s32(vacc3x4567, vbicq_s32(vacc3x4567, vzero_shift_mask), 31);
- vacc3x89AB = vsraq_n_s32(vacc3x89AB, vbicq_s32(vacc3x89AB, vzero_shift_mask), 31);
- vacc3xCDEF = vsraq_n_s32(vacc3xCDEF, vbicq_s32(vacc3xCDEF, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc0x89AB = vqdmulhq_s32(vacc0x89AB, vmultiplier);
+ vacc0xCDEF = vqdmulhq_s32(vacc0xCDEF, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc1x89AB = vqdmulhq_s32(vacc1x89AB, vmultiplier);
+ vacc1xCDEF = vqdmulhq_s32(vacc1xCDEF, vmultiplier);
+ vacc2x0123 = vqdmulhq_s32(vacc2x0123, vmultiplier);
+ vacc2x4567 = vqdmulhq_s32(vacc2x4567, vmultiplier);
+ vacc2x89AB = vqdmulhq_s32(vacc2x89AB, vmultiplier);
+ vacc2xCDEF = vqdmulhq_s32(vacc2xCDEF, vmultiplier);
+ vacc3x0123 = vqdmulhq_s32(vacc3x0123, vmultiplier);
+ vacc3x4567 = vqdmulhq_s32(vacc3x4567, vmultiplier);
+ vacc3x89AB = vqdmulhq_s32(vacc3x89AB, vmultiplier);
+ vacc3xCDEF = vqdmulhq_s32(vacc3xCDEF, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_shift);
- vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_shift);
- vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_shift);
- vacc2x0123 = vrshlq_s32(vacc2x0123, vright_shift);
- vacc2x4567 = vrshlq_s32(vacc2x4567, vright_shift);
- vacc2x89AB = vrshlq_s32(vacc2x89AB, vright_shift);
- vacc2xCDEF = vrshlq_s32(vacc2xCDEF, vright_shift);
- vacc3x0123 = vrshlq_s32(vacc3x0123, vright_shift);
- vacc3x4567 = vrshlq_s32(vacc3x4567, vright_shift);
- vacc3x89AB = vrshlq_s32(vacc3x89AB, vright_shift);
- vacc3xCDEF = vrshlq_s32(vacc3xCDEF, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc0x89AB = vrshlq_s32(vacc0x89AB, vright_post_shift);
+ vacc0xCDEF = vrshlq_s32(vacc0xCDEF, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc1x89AB = vrshlq_s32(vacc1x89AB, vright_post_shift);
+ vacc1xCDEF = vrshlq_s32(vacc1xCDEF, vright_post_shift);
+ vacc2x0123 = vrshlq_s32(vacc2x0123, vright_post_shift);
+ vacc2x4567 = vrshlq_s32(vacc2x4567, vright_post_shift);
+ vacc2x89AB = vrshlq_s32(vacc2x89AB, vright_post_shift);
+ vacc2xCDEF = vrshlq_s32(vacc2xCDEF, vright_post_shift);
+ vacc3x0123 = vrshlq_s32(vacc3x0123, vright_post_shift);
+ vacc3x4567 = vrshlq_s32(vacc3x4567, vright_post_shift);
+ vacc3x89AB = vrshlq_s32(vacc3x89AB, vright_post_shift);
+ vacc3xCDEF = vrshlq_s32(vacc3xCDEF, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(¶ms->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(¶ms->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc0x89ABCDEF = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x89AB), vacc0xCDEF), voutput_zero_point);
@@ -463,8 +463,8 @@
int8x16_t vout2x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc2x01234567), vqmovn_s16(vacc2x89ABCDEF));
int8x16_t vout3x0123456789ABCDEF = vcombine_s8(vqmovn_s16(vacc3x01234567), vqmovn_s16(vacc3x89ABCDEF));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(¶ms->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(¶ms->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(¶ms->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(¶ms->rndnu_neon.output_max);
vout3x0123456789ABCDEF = vmaxq_s8(vout3x0123456789ABCDEF, voutput_min);
vout2x0123456789ABCDEF = vmaxq_s8(vout2x0123456789ABCDEF, voutput_min);
diff --git a/src/qs8-igemm/gen/4x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c b/src/qs8-igemm/gen/4x8c2-minmax-rndnu-neon-mlal-padal-dup.c
similarity index 93%
rename from src/qs8-igemm/gen/4x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c
rename to src/qs8-igemm/gen/4x8c2-minmax-rndnu-neon-mlal-padal-dup.c
index 43eee7b..d3b7850 100644
--- a/src/qs8-igemm/gen/4x8c2-minmax-gemmlowp-neon-mlal-padal-dup.c
+++ b/src/qs8-igemm/gen/4x8c2-minmax-rndnu-neon-mlal-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup(
+void xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -370,38 +370,38 @@
p -= 4 * sizeof(void*);
} while (p != 0);
- const int32x4_t vmultiplier = vld1q_dup_s32(¶ms->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(¶ms->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(¶ms->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(¶ms->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(¶ms->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc2x0123 = vqrdmulhq_s32(vacc2x0123, vmultiplier);
- vacc2x4567 = vqrdmulhq_s32(vacc2x4567, vmultiplier);
- vacc3x0123 = vqrdmulhq_s32(vacc3x0123, vmultiplier);
- vacc3x4567 = vqrdmulhq_s32(vacc3x4567, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc2x0123 = vshlq_s32(vacc2x0123, vright_pre_shift);
+ vacc2x4567 = vshlq_s32(vacc2x4567, vright_pre_shift);
+ vacc3x0123 = vshlq_s32(vacc3x0123, vright_pre_shift);
+ vacc3x4567 = vshlq_s32(vacc3x4567, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc2x0123 = vsraq_n_s32(vacc2x0123, vbicq_s32(vacc2x0123, vzero_shift_mask), 31);
- vacc2x4567 = vsraq_n_s32(vacc2x4567, vbicq_s32(vacc2x4567, vzero_shift_mask), 31);
- vacc3x0123 = vsraq_n_s32(vacc3x0123, vbicq_s32(vacc3x0123, vzero_shift_mask), 31);
- vacc3x4567 = vsraq_n_s32(vacc3x4567, vbicq_s32(vacc3x4567, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc2x0123 = vqdmulhq_s32(vacc2x0123, vmultiplier);
+ vacc2x4567 = vqdmulhq_s32(vacc2x4567, vmultiplier);
+ vacc3x0123 = vqdmulhq_s32(vacc3x0123, vmultiplier);
+ vacc3x4567 = vqdmulhq_s32(vacc3x4567, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc2x0123 = vrshlq_s32(vacc2x0123, vright_shift);
- vacc2x4567 = vrshlq_s32(vacc2x4567, vright_shift);
- vacc3x0123 = vrshlq_s32(vacc3x0123, vright_shift);
- vacc3x4567 = vrshlq_s32(vacc3x4567, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc2x0123 = vrshlq_s32(vacc2x0123, vright_post_shift);
+ vacc2x4567 = vrshlq_s32(vacc2x4567, vright_post_shift);
+ vacc3x0123 = vrshlq_s32(vacc3x0123, vright_post_shift);
+ vacc3x4567 = vrshlq_s32(vacc3x4567, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(¶ms->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(¶ms->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc1x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc1x0123), vacc1x4567), voutput_zero_point);
@@ -419,8 +419,8 @@
int8x16_t vout0x01234567_1x01234567 = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc1x01234567));
int8x16_t vout2x01234567_3x01234567 = vcombine_s8(vqmovn_s16(vacc2x01234567), vqmovn_s16(vacc3x01234567));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(¶ms->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(¶ms->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(¶ms->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(¶ms->rndnu_neon.output_max);
vout2x01234567_3x01234567 = vmaxq_s8(vout2x01234567_3x01234567, voutput_min);
vout0x01234567_1x01234567 = vmaxq_s8(vout0x01234567_1x01234567, voutput_min);
diff --git a/src/qs8-igemm/gen/4x8c2-minmax-gemmlowp-neon-mull-padal-dup.c b/src/qs8-igemm/gen/4x8c2-minmax-rndnu-neon-mull-padal-dup.c
similarity index 89%
rename from src/qs8-igemm/gen/4x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
rename to src/qs8-igemm/gen/4x8c2-minmax-rndnu-neon-mull-padal-dup.c
index 48215e1..91b17f8 100644
--- a/src/qs8-igemm/gen/4x8c2-minmax-gemmlowp-neon-mull-padal-dup.c
+++ b/src/qs8-igemm/gen/4x8c2-minmax-rndnu-neon-mull-padal-dup.c
@@ -15,7 +15,7 @@
#include <xnnpack/math.h>
-void xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup(
+void xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup(
size_t mr,
size_t nc,
size_t kc,
@@ -244,38 +244,38 @@
p -= 4 * sizeof(void*);
} while (p != 0);
- const int32x4_t vmultiplier = vld1q_dup_s32(¶ms->gemmlowp_neon.multiplier);
- const int32x4_t vright_shift = vld1q_dup_s32(¶ms->gemmlowp_neon.right_shift);
- const int32x4_t vzero_shift_mask = vreinterpretq_s32_u32(vceqq_s32(vright_shift, vmovq_n_s32(0)));
+ const int32x4_t vright_pre_shift = vld1q_dup_s32(¶ms->rndnu_neon.right_pre_shift);
+ const int32x4_t vmultiplier = vld1q_dup_s32(¶ms->rndnu_neon.multiplier);
+ const int32x4_t vright_post_shift = vld1q_dup_s32(¶ms->rndnu_neon.right_post_shift);
- vacc0x0123 = vqrdmulhq_s32(vacc0x0123, vmultiplier);
- vacc0x4567 = vqrdmulhq_s32(vacc0x4567, vmultiplier);
- vacc1x0123 = vqrdmulhq_s32(vacc1x0123, vmultiplier);
- vacc1x4567 = vqrdmulhq_s32(vacc1x4567, vmultiplier);
- vacc2x0123 = vqrdmulhq_s32(vacc2x0123, vmultiplier);
- vacc2x4567 = vqrdmulhq_s32(vacc2x4567, vmultiplier);
- vacc3x0123 = vqrdmulhq_s32(vacc3x0123, vmultiplier);
- vacc3x4567 = vqrdmulhq_s32(vacc3x4567, vmultiplier);
+ vacc0x0123 = vshlq_s32(vacc0x0123, vright_pre_shift);
+ vacc0x4567 = vshlq_s32(vacc0x4567, vright_pre_shift);
+ vacc1x0123 = vshlq_s32(vacc1x0123, vright_pre_shift);
+ vacc1x4567 = vshlq_s32(vacc1x4567, vright_pre_shift);
+ vacc2x0123 = vshlq_s32(vacc2x0123, vright_pre_shift);
+ vacc2x4567 = vshlq_s32(vacc2x4567, vright_pre_shift);
+ vacc3x0123 = vshlq_s32(vacc3x0123, vright_pre_shift);
+ vacc3x4567 = vshlq_s32(vacc3x4567, vright_pre_shift);
- vacc0x0123 = vsraq_n_s32(vacc0x0123, vbicq_s32(vacc0x0123, vzero_shift_mask), 31);
- vacc0x4567 = vsraq_n_s32(vacc0x4567, vbicq_s32(vacc0x4567, vzero_shift_mask), 31);
- vacc1x0123 = vsraq_n_s32(vacc1x0123, vbicq_s32(vacc1x0123, vzero_shift_mask), 31);
- vacc1x4567 = vsraq_n_s32(vacc1x4567, vbicq_s32(vacc1x4567, vzero_shift_mask), 31);
- vacc2x0123 = vsraq_n_s32(vacc2x0123, vbicq_s32(vacc2x0123, vzero_shift_mask), 31);
- vacc2x4567 = vsraq_n_s32(vacc2x4567, vbicq_s32(vacc2x4567, vzero_shift_mask), 31);
- vacc3x0123 = vsraq_n_s32(vacc3x0123, vbicq_s32(vacc3x0123, vzero_shift_mask), 31);
- vacc3x4567 = vsraq_n_s32(vacc3x4567, vbicq_s32(vacc3x4567, vzero_shift_mask), 31);
+ vacc0x0123 = vqdmulhq_s32(vacc0x0123, vmultiplier);
+ vacc0x4567 = vqdmulhq_s32(vacc0x4567, vmultiplier);
+ vacc1x0123 = vqdmulhq_s32(vacc1x0123, vmultiplier);
+ vacc1x4567 = vqdmulhq_s32(vacc1x4567, vmultiplier);
+ vacc2x0123 = vqdmulhq_s32(vacc2x0123, vmultiplier);
+ vacc2x4567 = vqdmulhq_s32(vacc2x4567, vmultiplier);
+ vacc3x0123 = vqdmulhq_s32(vacc3x0123, vmultiplier);
+ vacc3x4567 = vqdmulhq_s32(vacc3x4567, vmultiplier);
- vacc0x0123 = vrshlq_s32(vacc0x0123, vright_shift);
- vacc0x4567 = vrshlq_s32(vacc0x4567, vright_shift);
- vacc1x0123 = vrshlq_s32(vacc1x0123, vright_shift);
- vacc1x4567 = vrshlq_s32(vacc1x4567, vright_shift);
- vacc2x0123 = vrshlq_s32(vacc2x0123, vright_shift);
- vacc2x4567 = vrshlq_s32(vacc2x4567, vright_shift);
- vacc3x0123 = vrshlq_s32(vacc3x0123, vright_shift);
- vacc3x4567 = vrshlq_s32(vacc3x4567, vright_shift);
+ vacc0x0123 = vrshlq_s32(vacc0x0123, vright_post_shift);
+ vacc0x4567 = vrshlq_s32(vacc0x4567, vright_post_shift);
+ vacc1x0123 = vrshlq_s32(vacc1x0123, vright_post_shift);
+ vacc1x4567 = vrshlq_s32(vacc1x4567, vright_post_shift);
+ vacc2x0123 = vrshlq_s32(vacc2x0123, vright_post_shift);
+ vacc2x4567 = vrshlq_s32(vacc2x4567, vright_post_shift);
+ vacc3x0123 = vrshlq_s32(vacc3x0123, vright_post_shift);
+ vacc3x4567 = vrshlq_s32(vacc3x4567, vright_post_shift);
- const int16x8_t voutput_zero_point = vld1q_dup_s16(¶ms->gemmlowp_neon.output_zero_point);
+ const int16x8_t voutput_zero_point = vld1q_dup_s16(¶ms->rndnu_neon.output_zero_point);
#if XNN_ARCH_ARM64
const int16x8_t vacc0x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc0x0123), vacc0x4567), voutput_zero_point);
const int16x8_t vacc1x01234567 = vqaddq_s16(vqmovn_high_s32(vqmovn_s32(vacc1x0123), vacc1x4567), voutput_zero_point);
@@ -293,8 +293,8 @@
int8x16_t vout0x01234567_1x01234567 = vcombine_s8(vqmovn_s16(vacc0x01234567), vqmovn_s16(vacc1x01234567));
int8x16_t vout2x01234567_3x01234567 = vcombine_s8(vqmovn_s16(vacc2x01234567), vqmovn_s16(vacc3x01234567));
#endif
- const int8x16_t voutput_min = vld1q_dup_s8(¶ms->gemmlowp_neon.output_min);
- const int8x16_t voutput_max = vld1q_dup_s8(¶ms->gemmlowp_neon.output_max);
+ const int8x16_t voutput_min = vld1q_dup_s8(¶ms->rndnu_neon.output_min);
+ const int8x16_t voutput_max = vld1q_dup_s8(¶ms->rndnu_neon.output_max);
vout2x01234567_3x01234567 = vmaxq_s8(vout2x01234567_3x01234567, voutput_min);
vout0x01234567_1x01234567 = vmaxq_s8(vout0x01234567_1x01234567, voutput_min);
diff --git a/src/xnnpack/gemm.h b/src/xnnpack/gemm.h
index 71704e8..48c5e73 100644
--- a/src/xnnpack/gemm.h
+++ b/src/xnnpack/gemm.h
@@ -727,14 +727,14 @@
DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16__neon_mull_addw_dup)
DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16__neon_mull_addw_dup)
-DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup)
-DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup)
-DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup)
-DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup)
-DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup)
-DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup)
-DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup)
-DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup)
+DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup)
+DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup)
+DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup)
+DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup)
+DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup)
+DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup)
+DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup)
+DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup)
DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_fp32_ukernel_1x8c2__neon_mlal_padal_dup)
DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_fp32_ukernel_2x8c2__neon_mlal_padal_dup)
@@ -742,17 +742,17 @@
DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_fp32_ukernel_1x8c2__neonv8_mlal_padal_dup)
DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_fp32_ukernel_2x8c2__neonv8_mlal_padal_dup)
-DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup)
-DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup)
-DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup)
-DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup)
-DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup)
-DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup)
-DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup)
-DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup)
-
DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup)
DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup)
+DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup)
+DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup)
+DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup)
+DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup)
+DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup)
+DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup)
+
+DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup)
+DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup)
DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c8__neon_mull_padal)
DECLARE_QS8_GEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c8__neon_mull_padal)
diff --git a/src/xnnpack/igemm.h b/src/xnnpack/igemm.h
index 611ae7e..e917491 100644
--- a/src/xnnpack/igemm.h
+++ b/src/xnnpack/igemm.h
@@ -553,14 +553,14 @@
DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16__neon_mull_addw_dup)
DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16__neon_mull_addw_dup)
-DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup)
-DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup)
-DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup)
-DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup)
-DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup)
-DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup)
-DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup)
-DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup)
+DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup)
+DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup)
+DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup)
+DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup)
+DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup)
+DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup)
+DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup)
+DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup)
DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_fp32_ukernel_1x8c2__neon_mlal_padal_dup)
DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2__neon_mlal_padal_dup)
@@ -568,17 +568,17 @@
DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_fp32_ukernel_1x8c2__neonv8_mlal_padal_dup)
DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_fp32_ukernel_2x8c2__neonv8_mlal_padal_dup)
-DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup)
-DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup)
-DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup)
-DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup)
-DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup)
-DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup)
-DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup)
-DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup)
-
DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup)
DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup)
+DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup)
+DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup)
+DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup)
+DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup)
+DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup)
+DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup)
+
+DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup)
+DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup)
DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c8__neon_mull_padal)
DECLARE_QS8_IGEMM_MINMAX_UKERNEL_FUNCTION(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c8__neon_mull_padal)
diff --git a/test/qs8-gemm-minmax-gemmlowp.cc b/test/qs8-gemm-minmax-gemmlowp.cc
index b31f51d..bef5741 100644
--- a/test/qs8-gemm-minmax-gemmlowp.cc
+++ b/test/qs8-gemm-minmax-gemmlowp.cc
@@ -22,6 +22,918 @@
#include "gemm-microkernel-tester.h"
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(8)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .a_stride(37)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .a_stride(163)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(11)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .qmin(128)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .qmax(128)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .cm_stride(11)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(8)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .a_stride(37)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .a_stride(163)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(11)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .qmin(128)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .qmax(128)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+
+ TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .cm_stride(11)
+ .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
#if XNN_ARCH_ARM64
TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C8__AARCH64_NEON_MLAL_PADAL, k_eq_16) {
TEST_REQUIRES_ARM_NEON;
@@ -13247,7302 +14159,6 @@
#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_eq_8) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(8)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(8)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_eq_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(8)
- .a_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_lt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_lt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .a_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_gt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_div_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .a_stride(43)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .a_stride(43)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(8)
- .qmin(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(8)
- .qmax(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(8)
- .cm_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_eq_8) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(8)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(8)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_eq_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(8)
- .a_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_lt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_lt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .a_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_gt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_div_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .a_stride(43)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .a_stride(43)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(8)
- .qmin(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(8)
- .qmax(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(8)
- .cm_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_eq_8) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(8)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(8)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_eq_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(8)
- .a_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_lt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_lt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .a_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_gt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_div_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(k)
- .a_stride(43)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(k)
- .a_stride(43)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(8)
- .qmin(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(8)
- .qmax(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(8)
- .cm_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_eq_8) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(8)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(8)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_eq_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(8)
- .a_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_lt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_lt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .a_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_gt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_div_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(k)
- .a_stride(43)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(k)
- .a_stride(43)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(8)
- .qmin(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(8)
- .qmax(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(8)
- .cm_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_eq_8) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(8)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(8)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_eq_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(8)
- .a_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(16)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_lt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_lt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .a_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_gt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_div_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, n_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .a_stride(43)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, n_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, n_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .a_stride(43)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, n_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(19)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(8)
- .qmin(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(8)
- .qmax(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(8)
- .cm_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_eq_8) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(8)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(8)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_eq_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(8)
- .a_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(16)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_lt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_lt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .a_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_gt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_div_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, n_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .a_stride(43)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, n_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, n_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .a_stride(43)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, n_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(19)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(8)
- .qmin(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(8)
- .qmax(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(8)
- .cm_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_eq_8) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(8)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(8)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_eq_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(8)
- .a_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(16)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_lt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_lt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .a_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_gt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_div_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, n_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(k)
- .a_stride(43)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, n_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, n_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(k)
- .a_stride(43)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, n_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(19)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(8)
- .qmin(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(8)
- .qmax(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(8)
- .cm_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_eq_8) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(8)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(8)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_eq_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(8)
- .a_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(16)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_lt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_lt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .a_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_gt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_div_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, n_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(k)
- .a_stride(43)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, n_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, n_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(k)
- .a_stride(43)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, n_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(19)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(8)
- .qmin(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(8)
- .qmax(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(8)
- .cm_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .a_stride(37)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .a_stride(163)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .cm_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .a_stride(37)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .a_stride(163)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .cm_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(16)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(16)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(16)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .a_stride(37)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .a_stride(163)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(16)
- .cm_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(16)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(16)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(16)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .a_stride(37)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .a_stride(163)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(16)
- .cm_stride(11)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(16)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(16)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(16)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(16)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .a_stride(37)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .a_stride(163)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, n_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, n_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, n_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(19)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(16)
- .cm_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(16)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(16)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(16)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(16)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .a_stride(37)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .a_stride(163)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, n_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, n_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, n_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(19)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(16)
- .cm_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(16)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(16)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(16)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(16)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .a_stride(37)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .a_stride(163)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, n_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, n_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, n_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(19)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(16)
- .cm_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(16)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(16)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(16)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(16)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .a_stride(37)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .a_stride(163)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, n_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, n_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, n_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(19)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_GEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(16)
- .cm_stride(19)
- .Test(xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
TEST(QS8_GEMM_MINMAX_GEMMLOWP_1X8C8__NEON_MULL_PADAL, k_eq_8) {
TEST_REQUIRES_ARM_NEON;
GemmMicrokernelTester()
diff --git a/test/qs8-gemm-minmax-gemmlowp.yaml b/test/qs8-gemm-minmax-gemmlowp.yaml
index ecdccaf..510db1b 100644
--- a/test/qs8-gemm-minmax-gemmlowp.yaml
+++ b/test/qs8-gemm-minmax-gemmlowp.yaml
@@ -3,6 +3,12 @@
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
+- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
+ k-block: 16
+- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
+ k-block: 16
- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c8__aarch64_neon_mlal_padal
init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
k-block: 16
@@ -90,54 +96,6 @@
- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8__neon_mull_addw_dup
init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
k-block: 8
-- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 8
-- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 8
-- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 8
-- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 8
-- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 8
-- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 8
-- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 8
-- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 8
-- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 16
-- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 16
-- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 16
-- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 16
-- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 16
-- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 16
-- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 16
-- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 16
- name: xnn_qs8_gemm_minmax_gemmlowp_ukernel_1x8c8__neon_mull_padal
init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
k-block: 8
diff --git a/test/qs8-gemm-minmax-rndnu.cc b/test/qs8-gemm-minmax-rndnu.cc
index d7ee1ab..5fcecdf 100644
--- a/test/qs8-gemm-minmax-rndnu.cc
+++ b/test/qs8-gemm-minmax-rndnu.cc
@@ -22,6 +22,7302 @@
#include "gemm-microkernel-tester.h"
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_eq_8) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(8)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(8)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_eq_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(8)
+ .a_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(8)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_lt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_lt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .a_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_gt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_div_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, n_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .a_stride(43)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, n_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, n_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .a_stride(43)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, n_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(11)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(8)
+ .qmin(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(8)
+ .qmax(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(8)
+ .cm_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_eq_8) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(8)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(8)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_eq_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(8)
+ .a_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(8)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_lt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_lt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .a_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_gt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_div_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, n_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(k)
+ .a_stride(43)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, n_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, n_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(k)
+ .a_stride(43)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, n_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(11)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(8)
+ .qmin(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(8)
+ .qmax(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(8)
+ .cm_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_eq_8) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(8)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(8)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_eq_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(8)
+ .a_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 3; m++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(8)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_lt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_lt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .a_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_gt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_div_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, n_gt_8) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        // Pass the loop value to the tester: the generated code hard-coded
+        // .n(8), so the n loop had no effect and the kernel was never run
+        // with n > NR, which is the whole point of this test.
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(n)
+          .k(k)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        // Use the loop value (was hard-coded .n(8), leaving the n loop
+        // without effect); the strided_a/subtile variants already do this.
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(n)
+          .k(k)
+          .cn_stride(11)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(n)
+ .k(k)
+ .a_stride(43)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, n_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, n_div_8) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        // Pass the loop value so n = 16 and 24 (multiples of NR) are
+        // actually exercised; the generated code hard-coded .n(8).
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(n)
+          .k(k)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(n)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(n)
+ .k(k)
+ .a_stride(43)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, n_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(11)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(8)
+ .qmin(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(8)
+ .qmax(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(8)
+ .cm_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_eq_8) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(8)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(8)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_eq_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(8)
+ .a_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 4; m++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(8)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_lt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_lt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(k)
+ .a_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_gt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(k)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_div_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, n_gt_8) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        // Pass the loop value to the tester: the generated code hard-coded
+        // .n(8), so the n loop had no effect and the kernel was never run
+        // with n > NR, which is the whole point of this test.
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)
+          .k(k)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        // Use the loop value (was hard-coded .n(8), leaving the n loop
+        // without effect); the strided_a/subtile variants already do this.
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)
+          .k(k)
+          .cn_stride(11)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(n)
+ .k(k)
+ .a_stride(43)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, n_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, n_div_8) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        // Pass the loop value so n = 16 and 24 (multiples of NR) are
+        // actually exercised; the generated code hard-coded .n(8).
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)
+          .k(k)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(n)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(n)
+ .k(k)
+ .a_stride(43)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, n_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(11)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(8)
+ .qmin(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(8)
+ .qmax(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(8)
+ .cm_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_eq_8) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(8)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(8)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_eq_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(8)
+ .a_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(16)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_lt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_lt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(k)
+ .a_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_gt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(k)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_div_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, n_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .a_stride(43)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, n_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, n_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .a_stride(43)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, n_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(19)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(8)
+ .qmin(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(8)
+ .qmax(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(8)
+ .cm_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_eq_8) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(8)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(8)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_eq_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(8)
+ .a_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(16)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_lt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_lt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .a_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_gt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_div_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, n_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(k)
+ .a_stride(43)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, n_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, n_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(k)
+ .a_stride(43)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, n_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(19)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(8)
+ .qmin(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(8)
+ .qmax(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(8)
+ .cm_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_eq_8) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(8)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(8)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_eq_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(8)
+ .a_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 3; m++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(16)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_lt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_lt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .a_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_gt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_div_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, n_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(n)
+ .k(k)
+ .a_stride(43)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, n_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, n_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(n)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(n)
+ .k(k)
+ .a_stride(43)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, n_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(19)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(8)
+ .qmin(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(8)
+ .qmax(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(8)
+ .cm_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ // Generated test suite for the 4x16c2 QS8 GEMM microkernel (NEON MULL,
+ // rndnu requantization). Every test fixes the kernel tile (mr=4, nr=16,
+ // kr=2, sr=1) and sweeps m/n/k, input/output strides, and qmin/qmax
+ // around the kernel's k-block size of 8.
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_eq_8) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(8)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(8)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_eq_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(8)
+ .a_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ // Subtile tests run every (m, n) within the tile with iterations(1) to
+ // cover the kernel's edge handling for partial tiles.
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 4; m++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(16)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_lt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_lt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .a_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_gt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_div_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ // NOTE(review): n_gt_16, n_gt_16_strided_cn, and n_div_16 below pass .n(16)
+ // even though the loop variable n iterates past 16, while the strided_a /
+ // subtile / n_div_16_strided_cn variants pass .n(n). This is copied from the
+ // generator output verbatim -- confirm the asymmetry is intended there.
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, n_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(n)
+ .k(k)
+ .a_stride(43)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, n_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, n_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(n)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(n)
+ .k(k)
+ .a_stride(43)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, n_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(19)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ // qmin/qmax(128) clamp half the int8 output range to verify activation
+ // bounds are applied after requantization.
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(8)
+ .qmin(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(8)
+ .qmax(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(8)
+ .cm_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ // Generated test suite for the 1x8c2 QS8 GEMM microkernel (NEON MLAL,
+ // rndnu requantization). Tile is mr=1, nr=8, kr=2, sr=1; the MLAL variant
+ // processes two k-blocks of 8 per iteration, so the pivot k value is 16.
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ // With mr == 1 the m-subtile loops below collapse to a single iteration;
+ // they are kept for uniformity with the multi-row kernels' generated tests.
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(8)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .a_stride(37)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .a_stride(163)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ // NOTE(review): n_gt_8 / n_gt_8_strided_cn / n_div_8 below pass .n(8) while
+ // the loop variable n exceeds 8; the strided_a / subtile / n_div_8_strided_cn
+ // variants pass .n(n). Verbatim generator output -- confirm intent there.
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(11)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .qmin(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .qmax(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .cm_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(8)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .a_stride(37)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .a_stride(163)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+  // n > nr (8) with a non-contiguous A matrix (a_stride > k).
+  TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_a) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)
+          .k(k)
+          .a_stride(83)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  // Sweeps all m <= mr together with n > nr; iterations(1) keeps the
+  // combinatorial sweep fast.
+  TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        for (uint32_t m = 1; m <= 2; m++) {
+          GemmMicrokernelTester()
+            .mr(2)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  // Verifies n that are exact multiples of nr (16, 24). Fix: `.n(8)` ignored
+  // the loop variable, so the multiple-of-nr cases were never run; sibling
+  // n_div_8_strided_cn already uses `.n(n)`.
+  TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)  // was .n(8): loop variable must drive the tested n
+          .k(k)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  // NOTE(review): remaining generated coverage for the 2x8c2 rndnu NEON GEMM
+  // microkernel: n divisible by nr with strided output/input, subtile sweeps,
+  // qmin/qmax clamping, and a strided C matrix.
+  TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)
+          .k(k)
+          .cn_stride(11)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_a) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)
+          .k(k)
+          .a_stride(83)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        for (uint32_t m = 1; m <= 2; m++) {
+          GemmMicrokernelTester()
+            .mr(2)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  // All m x n subtiles written through a strided C matrix (cm_stride).
+  TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      for (uint32_t m = 1; m <= 2; m++) {
+        for (uint32_t n = 1; n <= 8; n++) {
+          GemmMicrokernelTester()
+            .mr(2)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .cm_stride(11)
+            .iterations(1)
+            .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  // Output clamping: raised lower bound (qmin).
+  TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, qmin) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(2)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(2)
+      .n(8)
+      .k(16)
+      .qmin(128)
+      .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  // Output clamping: lowered upper bound (qmax).
+  TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, qmax) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(2)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(2)
+      .n(8)
+      .k(16)
+      .qmax(128)
+      .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(2)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(2)
+      .n(8)
+      .k(16)
+      .cm_stride(11)
+      .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+  // NOTE(review): generated coverage for the 3x8c2 rndnu NEON MLAL GEMM
+  // microkernel. k == 16 is treated by these tests as the kernel's full
+  // k block (k_eq_16 / k_lt_16 / k_gt_16 / k_div_16) -- presumably the
+  // MLAL unrolled depth; TODO confirm against the kernel source.
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(3)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(3)
+      .n(8)
+      .k(16)
+      .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(3)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(3)
+      .n(8)
+      .k(16)
+      .cn_stride(11)
+      .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(3)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(3)
+      .n(8)
+      .k(16)
+      .a_stride(19)
+      .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  // Full m x n subtile sweep at the exact k block size.
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t m = 1; m <= 3; m++) {
+      for (uint32_t n = 1; n <= 8; n++) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(m)
+          .n(n)
+          .k(16)
+          .iterations(1)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t m = 1; m <= 3; m++) {
+      GemmMicrokernelTester()
+        .mr(3)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(m)
+        .n(8)
+        .k(16)
+        .iterations(1)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 1; n <= 8; n++) {
+      GemmMicrokernelTester()
+        .mr(3)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(3)
+        .n(n)
+        .k(16)
+        .iterations(1)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      GemmMicrokernelTester()
+        .mr(3)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(3)
+        .n(8)
+        .k(k)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      GemmMicrokernelTester()
+        .mr(3)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(3)
+        .n(8)
+        .k(k)
+        .a_stride(19)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      for (uint32_t m = 1; m <= 3; m++) {
+        for (uint32_t n = 1; n <= 8; n++) {
+          GemmMicrokernelTester()
+            .mr(3)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      GemmMicrokernelTester()
+        .mr(3)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(3)
+        .n(8)
+        .k(k)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      GemmMicrokernelTester()
+        .mr(3)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(3)
+        .n(8)
+        .k(k)
+        .a_stride(37)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      for (uint32_t m = 1; m <= 3; m++) {
+        for (uint32_t n = 1; n <= 8; n++) {
+          GemmMicrokernelTester()
+            .mr(3)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 32; k <= 160; k += 16) {
+      GemmMicrokernelTester()
+        .mr(3)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(3)
+        .n(8)
+        .k(k)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 32; k <= 160; k += 16) {
+      GemmMicrokernelTester()
+        .mr(3)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(3)
+        .n(8)
+        .k(k)
+        .a_stride(163)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 32; k <= 160; k += 16) {
+      for (uint32_t m = 1; m <= 3; m++) {
+        for (uint32_t n = 1; n <= 8; n++) {
+          GemmMicrokernelTester()
+            .mr(3)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  // Verifies n greater than nr (8). Fix: `.n(8)` left the loop variable
+  // unused, so n > nr was never actually tested (sibling n_gt_8_strided_a
+  // uses `.n(n)`).
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(n)  // was .n(8): loop variable must drive the tested n
+          .k(k)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  // n > nr with strided output. Fix: `.n(8)` ignored the loop variable;
+  // n_div_8_strided_cn in this file already uses `.n(n)`.
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(n)  // was .n(8): loop variable must drive the tested n
+          .k(k)
+          .cn_stride(11)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  // n > nr (8) with a non-contiguous A matrix (a_stride > k).
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_a) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(n)
+          .k(k)
+          .a_stride(83)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  // Sweeps all m <= mr together with n > nr; iterations(1) keeps the sweep fast.
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        for (uint32_t m = 1; m <= 3; m++) {
+          GemmMicrokernelTester()
+            .mr(3)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  // Verifies n that are exact multiples of nr (16, 24). Fix: `.n(8)` ignored
+  // the loop variable; sibling n_div_8_strided_cn already uses `.n(n)`.
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(n)  // was .n(8): loop variable must drive the tested n
+          .k(k)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  // NOTE(review): remaining generated coverage for the 3x8c2 kernel:
+  // strided operands, subtile sweeps, and qmin/qmax clamping.
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(n)
+          .k(k)
+          .cn_stride(11)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_a) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(n)
+          .k(k)
+          .a_stride(83)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        for (uint32_t m = 1; m <= 3; m++) {
+          GemmMicrokernelTester()
+            .mr(3)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  // All m x n subtiles written through a strided C matrix (cm_stride).
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      for (uint32_t m = 1; m <= 3; m++) {
+        for (uint32_t n = 1; n <= 8; n++) {
+          GemmMicrokernelTester()
+            .mr(3)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .cm_stride(11)
+            .iterations(1)
+            .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  // Output clamping: raised lower bound (qmin).
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, qmin) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(3)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(3)
+      .n(8)
+      .k(16)
+      .qmin(128)
+      .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  // Output clamping: lowered upper bound (qmax).
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, qmax) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(3)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(3)
+      .n(8)
+      .k(16)
+      .qmax(128)
+      .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(3)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(3)
+      .n(8)
+      .k(16)
+      .cm_stride(11)
+      .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+  // NOTE(review): generated coverage for the 4x8c2 rndnu NEON MLAL GEMM
+  // microkernel; same case structure as the 2x8c2/3x8c2 suites above with
+  // mr = 4. k == 16 is treated as the kernel's full k block -- TODO confirm
+  // against the kernel source.
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(4)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(4)
+      .n(8)
+      .k(16)
+      .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(4)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(4)
+      .n(8)
+      .k(16)
+      .cn_stride(11)
+      .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(4)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(4)
+      .n(8)
+      .k(16)
+      .a_stride(19)
+      .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  // Full m x n subtile sweep at the exact k block size.
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t m = 1; m <= 4; m++) {
+      for (uint32_t n = 1; n <= 8; n++) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(m)
+          .n(n)
+          .k(16)
+          .iterations(1)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t m = 1; m <= 4; m++) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(m)
+        .n(8)
+        .k(16)
+        .iterations(1)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 1; n <= 8; n++) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(n)
+        .k(16)
+        .iterations(1)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(8)
+        .k(k)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(8)
+        .k(k)
+        .a_stride(19)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      for (uint32_t m = 1; m <= 4; m++) {
+        for (uint32_t n = 1; n <= 8; n++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(8)
+        .k(k)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(8)
+        .k(k)
+        .a_stride(37)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      for (uint32_t m = 1; m <= 4; m++) {
+        for (uint32_t n = 1; n <= 8; n++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 32; k <= 160; k += 16) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(8)
+        .k(k)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 32; k <= 160; k += 16) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(8)
+        .k(k)
+        .a_stride(163)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 32; k <= 160; k += 16) {
+      for (uint32_t m = 1; m <= 4; m++) {
+        for (uint32_t n = 1; n <= 8; n++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  // Verifies n greater than nr (8). Fix: `.n(8)` left the loop variable
+  // unused, so n > nr was never actually tested (sibling n_gt_8_strided_a
+  // uses `.n(n)`).
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)  // was .n(8): loop variable must drive the tested n
+          .k(k)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  // n > nr with strided output. Fix: `.n(8)` ignored the loop variable;
+  // n_div_8_strided_cn in this file already uses `.n(n)`.
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)  // was .n(8): loop variable must drive the tested n
+          .k(k)
+          .cn_stride(11)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  // n > nr (8) with a non-contiguous A matrix (a_stride > k).
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_a) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)
+          .k(k)
+          .a_stride(83)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  // Sweeps all m <= mr together with n > nr; iterations(1) keeps the sweep fast.
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        for (uint32_t m = 1; m <= 4; m++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  // Verifies n that are exact multiples of nr (16, 24). Fix: `.n(8)` ignored
+  // the loop variable; sibling n_div_8_strided_cn already uses `.n(n)`.
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)  // was .n(8): loop variable must drive the tested n
+          .k(k)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  // NOTE(review): remaining generated coverage for the 4x8c2 kernel:
+  // strided operands, subtile sweeps, and qmin/qmax clamping.
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)
+          .k(k)
+          .cn_stride(11)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_a) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)
+          .k(k)
+          .a_stride(83)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        for (uint32_t m = 1; m <= 4; m++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  // All m x n subtiles written through a strided C matrix (cm_stride).
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      for (uint32_t m = 1; m <= 4; m++) {
+        for (uint32_t n = 1; n <= 8; n++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .cm_stride(11)
+            .iterations(1)
+            .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  // Output clamping: raised lower bound (qmin).
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, qmin) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(4)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(4)
+      .n(8)
+      .k(16)
+      .qmin(128)
+      .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  // Output clamping: lowered upper bound (qmax).
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, qmax) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(4)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(4)
+      .n(8)
+      .k(16)
+      .qmax(128)
+      .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(4)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(4)
+      .n(8)
+      .k(16)
+      .cm_stride(11)
+      .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+  // NOTE(review): generated coverage for the 1x16c2 rndnu NEON MLAL GEMM
+  // microkernel (mr = 1, nr = 16; cn_stride uses 19 to exceed nr).
+  // k == 16 is treated as the kernel's full k block -- TODO confirm against
+  // the kernel source.
+  TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(1)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(1)
+      .n(16)
+      .k(16)
+      .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(1)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(1)
+      .n(16)
+      .k(16)
+      .cn_stride(19)
+      .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(1)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(1)
+      .n(16)
+      .k(16)
+      .a_stride(19)
+      .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  // Full m x n subtile sweep at the exact k block size (m loop is trivial
+  // here since mr = 1; kept for generator uniformity).
+  TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t m = 1; m <= 1; m++) {
+      for (uint32_t n = 1; n <= 16; n++) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(m)
+          .n(n)
+          .k(16)
+          .iterations(1)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t m = 1; m <= 1; m++) {
+      GemmMicrokernelTester()
+        .mr(1)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(m)
+        .n(16)
+        .k(16)
+        .iterations(1)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 1; n <= 16; n++) {
+      GemmMicrokernelTester()
+        .mr(1)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(1)
+        .n(n)
+        .k(16)
+        .iterations(1)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      GemmMicrokernelTester()
+        .mr(1)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(1)
+        .n(16)
+        .k(k)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      GemmMicrokernelTester()
+        .mr(1)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(1)
+        .n(16)
+        .k(k)
+        .a_stride(19)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      for (uint32_t m = 1; m <= 1; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(1)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      GemmMicrokernelTester()
+        .mr(1)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(1)
+        .n(16)
+        .k(k)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      GemmMicrokernelTester()
+        .mr(1)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(1)
+        .n(16)
+        .k(k)
+        .a_stride(37)
+        .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      for (uint32_t m = 1; m <= 1; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(1)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(k)
+ .a_stride(163)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, n_gt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)
+          .k(k)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)
+          .k(k)
+          .cn_stride(19)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, n_div_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)
+          .k(k)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, n_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(19)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(16)
+ .qmin(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(16)
+ .qmax(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(16)
+ .cm_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(16)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(16)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(16)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(16)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .a_stride(37)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .a_stride(163)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, n_gt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)
+          .k(k)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)
+          .k(k)
+          .cn_stride(19)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, n_div_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)
+          .k(k)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, n_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(19)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(16)
+ .qmin(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(16)
+ .qmax(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(16)
+ .cm_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(16)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(16)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(16)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 3; m++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(16)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .a_stride(37)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .a_stride(163)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, n_gt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(n)
+          .k(k)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(n)
+          .k(k)
+          .cn_stride(19)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(n)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, n_div_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(n)
+          .k(k)
+          .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(n)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(n)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, n_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(19)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(16)
+ .qmin(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(16)
+ .qmax(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(16)
+ .cm_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(16)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(16)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(16)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 4; m++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(16)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .a_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .a_stride(37)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .a_stride(163)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, n_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(n)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, n_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(n)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_a) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(n)
+ .k(k)
+ .a_stride(83)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, n_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(19)
+ .iterations(1)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(16)
+ .qmin(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(16)
+ .qmax(128)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_GEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(16)
+ .cm_stride(19)
+ .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
#if XNN_ARCH_ARM64
TEST(QS8_GEMM_MINMAX_RNDNU_2X8C8__AARCH64_NEON_MLAL_PADAL, k_eq_16) {
TEST_REQUIRES_ARM_NEON;
@@ -8231,918 +15527,6 @@
#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .a_stride(37)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .a_stride(163)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .cm_stride(11)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .a_stride(19)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .a_stride(37)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .a_stride(163)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_a) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .a_stride(83)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-
- TEST(QS8_GEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .cm_stride(11)
- .Test(xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
TEST(QS8_GEMM_MINMAX_RNDNU_1X8C8__NEON_MLAL_PADAL, k_eq_16) {
TEST_REQUIRES_ARM_NEON;
GemmMicrokernelTester()
diff --git a/test/qs8-gemm-minmax-rndnu.yaml b/test/qs8-gemm-minmax-rndnu.yaml
index 855ab68..c9ca7f5 100644
--- a/test/qs8-gemm-minmax-rndnu.yaml
+++ b/test/qs8-gemm-minmax-rndnu.yaml
@@ -3,6 +3,54 @@
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
+- name: xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 8
+- name: xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 8
+- name: xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 8
+- name: xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 8
+- name: xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 8
+- name: xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 8
+- name: xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 8
+- name: xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 8
+- name: xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 16
+- name: xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 16
+- name: xnn_qs8_gemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 16
+- name: xnn_qs8_gemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 16
+- name: xnn_qs8_gemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 16
+- name: xnn_qs8_gemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 16
+- name: xnn_qs8_gemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 16
+- name: xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 16
- name: xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c8__aarch64_neon_mlal_padal
init: xnn_init_qs8_conv_minmax_rndnu_neon_params
k-block: 16
@@ -57,12 +105,6 @@
- name: xnn_qs8_gemm_minmax_rndnu_ukernel_4x16c4__aarch64_neondot_cortex_a55
init: xnn_init_qs8_conv_minmax_rndnu_neon_params
k-block: 16
-- name: xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_rndnu_neon_params
- k-block: 16
-- name: xnn_qs8_gemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_rndnu_neon_params
- k-block: 16
- name: xnn_qs8_gemm_minmax_rndnu_ukernel_1x8c8__neon_mlal_padal
init: xnn_init_qs8_conv_minmax_rndnu_neon_params
k-block: 16
diff --git a/test/qs8-igemm-minmax-gemmlowp.cc b/test/qs8-igemm-minmax-gemmlowp.cc
index 2c41d8e..0b67ac2 100644
--- a/test/qs8-igemm-minmax-gemmlowp.cc
+++ b/test/qs8-igemm-minmax-gemmlowp.cc
@@ -22,6 +22,942 @@
#include "gemm-microkernel-tester.h"
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(8)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .ks(3)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(11)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, a_offset) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .a_offset(83)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, zero) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t mz = 0; mz < 1; mz++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .a_offset(83)
+ .zero_index(mz)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .qmin(128)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .qmax(128)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .cm_stride(11)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(8)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {  // ks=3 combined with all m <= mr, n <= nr subtiles
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .ks(3)
+ .iterations(1)  // one iteration per shape keeps the triple loop fast
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_small_kernel) {  // NOTE(review): loop var n drives bounds only; .n(8) matches the generator's convention for this case (same as the deleted mull variant)
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_small_kernel) {  // NOTE(review): loop var n drives bounds only; .n(8) matches the generator's convention for this case
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {  // non-default output row stride across all m, n subtiles
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(11)  // row stride (11 > nr) exercises the strided-output write path
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, a_offset) {  // non-zero input offset applied via the indirection buffer
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .a_offset(163)  // 163 >= m*k_max (2*80) + 3 — presumed sized to cover the largest k; confirm against GemmMicrokernelTester
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, zero) {  // zero-buffer substitution for each row index mz < mr
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t mz = 0; mz < 2; mz++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .a_offset(163)
+ .zero_index(mz)  // row whose indirection pointers use the zero buffer — presumed, per tester semantics
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, qmin) {  // exercises the lower output-clamp bound
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .qmin(128)  // raised minimum forces clamping on typical outputs
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, qmax) {  // exercises the upper output-clamp bound
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .qmax(128)  // lowered maximum forces clamping on typical outputs
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {  // full tile with non-default output row stride
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .cm_stride(11)  // row stride (11 > nr) exercises the strided-output write path
+ .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
#if XNN_ARCH_ARM64
TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C8__AARCH64_NEON_MLAL_PADAL, k_eq_16) {
TEST_REQUIRES_ARM_NEON;
@@ -22487,7494 +23423,6 @@
#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_eq_8) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(8)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(8)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_lt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, small_kernel_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .ks(3)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, n_gt_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, n_div_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, a_offset) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(43)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, zero) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t mz = 0; mz < 1; mz++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(43)
- .zero_index(mz)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(8)
- .qmin(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(8)
- .qmax(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MULL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(8)
- .cm_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_eq_8) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(8)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(8)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_lt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, small_kernel_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .ks(3)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, n_gt_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, n_div_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, a_offset) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(83)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, zero) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t mz = 0; mz < 2; mz++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(83)
- .zero_index(mz)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(8)
- .qmin(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(8)
- .qmax(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MULL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(8)
- .cm_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_eq_8) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(8)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(8)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_lt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, small_kernel_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .ks(3)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, n_gt_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, n_div_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, a_offset) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(127)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, zero) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t mz = 0; mz < 3; mz++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(127)
- .zero_index(mz)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(8)
- .qmin(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(8)
- .qmax(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MULL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(8)
- .cm_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_eq_8) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(8)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(8)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_lt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, small_kernel_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .ks(3)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, n_gt_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, n_div_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, a_offset) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(163)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, zero) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t mz = 0; mz < 4; mz++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(163)
- .zero_index(mz)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(8)
- .qmin(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(8)
- .qmax(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MULL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(8)
- .cm_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_eq_8) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(8)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(8)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(16)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_lt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, n_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, n_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, n_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, n_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, small_kernel_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .ks(3)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, n_gt_16_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, n_div_16_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(19)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, a_offset) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .ks(3)
- .a_offset(43)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, zero) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t mz = 0; mz < 1; mz++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .ks(3)
- .a_offset(43)
- .zero_index(mz)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(8)
- .qmin(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(8)
- .qmax(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MULL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(8)
- .cm_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_eq_8) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(8)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(8)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(16)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_lt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, n_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, n_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, n_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, n_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, small_kernel_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .ks(3)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, n_gt_16_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, n_div_16_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(19)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, a_offset) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .ks(3)
- .a_offset(83)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, zero) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t mz = 0; mz < 2; mz++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .ks(3)
- .a_offset(83)
- .zero_index(mz)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(8)
- .qmin(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(8)
- .qmax(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MULL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(8)
- .cm_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_eq_8) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(8)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(8)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(16)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_lt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, n_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, n_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, n_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, n_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, small_kernel_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .ks(3)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, n_gt_16_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, n_div_16_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(19)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, a_offset) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .ks(3)
- .a_offset(127)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, zero) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t mz = 0; mz < 3; mz++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .ks(3)
- .a_offset(127)
- .zero_index(mz)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(8)
- .qmin(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(8)
- .qmax(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MULL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(8)
- .cm_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_eq_8) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(8)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(8)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(16)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(8)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_lt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 8; k++) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 9; k < 16; k++) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 16; k <= 80; k += 8) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, n_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, n_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, n_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, n_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, small_kernel_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .ks(3)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, n_gt_16_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, n_div_16_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(19)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, a_offset) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .ks(3)
- .a_offset(163)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, zero) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t mz = 0; mz < 4; mz++) {
- for (size_t k = 1; k <= 40; k += 9) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .ks(3)
- .a_offset(163)
- .zero_index(mz)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(8)
- .qmin(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(8)
- .qmax(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MULL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(8)
- .cm_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .ks(3)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, a_offset) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(83)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, zero) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t mz = 0; mz < 1; mz++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(83)
- .zero_index(mz)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .cm_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .ks(3)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, a_offset) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(163)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, zero) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t mz = 0; mz < 2; mz++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(163)
- .zero_index(mz)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .cm_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(16)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(16)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .ks(3)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, n_div_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, a_offset) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(251)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, zero) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t mz = 0; mz < 3; mz++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(251)
- .zero_index(mz)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(3)
- .n(8)
- .k(16)
- .cm_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(16)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(16)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .ks(3)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, n_div_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, a_offset) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(331)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, zero) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t mz = 0; mz < 4; mz++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(331)
- .zero_index(mz)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(4)
- .n(8)
- .k(16)
- .cm_stride(11)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(16)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(16)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(16)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, n_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, n_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, n_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .ks(3)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, n_div_16_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(19)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, a_offset) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .ks(3)
- .a_offset(83)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, zero) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t mz = 0; mz < 1; mz++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(k)
- .ks(3)
- .a_offset(83)
- .zero_index(mz)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X16C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(1)
- .n(16)
- .k(16)
- .cm_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(16)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(16)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(16)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, n_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, n_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, n_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .ks(3)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, n_div_16_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(19)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, a_offset) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .ks(3)
- .a_offset(163)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, zero) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t mz = 0; mz < 2; mz++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(k)
- .ks(3)
- .a_offset(163)
- .zero_index(mz)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_2X16C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(2)
- .n(16)
- .k(16)
- .cm_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(16)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(16)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(16)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, n_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, n_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(n)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, n_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 3; m++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .ks(3)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, n_div_16_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 3; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(19)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, a_offset) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .ks(3)
- .a_offset(251)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, zero) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t mz = 0; mz < 3; mz++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(k)
- .ks(3)
- .a_offset(251)
- .zero_index(mz)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_3X16C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(3)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(3)
- .n(16)
- .k(16)
- .cm_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(16)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(16)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(16)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, n_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, n_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(n)
- .k(k)
- .cn_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, n_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 4; m++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .ks(3)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 17; n < 32; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, n_div_16_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 32; n <= 48; n += 16) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 4; m++) {
- for (uint32_t n = 1; n <= 16; n++) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(19)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, a_offset) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .ks(3)
- .a_offset(331)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, zero) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t mz = 0; mz < 4; mz++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(k)
- .ks(3)
- .a_offset(331)
- .zero_index(mz)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-
- TEST(QS8_IGEMM_MINMAX_GEMMLOWP_4X16C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(4)
- .nr(16)
- .kr(2)
- .sr(1)
- .m(4)
- .n(16)
- .k(16)
- .cm_stride(19)
- .Test(xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_gemmlowp_neon_params, xnn_init_qs8_requantization_gemmlowp_params, xnn_qs8_requantize_gemmlowp);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
TEST(QS8_IGEMM_MINMAX_GEMMLOWP_1X8__NEON_MULL_ADDW_DUP, k_eq_8) {
TEST_REQUIRES_ARM_NEON;
GemmMicrokernelTester()
diff --git a/test/qs8-igemm-minmax-gemmlowp.yaml b/test/qs8-igemm-minmax-gemmlowp.yaml
index da3e2d7..3ab478c 100644
--- a/test/qs8-igemm-minmax-gemmlowp.yaml
+++ b/test/qs8-igemm-minmax-gemmlowp.yaml
@@ -3,6 +3,12 @@
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
+- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
+ k-block: 16
+- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
+ k-block: 16
- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c8__aarch64_neon_mlal_padal
init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
k-block: 16
@@ -147,54 +153,6 @@
- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c16__neon_mlal_padal
init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
k-block: 16
-- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mull_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 8
-- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mull_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 8
-- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mull_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 8
-- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mull_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 8
-- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mull_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 8
-- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mull_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 8
-- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mull_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 8
-- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mull_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 8
-- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 16
-- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x8c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 16
-- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x8c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 16
-- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x8c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 16
-- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x16c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 16
-- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_2x16c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 16
-- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_3x16c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 16
-- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_4x16c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
- k-block: 16
- name: xnn_qs8_igemm_minmax_gemmlowp_ukernel_1x8__neon_mull_addw_dup
init: xnn_init_qs8_conv_minmax_gemmlowp_neon_params
k-block: 8
diff --git a/test/qs8-igemm-minmax-rndnu.cc b/test/qs8-igemm-minmax-rndnu.cc
index 44d41f6..c66cbb1 100644
--- a/test/qs8-igemm-minmax-rndnu.cc
+++ b/test/qs8-igemm-minmax-rndnu.cc
@@ -22,6 +22,7494 @@
#include "gemm-microkernel-tester.h"
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_eq_8) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(8)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(8)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(8)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_lt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, n_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, n_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, n_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, n_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, small_kernel_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .ks(3)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, n_gt_8_small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, n_div_8_small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(11)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, a_offset) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .a_offset(43)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, zero) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t mz = 0; mz < 1; mz++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .a_offset(43)
+ .zero_index(mz)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(8)
+ .qmin(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(8)
+ .qmax(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MULL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(8)
+ .cm_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_eq_8) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(8)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(8)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(8)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_lt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, n_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, n_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, n_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, n_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, small_kernel_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .ks(3)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, n_gt_8_small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, n_div_8_small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(11)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, a_offset) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .a_offset(83)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, zero) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t mz = 0; mz < 2; mz++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .a_offset(83)
+ .zero_index(mz)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(8)
+ .qmin(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(8)
+ .qmax(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MULL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(8)
+ .cm_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_eq_8) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(8)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(8)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 3; m++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(8)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_lt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, n_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, n_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, n_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(n)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, n_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, small_kernel_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .ks(3)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, n_gt_8_small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, n_div_8_small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(11)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, a_offset) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .a_offset(127)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, zero) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t mz = 0; mz < 3; mz++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .a_offset(127)
+ .zero_index(mz)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(8)
+ .qmin(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(8)
+ .qmax(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MULL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(8)
+ .cm_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_eq_8) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(8)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(8)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 4; m++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(8)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_lt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, n_gt_8) {  // n in (NR, 2*NR): exercises the kernel's internal nc remainder loop
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)  // was .n(8): the loop variable was dead, so n > NR was never actually tested
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, n_gt_8_strided_cn) {  // n > NR with a non-trivial output-column stride
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)  // was .n(8): dead loop variable; n_div_8_strided_cn below already uses .n(n), made consistent
+          .k(k)
+          .cn_stride(11)  // NextPrime(NR + 1)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, n_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, n_div_8) {  // n a whole multiple of NR: multiple full nc iterations
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)  // was .n(8): dead loop variable; multi-tile n was never exercised
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, n_div_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(n)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, n_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, small_kernel_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .ks(3)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, n_gt_8_small_kernel) {  // n > NR combined with a multi-position convolution kernel (ks > 1)
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)  // was .n(8): dead loop variable; n > NR was never actually tested
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, n_div_8_small_kernel) {  // n a multiple of NR combined with ks > 1
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)  // was .n(8): dead loop variable; multi-tile n was never exercised
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(11)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, a_offset) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .a_offset(163)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, zero) {  // each row in turn points at the zero buffer (IGEMM padding path)
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t mz = 0; mz < 4; mz++) {  // mz: which of the MR rows is redirected to the zero pointer
+      for (size_t k = 1; k <= 40; k += 9) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(8)
+          .k(k)
+          .ks(3)
+          .a_offset(163)  // nonzero input offset so the zero pointer must be honored, not just offset
+          .zero_index(mz)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(8)
+ .qmin(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(8)
+ .qmax(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MULL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(8)
+ .k(8)
+ .cm_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_eq_8) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(8)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(8)
+ .cn_stride(19)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(16)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_lt_8) {  // k below one full k block: exercises the kernel's k remainder path
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 8; k++) {
+      GemmMicrokernelTester()
+        .mr(1)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(1)
+        .n(16)
+        .k(k)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, n_gt_16) {  // n in (NR, 2*NR): exercises the kernel's internal nc remainder loop
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)  // was .n(16): the loop variable was dead, so n > NR was never actually tested
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_cn) {  // n > NR with a non-trivial output-column stride
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)  // was .n(16): dead loop variable; n_div_16_strided_cn below already uses .n(n), made consistent
+          .k(k)
+          .cn_stride(19)  // NextPrime(NR + 1)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, n_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, n_div_16) {  // n a whole multiple of NR: multiple full nc iterations
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)  // was .n(16): dead loop variable; multi-tile n was never exercised
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, n_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, small_kernel_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .ks(3)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, n_gt_16_small_kernel) {  // n > NR combined with a multi-position convolution kernel (ks > 1)
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)  // was .n(16): dead loop variable; n > NR was never actually tested
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, n_div_16_small_kernel) {  // n a multiple of NR combined with ks > 1
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)  // was .n(16): dead loop variable; multi-tile n was never exercised
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(19)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, a_offset) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .a_offset(43)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, zero) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t mz = 0; mz < 1; mz++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .a_offset(43)
+ .zero_index(mz)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(8)
+ .qmin(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(8)
+ .qmax(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MULL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(16)
+ .k(8)
+ .cm_stride(19)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_eq_8) {  // baseline: full MRxNR tile at exactly one k block (KR*4 = 8)
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(2)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(2)
+      .n(16)
+      .k(8)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(8)
+ .cn_stride(19)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(16)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_lt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, n_gt_16) {  // n in (NR, 2*NR): exercises the kernel's internal nc remainder loop
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)  // was .n(16): the loop variable was dead, so n > NR was never actually tested
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_cn) {  // n > NR with a non-trivial output-column stride
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)  // was .n(16): dead loop variable; n_div_16_strided_cn below already uses .n(n), made consistent
+          .k(k)
+          .cn_stride(19)  // NextPrime(NR + 1)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, n_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, n_div_16) {  // n a whole multiple of NR: multiple full nc iterations
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)  // was .n(16): dead loop variable; multi-tile n was never exercised
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, n_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, small_kernel_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .ks(3)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, n_gt_16_small_kernel) {  // n > NR combined with a multi-position convolution kernel (ks > 1)
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)  // was .n(16): dead loop variable; n > NR was never actually tested
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, n_div_16_small_kernel) {  // n a multiple of NR combined with ks > 1
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 40; k += 9) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)  // was .n(16): dead loop variable; multi-tile n was never exercised
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(19)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, a_offset) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .a_offset(83)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, zero) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t mz = 0; mz < 2; mz++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .a_offset(83)
+ .zero_index(mz)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(8)
+ .qmin(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(8)
+ .qmax(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MULL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(8)
+ .cm_stride(19)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ // Auto-generated-looking test section for the rndnu 3x16c2 NEON MULL IGEMM
+ // microkernel (mr=3, nr=16, kr=2); edit via the generator if one exists.
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_eq_8) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(8)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(8)
+ .cn_stride(19)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 3; m++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(16)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_lt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, n_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, n_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, n_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ // NOTE(review): this variant passes .n(n) while n_gt_16_strided_cn above passes
+ // .n(16); presumably mirrors the generator template -- confirm the asymmetry is intended.
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(n)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, n_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, small_kernel_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .ks(3)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, n_gt_16_small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, n_div_16_small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(19)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, a_offset) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .a_offset(127)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, zero) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t mz = 0; mz < 3; mz++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .a_offset(127)
+ .zero_index(mz)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(8)
+ .qmin(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(8)
+ .qmax(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MULL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(16)
+ .k(8)
+ .cm_stride(19)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif  // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ // Auto-generated-looking test section for the rndnu 4x16c2 NEON MULL IGEMM
+ // microkernel (mr=4, nr=16, kr=2); edit via the generator if one exists.
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_eq_8) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(8)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(8)
+ .cn_stride(19)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 4; m++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(16)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_eq_8_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(n)
+ .k(8)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_lt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_lt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 8; k++) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_gt_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 9; k < 16; k++) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_div_8) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, k_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 16; k <= 80; k += 8) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, n_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, n_gt_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, n_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, n_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, n_div_16_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(n)
+ .k(k)
+ .cn_stride(19)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, n_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, small_kernel_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .ks(3)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, n_gt_16_small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, n_div_16_small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ for (uint32_t m = 1; m <= 4; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(19)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ // a_offset / zero exercise the indirection-buffer path specific to IGEMM.
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, a_offset) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .a_offset(163)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, zero) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t mz = 0; mz < 4; mz++) {
+ for (size_t k = 1; k <= 40; k += 9) {
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .a_offset(163)
+ .zero_index(mz)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(8)
+ .qmin(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(8)
+ .qmax(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MULL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(4)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(4)
+ .n(16)
+ .k(8)
+ .cm_stride(19)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif  // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(8)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {  // n in (nr, 2*nr): partial output tile
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)  // fix: was .n(8) -- loop variable n was unused, so n > 8 was never exercised
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {  // partial tile with strided output columns
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)  // fix: was .n(8) -- loop variable n was unused (compare n_div_8_strided_cn, which uses .n(n))
+          .k(k)
+          .cn_stride(11)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {  // n a multiple of nr: 2 and 3 full tiles
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)  // fix: was .n(8) -- loop variable n was unused, so multi-tile n was never exercised
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(n)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .ks(3)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_small_kernel) {  // partial tile with ks=3 indirection
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)  // fix: was .n(8) -- loop variable n was unused, so n > 8 was never exercised
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_small_kernel) {  // multi-tile n with ks=3 indirection
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)  // fix: was .n(8) -- loop variable n was unused, so multi-tile n was never exercised
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 1; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(11)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, a_offset) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .a_offset(83)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, zero) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t mz = 0; mz < 1; mz++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .a_offset(83)
+ .zero_index(mz)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .qmin(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .qmax(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(1)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(1)
+ .n(8)
+ .k(16)
+ .cm_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(8)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {  // n in (nr, 2*nr): partial output tile
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)  // fix: was .n(8) -- loop variable n was unused, so n > 8 was never exercised
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {  // partial tile with strided output columns
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)  // fix: was .n(8) -- loop variable n was unused (compare n_div_8_strided_cn, which uses .n(n))
+          .k(k)
+          .cn_stride(11)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {  // n a multiple of nr: 2 and 3 full tiles
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)  // fix: was .n(8) -- loop variable n was unused, so multi-tile n was never exercised
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(n)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .ks(3)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_small_kernel) {  // partial tile with ks=3 indirection
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)  // fix: was .n(8) -- loop variable n was unused, so n > 8 was never exercised
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_small_kernel) {  // multi-tile n with ks=3 indirection
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)  // fix: was .n(8) -- loop variable n was unused, so multi-tile n was never exercised
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(11)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, a_offset) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .a_offset(163)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, zero) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t mz = 0; mz < 2; mz++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .a_offset(163)
+ .zero_index(mz)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .qmin(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .qmax(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(8)
+ .k(16)
+ .cm_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(16)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(16)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t m = 1; m <= 3; m++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(8)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(n)
+ .k(16)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k < 16; k++) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 17; k < 32; k++) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 32; k <= 160; k += 16) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {  // n in (nr, 2*nr): partial output tile
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(n)  // fix: was .n(8) -- loop variable n was unused, so n > 8 was never exercised
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {  // partial tile with strided output columns
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(n)  // fix: was .n(8) -- loop variable n was unused (compare n_div_8_strided_cn, which uses .n(n))
+          .k(k)
+          .cn_stride(11)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 9; n < 16; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {  // n a multiple of nr: 2 and 3 full tiles
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(n)  // fix: was .n(8) -- loop variable n was unused, so multi-tile n was never exercised
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(n)
+ .k(k)
+ .cn_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 16; n <= 24; n += 8) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .ks(3)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_small_kernel) {  // partial tile with ks=3 indirection
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(n)  // fix: was .n(8) -- loop variable n was unused, so n > 8 was never exercised
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, n_div_8_small_kernel) {  // multi-tile n with ks=3 indirection
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(n)  // fix: was .n(8) -- loop variable n was unused, so multi-tile n was never exercised
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 3; m++) {
+ for (uint32_t n = 1; n <= 8; n++) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(11)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, a_offset) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .a_offset(251)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, zero) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t mz = 0; mz < 3; mz++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(k)
+ .ks(3)
+ .a_offset(251)
+ .zero_index(mz)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(16)
+ .qmin(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(16)
+ .qmax(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_3X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(3)
+ .nr(8)
+ .kr(2)
+ .sr(1)
+ .m(3)
+ .n(8)
+ .k(16)
+ .cm_stride(11)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+  // Test battery for the rndnu 4x8c2 NEON MLAL IGEMM microkernel: k edge
+  // cases around the kernel's unroll (16), n edge cases around nr (8),
+  // subtile (m < mr, n < nr) predication, strided output, small-kernel
+  // indirection (ks=3), a_offset / zero-buffer handling, and clamping.
+  // Fix: the n_gt_8 / n_div_8 variants below looped over n but passed the
+  // constant .n(8), so n > nr was never exercised; they now pass .n(n),
+  // matching the *_strided_cn and *_subtile variants in this file.
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(4)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(4)
+      .n(8)
+      .k(16)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(4)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(4)
+      .n(8)
+      .k(16)
+      .cn_stride(11)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t m = 1; m <= 4; m++) {
+      for (uint32_t n = 1; n <= 8; n++) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(m)
+          .n(n)
+          .k(16)
+          .iterations(1)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t m = 1; m <= 4; m++) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(m)
+        .n(8)
+        .k(16)
+        .iterations(1)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 1; n <= 8; n++) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(n)
+        .k(16)
+        .iterations(1)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(8)
+        .k(k)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      for (uint32_t m = 1; m <= 4; m++) {
+        for (uint32_t n = 1; n <= 8; n++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(8)
+        .k(k)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      for (uint32_t m = 1; m <= 4; m++) {
+        for (uint32_t n = 1; n <= 8; n++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 32; k <= 160; k += 16) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(8)
+        .k(k)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 32; k <= 160; k += 16) {
+      for (uint32_t m = 1; m <= 4; m++) {
+        for (uint32_t n = 1; n <= 8; n++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)  // was .n(8): loop variable was unused
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)  // was .n(8): loop variable was unused (cf. n_div_8_strided_cn)
+          .k(k)
+          .cn_stride(11)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        for (uint32_t m = 1; m <= 4; m++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)  // was .n(8): loop variable was unused
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)
+          .k(k)
+          .cn_stride(11)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        for (uint32_t m = 1; m <= 4; m++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, small_kernel) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(8)
+        .k(k)
+        .ks(3)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      for (uint32_t m = 1; m <= 4; m++) {
+        for (uint32_t n = 1; n <= 8; n++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .ks(3)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_small_kernel) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 9; n < 16; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)  // was .n(8): loop variable was unused
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, n_div_8_small_kernel) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 16; n <= 24; n += 8) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)  // was .n(8): loop variable was unused
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      for (uint32_t m = 1; m <= 4; m++) {
+        for (uint32_t n = 1; n <= 8; n++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(8)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .cm_stride(11)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, a_offset) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(8)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(8)
+        .k(k)
+        .ks(3)
+        .a_offset(331)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, zero) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t mz = 0; mz < 4; mz++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(8)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(8)
+          .k(k)
+          .ks(3)
+          .a_offset(331)
+          .zero_index(mz)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, qmin) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(4)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(4)
+      .n(8)
+      .k(16)
+      .qmin(128)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, qmax) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(4)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(4)
+      .n(8)
+      .k(16)
+      .qmax(128)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(4)
+      .nr(8)
+      .kr(2)
+      .sr(1)
+      .m(4)
+      .n(8)
+      .k(16)
+      .cm_stride(11)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+#endif  // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+  // Test battery for the rndnu 1x16c2 NEON MLAL IGEMM microkernel (nr = 16).
+  // Fix: the n_gt_16 / n_div_16 variants below looped over n but passed the
+  // constant .n(16), so n > nr was never exercised; they now pass .n(n),
+  // matching n_div_16_strided_cn and the *_subtile variants in this file.
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(1)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(1)
+      .n(16)
+      .k(16)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(1)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(1)
+      .n(16)
+      .k(16)
+      .cn_stride(19)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t m = 1; m <= 1; m++) {
+      for (uint32_t n = 1; n <= 16; n++) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(m)
+          .n(n)
+          .k(16)
+          .iterations(1)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t m = 1; m <= 1; m++) {
+      GemmMicrokernelTester()
+        .mr(1)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(m)
+        .n(16)
+        .k(16)
+        .iterations(1)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 1; n <= 16; n++) {
+      GemmMicrokernelTester()
+        .mr(1)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(1)
+        .n(n)
+        .k(16)
+        .iterations(1)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      GemmMicrokernelTester()
+        .mr(1)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(1)
+        .n(16)
+        .k(k)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      for (uint32_t m = 1; m <= 1; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(1)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      GemmMicrokernelTester()
+        .mr(1)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(1)
+        .n(16)
+        .k(k)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      for (uint32_t m = 1; m <= 1; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(1)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 32; k <= 160; k += 16) {
+      GemmMicrokernelTester()
+        .mr(1)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(1)
+        .n(16)
+        .k(k)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 32; k <= 160; k += 16) {
+      for (uint32_t m = 1; m <= 1; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(1)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, n_gt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)  // was .n(16): loop variable was unused
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)  // was .n(16): loop variable was unused (cf. n_div_16_strided_cn)
+          .k(k)
+          .cn_stride(19)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        for (uint32_t m = 1; m <= 1; m++) {
+          GemmMicrokernelTester()
+            .mr(1)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, n_div_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)  // was .n(16): loop variable was unused
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)
+          .k(k)
+          .cn_stride(19)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, n_div_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        for (uint32_t m = 1; m <= 1; m++) {
+          GemmMicrokernelTester()
+            .mr(1)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, small_kernel) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      GemmMicrokernelTester()
+        .mr(1)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(1)
+        .n(16)
+        .k(k)
+        .ks(3)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      for (uint32_t m = 1; m <= 1; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(1)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .ks(3)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_small_kernel) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)  // was .n(16): loop variable was unused
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, n_div_16_small_kernel) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(n)  // was .n(16): loop variable was unused
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      for (uint32_t m = 1; m <= 1; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(1)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .cm_stride(19)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, a_offset) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      GemmMicrokernelTester()
+        .mr(1)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(1)
+        .n(16)
+        .k(k)
+        .ks(3)
+        .a_offset(83)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, zero) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t mz = 0; mz < 1; mz++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(1)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(1)
+          .n(16)
+          .k(k)
+          .ks(3)
+          .a_offset(83)
+          .zero_index(mz)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, qmin) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(1)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(1)
+      .n(16)
+      .k(16)
+      .qmin(128)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, qmax) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(1)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(1)
+      .n(16)
+      .k(16)
+      .qmax(128)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_1X16C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(1)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(1)
+      .n(16)
+      .k(16)
+      .cm_stride(19)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+#endif  // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+  // Test battery for the rndnu 2x16c2 NEON MLAL IGEMM microkernel (mr = 2,
+  // nr = 16); this chunk covers the k and n edge cases (continues below).
+  // Fix: n_gt_16 / n_gt_16_strided_cn / n_div_16 looped over n but passed
+  // the constant .n(16); they now pass .n(n) like n_div_16_strided_cn.
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(2)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(2)
+      .n(16)
+      .k(16)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(2)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(2)
+      .n(16)
+      .k(16)
+      .cn_stride(19)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t m = 1; m <= 2; m++) {
+      for (uint32_t n = 1; n <= 16; n++) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(m)
+          .n(n)
+          .k(16)
+          .iterations(1)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t m = 1; m <= 2; m++) {
+      GemmMicrokernelTester()
+        .mr(2)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(m)
+        .n(16)
+        .k(16)
+        .iterations(1)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 1; n <= 16; n++) {
+      GemmMicrokernelTester()
+        .mr(2)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(2)
+        .n(n)
+        .k(16)
+        .iterations(1)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      GemmMicrokernelTester()
+        .mr(2)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(2)
+        .n(16)
+        .k(k)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      for (uint32_t m = 1; m <= 2; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(2)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      GemmMicrokernelTester()
+        .mr(2)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(2)
+        .n(16)
+        .k(k)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      for (uint32_t m = 1; m <= 2; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(2)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 32; k <= 160; k += 16) {
+      GemmMicrokernelTester()
+        .mr(2)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(2)
+        .n(16)
+        .k(k)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 32; k <= 160; k += 16) {
+      for (uint32_t m = 1; m <= 2; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(2)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, n_gt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)  // was .n(16): loop variable was unused
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)  // was .n(16): loop variable was unused (cf. n_div_16_strided_cn)
+          .k(k)
+          .cn_stride(19)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        for (uint32_t m = 1; m <= 2; m++) {
+          GemmMicrokernelTester()
+            .mr(2)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, n_div_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)  // was .n(16): loop variable was unused
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(2)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(2)
+          .n(n)
+          .k(k)
+          .cn_stride(19)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, n_div_16_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .ks(3)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 17; n < 32; n++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, n_div_16_small_kernel) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t n = 32; n <= 48; n += 16) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ for (uint32_t m = 1; m <= 2; m++) {
+ for (uint32_t n = 1; n <= 16; n++) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(m)
+ .n(n)
+ .k(k)
+ .cm_stride(19)
+ .iterations(1)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, a_offset) {
+ TEST_REQUIRES_ARM_NEON;
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .a_offset(163)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, zero) {
+ TEST_REQUIRES_ARM_NEON;
+ for (uint32_t mz = 0; mz < 2; mz++) {
+ for (size_t k = 1; k <= 80; k += 17) {
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(k)
+ .ks(3)
+ .a_offset(163)
+ .zero_index(mz)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+ }
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, qmin) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(16)
+ .qmin(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, qmax) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(16)
+ .qmax(128)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+
+ TEST(QS8_IGEMM_MINMAX_RNDNU_2X16C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+ TEST_REQUIRES_ARM_NEON;
+ GemmMicrokernelTester()
+ .mr(2)
+ .nr(16)
+ .kr(2)
+ .sr(1)
+ .m(2)
+ .n(16)
+ .k(16)
+ .cm_stride(19)
+ .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+ }
+#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+  // NOTE(review): machine-generated test section (uniform structure across
+  // kernels) for the QS8 IGEMM 3x16 (kr=2) NEON MLAL "padal dup" microkernel
+  // with rndnu requantization — presumably emitted by the project's GEMM test
+  // generator; regenerate rather than hand-editing. "subtile" cases sweep
+  // m/n below the tile with iterations(1); "small_kernel" cases set ks(3).
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(3)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(3)
+      .n(16)
+      .k(16)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(3)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(3)
+      .n(16)
+      .k(16)
+      .cn_stride(19)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t m = 1; m <= 3; m++) {
+      for (uint32_t n = 1; n <= 16; n++) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(m)
+          .n(n)
+          .k(16)
+          .iterations(1)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t m = 1; m <= 3; m++) {
+      GemmMicrokernelTester()
+        .mr(3)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(m)
+        .n(16)
+        .k(16)
+        .iterations(1)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 1; n <= 16; n++) {
+      GemmMicrokernelTester()
+        .mr(3)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(3)
+        .n(n)
+        .k(16)
+        .iterations(1)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      GemmMicrokernelTester()
+        .mr(3)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(3)
+        .n(16)
+        .k(k)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      for (uint32_t m = 1; m <= 3; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(3)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      GemmMicrokernelTester()
+        .mr(3)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(3)
+        .n(16)
+        .k(k)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      for (uint32_t m = 1; m <= 3; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(3)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 32; k <= 160; k += 16) {
+      GemmMicrokernelTester()
+        .mr(3)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(3)
+        .n(16)
+        .k(k)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 32; k <= 160; k += 16) {
+      for (uint32_t m = 1; m <= 3; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(3)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  // NOTE(review): in n_gt_16 / n_div_16 below the loop variable n is not fed
+  // to .n(...) (fixed at 16) except in n_div_16_strided_cn — consistent with
+  // the other kernels here; presumably a generator quirk, confirm upstream.
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, n_gt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(16)
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(16)
+          .k(k)
+          .cn_stride(19)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        for (uint32_t m = 1; m <= 3; m++) {
+          GemmMicrokernelTester()
+            .mr(3)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, n_div_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(16)
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(n)
+          .k(k)
+          .cn_stride(19)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, n_div_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        for (uint32_t m = 1; m <= 3; m++) {
+          GemmMicrokernelTester()
+            .mr(3)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, small_kernel) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      GemmMicrokernelTester()
+        .mr(3)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(3)
+        .n(16)
+        .k(k)
+        .ks(3)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      for (uint32_t m = 1; m <= 3; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(3)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .ks(3)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_small_kernel) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(16)
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, n_div_16_small_kernel) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(16)
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      for (uint32_t m = 1; m <= 3; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(3)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .cm_stride(19)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, a_offset) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      GemmMicrokernelTester()
+        .mr(3)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(3)
+        .n(16)
+        .k(k)
+        .ks(3)
+        .a_offset(251)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, zero) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t mz = 0; mz < 3; mz++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(3)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(3)
+          .n(16)
+          .k(k)
+          .ks(3)
+          .a_offset(251)
+          .zero_index(mz)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, qmin) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(3)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(3)
+      .n(16)
+      .k(16)
+      .qmin(128)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, qmax) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(3)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(3)
+      .n(16)
+      .k(16)
+      .qmax(128)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_3X16C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(3)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(3)
+      .n(16)
+      .k(16)
+      .cm_stride(19)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+#endif  // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
+#if XNN_ARCH_ARM || XNN_ARCH_ARM64
+  // NOTE(review): machine-generated test section (uniform structure across
+  // kernels) for the QS8 IGEMM 4x16 (kr=2) NEON MLAL "padal dup" microkernel
+  // with rndnu requantization — presumably emitted by the project's GEMM test
+  // generator; regenerate rather than hand-editing. "subtile" cases sweep
+  // m/n below the tile with iterations(1); "small_kernel" cases set ks(3).
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(4)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(4)
+      .n(16)
+      .k(16)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(4)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(4)
+      .n(16)
+      .k(16)
+      .cn_stride(19)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t m = 1; m <= 4; m++) {
+      for (uint32_t n = 1; n <= 16; n++) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(m)
+          .n(n)
+          .k(16)
+          .iterations(1)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t m = 1; m <= 4; m++) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(m)
+        .n(16)
+        .k(16)
+        .iterations(1)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 1; n <= 16; n++) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(n)
+        .k(16)
+        .iterations(1)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(16)
+        .k(k)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k < 16; k++) {
+      for (uint32_t m = 1; m <= 4; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(16)
+        .k(k)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 17; k < 32; k++) {
+      for (uint32_t m = 1; m <= 4; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_div_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 32; k <= 160; k += 16) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(16)
+        .k(k)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 32; k <= 160; k += 16) {
+      for (uint32_t m = 1; m <= 4; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  // NOTE(review): in n_gt_16 / n_div_16 below the loop variable n is not fed
+  // to .n(...) (fixed at 16) except in n_div_16_strided_cn — consistent with
+  // the other kernels here; presumably a generator quirk, confirm upstream.
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, n_gt_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(16)
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(16)
+          .k(k)
+          .cn_stride(19)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        for (uint32_t m = 1; m <= 4; m++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, n_div_16) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(16)
+          .k(k)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, n_div_16_strided_cn) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(n)
+          .k(k)
+          .cn_stride(19)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, n_div_16_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        for (uint32_t m = 1; m <= 4; m++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, small_kernel) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(16)
+        .k(k)
+        .ks(3)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      for (uint32_t m = 1; m <= 4; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .ks(3)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, n_gt_16_small_kernel) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 17; n < 32; n++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(16)
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, n_div_16_small_kernel) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t n = 32; n <= 48; n += 16) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(16)
+          .k(k)
+          .ks(3)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      for (uint32_t m = 1; m <= 4; m++) {
+        for (uint32_t n = 1; n <= 16; n++) {
+          GemmMicrokernelTester()
+            .mr(4)
+            .nr(16)
+            .kr(2)
+            .sr(1)
+            .m(m)
+            .n(n)
+            .k(k)
+            .cm_stride(19)
+            .iterations(1)
+            .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+        }
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, a_offset) {
+    TEST_REQUIRES_ARM_NEON;
+    for (size_t k = 1; k <= 80; k += 17) {
+      GemmMicrokernelTester()
+        .mr(4)
+        .nr(16)
+        .kr(2)
+        .sr(1)
+        .m(4)
+        .n(16)
+        .k(k)
+        .ks(3)
+        .a_offset(331)
+        .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, zero) {
+    TEST_REQUIRES_ARM_NEON;
+    for (uint32_t mz = 0; mz < 4; mz++) {
+      for (size_t k = 1; k <= 80; k += 17) {
+        GemmMicrokernelTester()
+          .mr(4)
+          .nr(16)
+          .kr(2)
+          .sr(1)
+          .m(4)
+          .n(16)
+          .k(k)
+          .ks(3)
+          .a_offset(331)
+          .zero_index(mz)
+          .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+      }
+    }
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, qmin) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(4)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(4)
+      .n(16)
+      .k(16)
+      .qmin(128)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, qmax) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(4)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(4)
+      .n(16)
+      .k(16)
+      .qmax(128)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+
+  TEST(QS8_IGEMM_MINMAX_RNDNU_4X16C2__NEON_MLAL_PADAL_DUP, strided_cm) {
+    TEST_REQUIRES_ARM_NEON;
+    GemmMicrokernelTester()
+      .mr(4)
+      .nr(16)
+      .kr(2)
+      .sr(1)
+      .m(4)
+      .n(16)
+      .k(16)
+      .cm_stride(19)
+      .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
+  }
+#endif  // XNN_ARCH_ARM || XNN_ARCH_ARM64
+
+
#if XNN_ARCH_ARM64
TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C8__AARCH64_NEON_MLAL_PADAL, k_eq_16) {
TEST_REQUIRES_ARM_NEON;
@@ -6575,942 +14063,6 @@
#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .ks(3)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, n_div_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 1; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, a_offset) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(83)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, zero) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t mz = 0; mz < 1; mz++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(83)
- .zero_index(mz)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(1)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(1)
- .n(8)
- .k(16)
- .cm_stride(11)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_m) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(8)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_eq_16_subtile_n) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(16)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_lt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k < 16; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_gt_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 17; k < 32; k++) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, k_div_16_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 32; k <= 160; k += 16) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_strided_cn) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(n)
- .k(k)
- .cn_stride(11)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, small_kernel_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .ks(3)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_gt_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 9; n < 16; n++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, n_div_8_small_kernel) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t n = 16; n <= 24; n += 8) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .ks(3)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, strided_cm_subtile) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- for (uint32_t m = 1; m <= 2; m++) {
- for (uint32_t n = 1; n <= 8; n++) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(m)
- .n(n)
- .k(k)
- .cm_stride(11)
- .iterations(1)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, a_offset) {
- TEST_REQUIRES_ARM_NEON;
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(163)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, zero) {
- TEST_REQUIRES_ARM_NEON;
- for (uint32_t mz = 0; mz < 2; mz++) {
- for (size_t k = 1; k <= 80; k += 17) {
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(k)
- .ks(3)
- .a_offset(163)
- .zero_index(mz)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
- }
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, qmin) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .qmin(128)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, qmax) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .qmax(128)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-
- TEST(QS8_IGEMM_MINMAX_RNDNU_2X8C2__NEON_MLAL_PADAL_DUP, strided_cm) {
- TEST_REQUIRES_ARM_NEON;
- GemmMicrokernelTester()
- .mr(2)
- .nr(8)
- .kr(2)
- .sr(1)
- .m(2)
- .n(8)
- .k(16)
- .cm_stride(11)
- .Test(xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup, xnn_init_qs8_conv_minmax_rndnu_neon_params, xnn_init_qs8_requantization_rndnu_params, xnn_qs8_requantize_rndnu);
- }
-#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
-
-
-#if XNN_ARCH_ARM || XNN_ARCH_ARM64
TEST(QS8_IGEMM_MINMAX_RNDNU_1X8C8__NEON_MLAL_PADAL, k_eq_16) {
TEST_REQUIRES_ARM_NEON;
GemmMicrokernelTester()
diff --git a/test/qs8-igemm-minmax-rndnu.yaml b/test/qs8-igemm-minmax-rndnu.yaml
index 5ffb1b6..b8722ce 100644
--- a/test/qs8-igemm-minmax-rndnu.yaml
+++ b/test/qs8-igemm-minmax-rndnu.yaml
@@ -3,6 +3,54 @@
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
+- name: xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mull_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 8
+- name: xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mull_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 8
+- name: xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mull_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 8
+- name: xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mull_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 8
+- name: xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mull_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 8
+- name: xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mull_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 8
+- name: xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mull_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 8
+- name: xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mull_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 8
+- name: xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 16
+- name: xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 16
+- name: xnn_qs8_igemm_minmax_rndnu_ukernel_3x8c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 16
+- name: xnn_qs8_igemm_minmax_rndnu_ukernel_4x8c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 16
+- name: xnn_qs8_igemm_minmax_rndnu_ukernel_1x16c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 16
+- name: xnn_qs8_igemm_minmax_rndnu_ukernel_2x16c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 16
+- name: xnn_qs8_igemm_minmax_rndnu_ukernel_3x16c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 16
+- name: xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c2__neon_mlal_padal_dup
+ init: xnn_init_qs8_conv_minmax_rndnu_neon_params
+ k-block: 16
- name: xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c8__aarch64_neon_mlal_padal
init: xnn_init_qs8_conv_minmax_rndnu_neon_params
k-block: 16
@@ -45,12 +93,6 @@
- name: xnn_qs8_igemm_minmax_rndnu_ukernel_4x16c4__aarch64_neondot_ld128
init: xnn_init_qs8_conv_minmax_rndnu_neon_params
k-block: 16
-- name: xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_rndnu_neon_params
- k-block: 16
-- name: xnn_qs8_igemm_minmax_rndnu_ukernel_2x8c2__neon_mlal_padal_dup
- init: xnn_init_qs8_conv_minmax_rndnu_neon_params
- k-block: 16
- name: xnn_qs8_igemm_minmax_rndnu_ukernel_1x8c8__neon_mlal_padal
init: xnn_init_qs8_conv_minmax_rndnu_neon_params
k-block: 16