Add MatMul to the BatchMatMulFP16Acc{16,32}Fake op conversion mapping
Test Plan: f276981395
Reviewed By: hx89
Differential Revision: D28815646
fbshipit-source-id: c16b081bf3da2b157b9d42ea67b03dae88e82c6d
diff --git a/caffe2/opt/fakefp16_transform.cc b/caffe2/opt/fakefp16_transform.cc
index 4db6741..10c54ef 100644
--- a/caffe2/opt/fakefp16_transform.cc
+++ b/caffe2/opt/fakefp16_transform.cc
@@ -43,6 +43,7 @@
"SparseLengthsWeightedSumFused8BitRowwiseFakeFP16NNPI"},
{"SparseLengthsMeanFused8BitRowwise",
"SparseLengthsMeanFused8BitRowwiseFakeFP16AccFP16"},
+ {"MatMul", "BatchMatMulFP16Acc32Fake"},
{"BatchMatMul", "BatchMatMulFP16Acc32Fake"},
{"Sigmoid", "SigmoidFakeFp16"},
{"SpatialBN", "SpatialBNFakeFp16NNPI"},
@@ -60,6 +61,7 @@
fake_fp16_op_conversion_map["FC"] = "Fp16FCAcc16NNPI";
fake_fp16_op_conversion_map["FbFCPacked"] = "Fp16FCAcc16NNPI";
fake_fp16_op_conversion_map["BatchMatMul"] = "BatchMatMulFP16Acc16Fake";
+ fake_fp16_op_conversion_map["MatMul"] = "BatchMatMulFP16Acc16Fake";
}
if (use_nnpi) {
fake_fp16_op_conversion_map["Sigmoid"] = "SigmoidFakeFp16NNPI";