Add native op support for FusedMatMul with LeakyRelu fusion
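
This change adds the `leakyrelu_alpha` attribute to the `_MklNativeFusedMatMul` registration so the oneDNN native path declares the same fusion attributes as `_MklFusedMatMul` when `fused_ops` contains `"LeakyRelu"`. As a rough illustration (not part of this diff), a small test could check that the updated op definition accepts the new attribute; the test name is hypothetical, and it assumes a build with oneDNN/MKL enabled so that `_MklNativeFusedMatMul` is registered.

```cc
// Sketch only: verifies that _MklNativeFusedMatMul accepts
// fused_ops = {"BiasAdd", "LeakyRelu"} plus the new leakyrelu_alpha attr.
#include <vector>

#include "tensorflow/core/framework/fake_input.h"
#include "tensorflow/core/framework/node_def_builder.h"
#include "tensorflow/core/framework/node_def_util.h"
#include "tensorflow/core/framework/op.h"
#include "tensorflow/core/framework/types.pb.h"
#include "tensorflow/core/lib/core/status_test_util.h"
#include "tensorflow/core/platform/test.h"

namespace tensorflow {

TEST(MklNativeFusedMatMulDefTest, AcceptsLeakyReluAlpha) {
  const int num_args = 1;  // one fused argument: the bias for BiasAdd
  std::vector<string> fused_ops = {"BiasAdd", "LeakyRelu"};

  NodeDef fused_matmul;
  TF_ASSERT_OK(NodeDefBuilder("fused_matmul", "_MklNativeFusedMatMul")
                   .Input(FakeInput(DT_FLOAT))            // a
                   .Input(FakeInput(DT_FLOAT))            // b
                   .Input(FakeInput(num_args, DT_FLOAT))  // args (bias)
                   .Attr("T", DT_FLOAT)
                   .Attr("num_args", num_args)
                   .Attr("fused_ops", fused_ops)
                   .Attr("leakyrelu_alpha", 0.3f)  // non-default alpha
                   .Finalize(&fused_matmul));

  // Validate the finalized NodeDef against the registered OpDef; this fails
  // without the new attribute declaration added in this change.
  const OpDef* op_def = nullptr;
  TF_ASSERT_OK(
      OpRegistry::Global()->LookUpOpDef("_MklNativeFusedMatMul", &op_def));
  TF_EXPECT_OK(ValidateNodeDef(fused_matmul, *op_def));
}

}  // namespace tensorflow
```

Both registrations now declare `leakyrelu_alpha: float = 0.2`, so graph rewrites that emit the fused MatMul + BiasAdd + LeakyRelu pattern should see the same default alpha on the MKL and the native oneDNN paths.
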
diff --git a/tensorflow/core/ops/mkl_nn_ops.cc b/tensorflow/core/ops/mkl_nn_ops.cc
index e60f671..f5e3f55 100644
--- a/tensorflow/core/ops/mkl_nn_ops.cc
+++ b/tensorflow/core/ops/mkl_nn_ops.cc
@@ -293,11 +293,11 @@
.Attr("T: {bfloat16, float}")
.Attr("num_args: int >= 0")
.Attr("fused_ops: list(string) = []")
- // Attributes for the FusedBatchNorm ----------- //
+ // Attributes for the FusedBatchNorm ------------------------------------ //
.Attr("epsilon: float = 0.0001")
// Attributes for the LeakyRelu ----------------------------------------- //
.Attr("leakyrelu_alpha: float = 0.2")
- // --------------------------------------------- //
+ // ---------------------------------------------------------------------- //
.SetShapeFn(shape_inference::MatMulShape)
.Doc(R"doc(
MKL version of FusedMatMul operator. Uses MKL-DNN APIs to implement MatMul
@@ -318,9 +318,11 @@
.Attr("T: {bfloat16, float}")
.Attr("num_args: int >= 0")
.Attr("fused_ops: list(string) = []")
- // Attributes for the FusedBatchNorm ----------- //
+ // Attributes for the FusedBatchNorm ------------------------------------ //
.Attr("epsilon: float = 0.0001")
- // --------------------------------------------- //
+ // Attributes for the LeakyRelu ----------------------------------------- //
+ .Attr("leakyrelu_alpha: float = 0.2")
+ // ---------------------------------------------------------------------- //
.SetShapeFn(shape_inference::MatMulShape)
.Doc(R"doc(
oneDNN version of FusedMatMul operator that does not depend