[bug fix] add_activation layer: handle zero-valued alpha/beta; fixes MobileNetV2 lowering (#71979)
Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/71979
The `if alpha:` / `if beta:` checks in `add_activation` treat `0` and `0.0` as falsy, so a zero-valued activation parameter was never assigned to the TensorRT layer. MobileNetV2 uses ReLU6, i.e. `hardtanh(min_val=0, max_val=6)`, which hits exactly this case; switching the checks to `is not None` fixes it.

Also add a local TRT test for easy verification.
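For reference, a minimal standalone sketch of the truthiness pitfall (plain Python; `FakeLayer` is just a stand-in for illustration, not the real TensorRT layer class):

```python
class FakeLayer:
    # Stand-in for a TensorRT activation layer (illustrative only).
    alpha = None
    beta = None

layer, alpha, beta = FakeLayer(), 0.0, 6.0  # hardtanh(0, 6) == ReLU6

# Old check: 0.0 is falsy, so the lower clamp is silently skipped.
if alpha:
    layer.alpha = alpha
assert layer.alpha is None  # bug: alpha was never set

# Fixed check: only a missing (None) parameter is skipped.
if alpha is not None:
    layer.alpha = alpha
if beta is not None:
    layer.beta = beta
assert (layer.alpha, layer.beta) == (0.0, 6.0)
```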
Test Plan:
buck run mode/opt -c=python.package_style=inplace scripts/wwei6:trt_local_test
buck test mode/dev-nosan caffe2/test/fx2trt/converters:test_hardtanh
Reviewed By: 842974287
Differential Revision: D33824456
fbshipit-source-id: d824b7da09929de66190fd8a077d4e73b68b9909
(cherry picked from commit 19abcadecc6ff8b58991552a874230a068294e0d)
diff --git a/test/fx2trt/converters/acc_op/test_hardtanh.py b/test/fx2trt/converters/acc_op/test_hardtanh.py
index 15c6070..9fd1b5b 100644
--- a/test/fx2trt/converters/acc_op/test_hardtanh.py
+++ b/test/fx2trt/converters/acc_op/test_hardtanh.py
@@ -5,13 +5,19 @@
import torch.nn as nn
from torch.testing._internal.common_fx2trt import AccTestCase
from torch.testing._internal.common_utils import run_tests
+from parameterized import parameterized


class TestHardtanhConverter(AccTestCase):
- def test_hardtanh(self):
+ @parameterized.expand([
+ (-2.0, 6),
+ (0, 1),
+ (0.5, 7),
+ ])
+ def test_hardtanh(self, test_min_value, test_max_value):
class Hardtanh(nn.Module):
def forward(self, x):
- return nn.functional.hardtanh(x, min_val=-0.5)
+ return nn.functional.hardtanh(x, min_val=test_min_value, max_val=test_max_value)
inputs = [torch.randn(2, 10, 10, 10)]
self.run_test(Hardtanh(), inputs, expected_ops={acc_ops.hardtanh})
diff --git a/torch/fx/experimental/fx2trt/converters/converter_utils.py b/torch/fx/experimental/fx2trt/converters/converter_utils.py
index 7eb903c..6d94bf6 100644
--- a/torch/fx/experimental/fx2trt/converters/converter_utils.py
+++ b/torch/fx/experimental/fx2trt/converters/converter_utils.py
@@ -446,9 +446,9 @@
"of the TensorRT region!"
)
layer = network.add_activation(input_val, operation_type)
- if alpha:
+ if alpha is not None:
layer.alpha = alpha
- if beta:
+ if beta is not None:
layer.beta = beta
set_layer_name(layer, target, name)
return layer.get_output(0)
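For context, a hedged sketch of where `alpha` and `beta` end up: fx2trt lowers `hardtanh` onto a TensorRT clip activation, with `alpha` as the lower bound and `beta` as the upper bound. The network construction below is illustrative only (it assumes the standard `tensorrt` Python API and is not the fx2trt converter itself):

```python
import tensorrt as trt

# Illustrative only: shows why alpha=0.0 must still be assigned.
# hardtanh(min_val=0, max_val=6) == ReLU6, the activation MobileNetV2 uses.
logger = trt.Logger(trt.Logger.WARNING)
builder = trt.Builder(logger)
network = builder.create_network(
    1 << int(trt.NetworkDefinitionCreationFlag.EXPLICIT_BATCH))

x = network.add_input("x", trt.float32, (2, 10, 10, 10))
layer = network.add_activation(x, trt.ActivationType.CLIP)
layer.alpha = 0.0  # lower clamp; dropped by the old `if alpha:` check
layer.beta = 6.0   # upper clamp
network.mark_output(layer.get_output(0))
```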