Set _diffThreshold on our TestCase (#110603)
Signed-off-by: Edward Z. Yang <ezyang@meta.com>
Pull Request resolved: https://github.com/pytorch/pytorch/pull/110603
Approved by: https://github.com/albanD
diff --git a/torch/testing/_internal/common_utils.py b/torch/testing/_internal/common_utils.py
index bc905b2..e3aa269 100644
--- a/torch/testing/_internal/common_utils.py
+++ b/torch/testing/_internal/common_utils.py
@@ -2292,6 +2292,11 @@
# `torch.float` when `setUp` and `tearDown` are called.
_default_dtype_check_enabled: bool = False
+ # Always use difflib to print diffs on multi-line equality assertions.
+ # _diffThreshold is an undocumented unittest feature; maxDiff is documented.
+ _diffThreshold = sys.maxsize
+ maxDiff = sys.maxsize
+
# Checker to terminate the test suite early if an unrecoverable failure occurs.
def _should_stop_test_suite(self):
if torch.cuda.is_initialized():
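
For context, here is a minimal standalone sketch (not part of the patch) of what these two attributes control in CPython's unittest. `maxDiff` (documented, default `80 * 8` characters) truncates long failure diffs; `_diffThreshold` (undocumented, default `2**16`) makes `assertMultiLineEqual` skip difflib entirely when either string is longer than the threshold. The test class and string contents below are illustrative, not taken from the PR.

```python
import sys
import unittest

class BigStringTest(unittest.TestCase):
    # Mirror the PR: always run difflib, never truncate its output.
    _diffThreshold = sys.maxsize
    maxDiff = sys.maxsize

    def test_multiline_equality(self):
        # ~100 KB of text: longer than the default _diffThreshold (2**16),
        # so stock unittest would print only a bare "a != b" message built
        # from shortened reprs, with no line-by-line diff at all.
        a = "\n".join("x" * 1000 + f" line {i}" for i in range(100))
        b = a.replace("line 42", "line 42 CHANGED", 1)
        # With the overrides, the failure message is a full difflib.ndiff
        # pinpointing the single changed line.  This assertion fails on
        # purpose so you can inspect that message.
        self.assertEqual(a, b)

if __name__ == "__main__":
    unittest.main()
```

Without the `maxDiff` override, a diff this long would instead be replaced by unittest's truncation notice ("Diff is N characters long. Set self.maxDiff to None to see it."), which is why the PR raises both limits together.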