[easy] Remove large LayerNorm sample input causing OOM from ModuleInfo (#98424)

The '3d_no_affine_large_feature' sample is dropped from the LayerNorm ModuleInfo entry in common_modules.py and re-added as a legacy entry in common_nn.py (the new_module_tests-style dict list), so the case is still covered without OOMing the ModuleInfo-based tests.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/98424
Approved by: https://github.com/huydhn, https://github.com/albanD
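
For reference, a minimal sketch of what this sample exercises (illustrative only, not part of the patch; shapes and arguments follow the constructor and forward input in the diff below):

    import torch
    import torch.nn as nn

    # Large normalized_shape, no affine parameters (elementwise_affine=False).
    layer_norm = nn.LayerNorm([56, 56, 56], eps=1e-5, elementwise_affine=False)
    x = torch.randn(4, 56, 56, 56, requires_grad=True)  # ~700K elements, ~2.8 MB in fp32
    out = layer_norm(x)
    out.sum().backward()  # gradients flow over the full 56x56x56 feature volume
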
diff --git a/torch/testing/_internal/common_modules.py b/torch/testing/_internal/common_modules.py
index 7b3f99d..81fc53b 100644
--- a/torch/testing/_internal/common_modules.py
+++ b/torch/testing/_internal/common_modules.py
@@ -1291,10 +1291,6 @@
             forward_input=FunctionInput(make_input((4, 2, 2, 5))),
             desc='3d_no_elementwise_affine'),
         ModuleInput(
-            constructor_input=FunctionInput([56, 56, 56], 1e-5, False),
-            forward_input=FunctionInput(make_input((4, 56, 56, 56))),
-            desc='3d_no_affine_large_feature'),
-        ModuleInput(
             constructor_input=FunctionInput([5], 1e-3),
             forward_input=FunctionInput(make_input((0, 5))),
             desc='1d_empty_elementwise_affine'),
diff --git a/torch/testing/_internal/common_nn.py b/torch/testing/_internal/common_nn.py
index addfe32..aaf5169 100644
--- a/torch/testing/_internal/common_nn.py
+++ b/torch/testing/_internal/common_nn.py
@@ -2978,6 +2978,17 @@
         reference_fn=single_batch_reference_fn,
         desc="no_batch_dim",
     ),
+    dict(
+        module_name='LayerNorm',
+        constructor_args=([56, 56, 56], 1e-5, False),
+        cpp_constructor_args='torch::nn::LayerNormOptions({56, 56, 56}).eps(1e-5).elementwise_affine(false)',
+        input_size=(4, 56, 56, 56),
+        cudnn=True,
+        check_eval=True,
+        gradcheck_fast_mode=True,
+        check_half=True,
+        desc='3d_no_affine_large_feature',
+    ),
 ]
 
 # add conv padding mode tests: