[PT2][Inductor] Change the split cat log to debug (#120823)

Summary: Downgrade the split/cat pass log messages ("couldn't find ... args" and "example value absent for node") from `info`/`warning` to `debug` in `torch/_inductor/fx_passes/split_cat.py`, so they no longer appear as user-facing noise. Addresses the report in https://github.com/pytorch/pytorch/issues/120771.

Test Plan: see CI signal (logging-level change only; no functional behavior affected)

Differential Revision: D54323475

Pull Request resolved: https://github.com/pytorch/pytorch/pull/120823
Approved by: https://github.com/jackiexu1992
diff --git a/torch/_inductor/fx_passes/split_cat.py b/torch/_inductor/fx_passes/split_cat.py
index da470e1..59a967e 100644
--- a/torch/_inductor/fx_passes/split_cat.py
+++ b/torch/_inductor/fx_passes/split_cat.py
@@ -115,10 +115,10 @@
     graph = match.graph
     split_input, split_size, split_dim = _get_split_args(split_node)
     if split_input is None or split_dim is None or split_size is None:
-        log.info("couldn't find split args")
+        log.debug("couldn't find split args")
         return
     if "example_value" not in split_node.meta:
-        log.warning("example value absent for node: %s", split_node)
+        log.debug("example value absent for node: %s", split_node)
         return
     assert isinstance(split_node.meta["example_value"], (list, tuple))
     split_sections = [t.size()[split_dim] for t in split_node.meta["example_value"]]
@@ -180,10 +180,10 @@
         else:
             dim = 0
     if input is None:
-        log.info("couldn't find unbind args")
+        log.debug("couldn't find unbind args")
         return
     if "example_value" not in input.meta:
-        log.warning("example value absent for node: %s", input)
+        log.debug("example value absent for node: %s", input)
         return
     ndim = input.meta["example_value"].ndim
     if dim < 0:  # Normalize unbind dim
@@ -219,12 +219,12 @@
         else:
             cat_dim = 0
     if tensors is None or cat_dim is None:
-        log.info("couldn't find cat args")
+        log.debug("couldn't find cat args")
         return
     assert isinstance(tensors, (list, tuple))
     for tensor in itertools.chain([cat_node], tensors):
         if "example_value" not in tensor.meta:
-            log.warning("example value absent for node: %s", tensor)
+            log.debug("example value absent for node: %s", tensor)
             return
 
     ndim = cat_node.meta["example_value"].dim()
@@ -264,14 +264,14 @@
     tensors = get_arg_value(node, 0, "tensors")
     dim = get_arg_value(node, 1, "dim") or 0
     if tensors is None or dim is None:
-        log.info("couldn't find stack args")
+        log.debug("couldn't find stack args")
         return
     assert isinstance(tensors, (list, tuple))
 
     # A bug in pytorch, some nodes miss the example_value metadata
     for tensor in itertools.chain([node], tensors):
         if "example_value" not in tensor.meta:
-            log.warning("example value absent for node: %s", tensor)
+            log.debug("example value absent for node: %s", tensor)
             return
 
     ndim = node.meta["example_value"].dim()