Clean up ShadowEnabled mode for MLIR optimization passes as it is no longer in use.

This was useful for launching the bridge in shadow mode and collecting metrics, but now, post dark launch, this is no longer required.

PiperOrigin-RevId: 380010266
Change-Id: I393fa8d443eb2cff2648075b6a74ae6c8009aa25
diff --git a/tensorflow/compiler/mlir/mlir_graph_optimization_pass.cc b/tensorflow/compiler/mlir/mlir_graph_optimization_pass.cc
index 43b5b6d..868dc03 100644
--- a/tensorflow/compiler/mlir/mlir_graph_optimization_pass.cc
+++ b/tensorflow/compiler/mlir/mlir_graph_optimization_pass.cc
@@ -120,7 +120,6 @@
   //  overall_state equals to:
   //    Enabled if at least one pass is Enabled.
   //    Disabled if all passes are Disabled.
-  //    ShadowEnabled if all non Disabled passes are ShadowEnabled.
   //    FallbackEnabled if there are no Enabled passes and there is at least one
   //      FallbackEnabled pass.
   MlirOptimizationPassState overall_state = MlirOptimizationPassState::Disabled;
@@ -130,18 +129,12 @@
   per_pass_state.reserve(registry_->passes().size());
 
   int num_passes_enabled = 0, num_passes_disabled = 0,
-      num_passes_shadow_enabled = 0, num_passes_fallback_enabled = 0;
+      num_passes_fallback_enabled = 0;
   for (const auto& pass_registration : registry_->passes()) {
     MlirOptimizationPassState pass_state = pass_registration.pass->GetPassState(
         &device_set, config_proto, **graph, *flib_def);
     per_pass_state.push_back(pass_state);
     switch (pass_state) {
-      case MlirOptimizationPassState::ShadowEnabled: {
-        if (overall_state == MlirOptimizationPassState::Disabled)
-          overall_state = MlirOptimizationPassState::ShadowEnabled;
-        ++num_passes_shadow_enabled;
-        break;
-      }
       case MlirOptimizationPassState::FallbackEnabled: {
         if (overall_state != MlirOptimizationPassState::Enabled)
           overall_state = MlirOptimizationPassState::FallbackEnabled;
@@ -176,7 +169,6 @@
   LOG_FIRST_N(INFO, 1) << "MLIR Graph Optimization Passes."
                        << " Enabled: " << num_passes_enabled
                        << ", Disabled: " << num_passes_disabled
-                       << ", ShadowEnabled: " << num_passes_shadow_enabled
                        << ", FallbackEnabled: " << num_passes_fallback_enabled
                        << ", Total: " << registry_->passes().size();
 
@@ -204,7 +196,7 @@
       return module_ref_status.status();
     }
 
-    // Do not fail, just keep the original TF graph unchanged in shadow mode.
+    // Do not fail, just keep the original TF graph unchanged in fallback mode.
     return Status::OK();
   }
 
@@ -222,29 +214,19 @@
 
     Status pass_status = Status::OK();
     auto pass_state = per_pass_state[per_pass_state_index++];
-    // There will not be MLIR module conversion back to the TF graph at the
-    // very end if overall state is ShadowEnabled.
-    // Avoid making MLIR module copies in this case.
-    if (pass_state == MlirOptimizationPassState::Enabled ||
-        (pass_state == MlirOptimizationPassState::ShadowEnabled &&
-         overall_state == MlirOptimizationPassState::ShadowEnabled)) {
+    if (pass_state == MlirOptimizationPassState::Enabled) {
       pass_status = pass_registration.pass->Run(config_proto, *module_ref,
                                                 **graph, *flib_def);
-    } else if (pass_state == MlirOptimizationPassState::ShadowEnabled ||
-               pass_state == MlirOptimizationPassState::FallbackEnabled) {
-      // Make sure when the pass is:
-      //   ShadowEnabled, it does not modify the MLIR module.
-      //   FallbackEnabled, it only modifies the MLIR module in case of
-      //     no failures.
+    } else if (pass_state == MlirOptimizationPassState::FallbackEnabled) {
+      // Make sure when the pass is FallbackEnabled, it only modifies the MLIR
+      // module in case of no failures.
       auto module_ref_clone = module_ref->clone();
       pass_status = pass_registration.pass->Run(config_proto, module_ref_clone,
                                                 **graph, *flib_def);
-      if (pass_state == MlirOptimizationPassState::FallbackEnabled &&
-          pass_status.ok()) {
+      if (pass_status.ok())
         module_ref = module_ref_clone;
-      } else {
+      else
         module_ref_clone->destroy();
-      }
     }
 
     if (!pass_status.ok()) {
@@ -274,19 +256,6 @@
   GraphExportConfig export_config;
   absl::flat_hash_set<Node*> control_ret_nodes;
 
-  // All passes are shadow enabled. Just convert MLIR module back to
-  // the dummy graph and record success/failure stats.
-  if (overall_state == MlirOptimizationPassState::ShadowEnabled) {
-    auto empty_graph = std::make_unique<Graph>(OpRegistry::Global());
-    FunctionLibraryDefinition empty_flib = empty_graph->flib_def();
-
-    auto mlir_to_graph_status =
-        ConvertMlirToGraph(*module_ref, export_config, &empty_graph,
-                           &empty_flib, &control_ret_nodes);
-
-    return Status::OK();
-  }
-
   // Some or all passes are enabled. Convert MLIR module and return back
   // resulted graph.
   TF_RETURN_WITH_CONTEXT_IF_ERROR(
@@ -321,9 +290,7 @@
       pass->GetPassState(options.device_set, options.session_options->config,
                          **options.graph, *options.flib_def);
 
-  // Do not run V1 compatibility pass in shadow mode.
-  if (pass_state == MlirOptimizationPassState::Disabled ||
-      pass_state == MlirOptimizationPassState::ShadowEnabled) {
+  if (pass_state == MlirOptimizationPassState::Disabled) {
     LOG_FIRST_N(INFO, 1) << "MLIR V1 optimization pass is not enabled";
     return Status::OK();
   }
diff --git a/tensorflow/compiler/mlir/mlir_graph_optimization_pass.h b/tensorflow/compiler/mlir/mlir_graph_optimization_pass.h
index a0b6953..1f66326 100644
--- a/tensorflow/compiler/mlir/mlir_graph_optimization_pass.h
+++ b/tensorflow/compiler/mlir/mlir_graph_optimization_pass.h
@@ -31,18 +31,10 @@
 
 // Disabled - skip execution of the pass.
 // Enabled - execute the pass, propagate errors to the caller if any.
-// ShadowEnabled - execute the pass in a shadow mode. The pass should not commit
-//   any changes to the MLIR module it's processing. Failures are not propagated
-//   to the caller.
 // FallbackEnabled - execute the pass and commit all the changes to the MLIR
 //   module in case of success. Do not commit any changes in case of failures,
 //   let the rest of the pipeline run.
-enum class MlirOptimizationPassState {
-  Disabled,
-  Enabled,
-  ShadowEnabled,
-  FallbackEnabled
-};
+enum class MlirOptimizationPassState { Disabled, Enabled, FallbackEnabled };
 
 // An API for registering MLIR ModulePass with the Tensorflow runtime. These
 // passes are running only for function graphs built by Tensorflow V2 and
@@ -56,11 +48,12 @@
   // Returns an enum value:
   //   Enabled if the pass is enabled for the given graph with specified config.
   //   Disabled if the pass is disabled.
-  //   ShadowEnabled if the pass needs to be executed in shadow mode.
+  //   FallbackEnabled if the pass needs to be executed in fallback mode.
   //
-  // When the pass is ShadowEnabled, the pass is executed for metrics collection
-  // and reporting purposes only, but none of the changes it makes to the MLIR
-  // module will be committed.
+  // When the pass is FallbackEnabled, the pass is executed and the changes it
+  // makes to the MLIR module will be committed only if the pass was successful,
+  // otherwise no changes are committed and the rest of the pipeline is run.
+  //
   // `device_set` can be nullptr if the devices information is not
   // available or no device specific filtering is required.
   // `function_library` contains function definitions for function calls in
@@ -122,19 +115,6 @@
       : registry_(registry) {}
 
   // Executes all of the underlying registered MlirOptimizationPasses.
-  //
-  // The MlirFunctionOptimizationPass will be executed in fully shadow mode if
-  // all of the underlying registered MlirOptimizationPasses are ShadowEnabled.
-  // In this case, no changes should be done to the original TF graph and no
-  // failures propagated back to the user. Failures during the conversion
-  // of TF graph to MLIR module and back will be treated as a soft
-  // failures, e.g., relevant stats will be recorded and no error returned
-  // back to the caller.
-  //
-  // In case some of the passes are shadow enabled while others are enabled,
-  // failures in the enabled passes will be treated as real errors and
-  // propagated back to the caller. Failure during the shadow pass execution
-  // is a soft failure.
   Status Run(const DeviceSet& device_set, const ConfigProto& config_proto,
              std::unique_ptr<Graph>* graph, FunctionLibraryDefinition* flib_def,
              std::vector<std::string>* control_ret_node_names,
diff --git a/tensorflow/compiler/mlir/mlir_graph_optimization_pass_test.cc b/tensorflow/compiler/mlir/mlir_graph_optimization_pass_test.cc
index 65d1577..e697204 100644
--- a/tensorflow/compiler/mlir/mlir_graph_optimization_pass_test.cc
+++ b/tensorflow/compiler/mlir/mlir_graph_optimization_pass_test.cc
@@ -152,7 +152,7 @@
   bool control_rets_updated_{false};
 };
 
-TEST_F(MlirGraphOptimizationPassTest, OptimizationPassFailsNoShadow) {
+TEST_F(MlirGraphOptimizationPassTest, OptimizationPassFailsNoFallback) {
   Init(Status(error::Code::ABORTED, "aborted"),
        {MlirOptimizationPassState::Enabled});
 
@@ -166,56 +166,9 @@
   verifyGraph(original_graph_def);
 }
 
-TEST_F(MlirGraphOptimizationPassTest, OptimizationPassFailsShadow) {
-  Init(Status(error::Code::ABORTED, "aborted"),
-       {MlirOptimizationPassState::ShadowEnabled,
-        MlirOptimizationPassState::ShadowEnabled});
-
-  GraphDef original_graph_def;
-  graph_->ToGraphDef(&original_graph_def);
-
-  EXPECT_EQ(function_optimization_pass_.Run(
-                device_set_, config_proto_, &graph_, flib_.get(),
-                &control_ret_node_names_, &control_rets_updated_),
-            Status::OK());
-  verifyGraph(original_graph_def);
-}
-
-TEST_F(MlirGraphOptimizationPassTest, OptimizationPassDoesNotFailShadow) {
-  Init(Status::OK(), {MlirOptimizationPassState::Disabled,
-                      MlirOptimizationPassState::ShadowEnabled});
-
-  GraphDef original_graph_def;
-  graph_->ToGraphDef(&original_graph_def);
-
-  EXPECT_EQ(function_optimization_pass_.Run(
-                device_set_, config_proto_, &graph_, flib_.get(),
-                &control_ret_node_names_, &control_rets_updated_),
-            Status::OK());
-  verifyGraph(original_graph_def);
-}
-
-TEST_F(MlirGraphOptimizationPassTest,
-       OptimizationPassFailsMixShadowAndEnabled) {
-  Init(Status(error::Code::ABORTED, "aborted"),
-       {MlirOptimizationPassState::Disabled, MlirOptimizationPassState::Enabled,
-        MlirOptimizationPassState::ShadowEnabled});
-
-  GraphDef original_graph_def;
-  graph_->ToGraphDef(&original_graph_def);
-
-  EXPECT_EQ(function_optimization_pass_.Run(
-                device_set_, config_proto_, &graph_, flib_.get(),
-                &control_ret_node_names_, &control_rets_updated_),
-            Status(error::Code::ABORTED, "aborted"));
-  verifyGraph(original_graph_def);
-}
-
-TEST_F(MlirGraphOptimizationPassTest,
-       OptimizationPassFailsShadowDisabledFallback) {
+TEST_F(MlirGraphOptimizationPassTest, OptimizationPassFailsDisabledFallback) {
   Init(Status(error::Code::ABORTED, "aborted"),
        {MlirOptimizationPassState::Disabled,
-        MlirOptimizationPassState::ShadowEnabled,
         MlirOptimizationPassState::FallbackEnabled});
 
   GraphDef original_graph_def;
@@ -230,10 +183,8 @@
   verifyGraph(original_graph_def);
 }
 
-TEST_F(MlirGraphOptimizationPassTest,
-       OptimizationPassDoesNotFailShadowFallback) {
-  Init(Status::OK(), {MlirOptimizationPassState::ShadowEnabled,
-                      MlirOptimizationPassState::FallbackEnabled});
+TEST_F(MlirGraphOptimizationPassTest, OptimizationPassDoesNotFailFallback) {
+  Init(Status::OK(), {MlirOptimizationPassState::FallbackEnabled});
 
   GraphDef original_graph_def;
   graph_->ToGraphDef(&original_graph_def);