Add OpMetrics.autotuned to indicate whether an op has been autotuned.
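
When the same op appears in multiple OpMetricsDb instances, the combiner merges
the new flag with a logical OR, so the combined entry reports autotuned if any
of the occurrences it aggregates was autotuned. Below is a minimal,
illustrative sketch of that semantics; it is not part of this change and only
assumes the C++ accessors generated for the new field (the
tensorflow::profiler namespace follows the proto package; the main() wrapper
and <iostream> usage are for illustration only):

  // Sketch: OR-merge semantics of OpMetrics.autotuned when combining entries.
  #include <iostream>

  #include "tensorflow/core/profiler/protobuf/op_metrics.pb.h"

  int main() {
    tensorflow::profiler::OpMetrics dst;
    tensorflow::profiler::OpMetrics src;
    src.set_autotuned(true);  // one occurrence of the op was autotuned
    // Same merge rule as the combiner: any autotuned occurrence marks the
    // combined entry as autotuned.
    dst.set_autotuned(dst.autotuned() || src.autotuned());
    std::cout << dst.autotuned() << "\n";  // prints 1
    return 0;
  }

Because the merge is a commutative OR, the result does not depend on the order
in which partial databases are combined.
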
PiperOrigin-RevId: 461734973
diff --git a/tensorflow/core/profiler/convert/op_metrics_db_combiner.cc b/tensorflow/core/profiler/convert/op_metrics_db_combiner.cc
index 7a2ed0f..1ecc1ce 100644
--- a/tensorflow/core/profiler/convert/op_metrics_db_combiner.cc
+++ b/tensorflow/core/profiler/convert/op_metrics_db_combiner.cc
@@ -15,6 +15,9 @@
#include "tensorflow/core/profiler/convert/op_metrics_db_combiner.h"
+#include <algorithm>
+#include <utility>
+
#include "absl/container/flat_hash_map.h"
#include "tensorflow/core/platform/logging.h"
#include "tensorflow/core/profiler/protobuf/op_metrics.pb.h"
@@ -70,6 +73,7 @@
dst->set_self_time_ps(src.self_time_ps() + dst->self_time_ps());
dst->set_flops(src.flops() + dst->flops());
dst->set_bytes_accessed(src.bytes_accessed() + dst->bytes_accessed());
+ dst->set_autotuned(dst->autotuned() || src.autotuned());
if (update_num_cores) {
dst->set_num_cores(src.num_cores() + dst->num_cores());
}
diff --git a/tensorflow/core/profiler/protobuf/op_metrics.proto b/tensorflow/core/profiler/protobuf/op_metrics.proto
index ad0f80f..7c888b5 100644
--- a/tensorflow/core/profiler/protobuf/op_metrics.proto
+++ b/tensorflow/core/profiler/protobuf/op_metrics.proto
@@ -26,7 +26,7 @@
}

// Metrics for an operation (accumulated over all occurrences).
-// Next ID: 23
+// Next ID: 24
message OpMetrics {
// HLO module id. 0 for TF ops.
uint64 hlo_module_id = 13;
@@ -79,6 +79,8 @@
// hardware computation. In the future this may be extended to include info
// such as signed/unsigned, int/fp, etc. Currently only the size is needed.
uint32 computation_primitive_size = 22;
+ // Whether the op is autotuned.
+ bool autotuned = 23;
reserved 4, 8, 9;
}