Updates NNAPI on TFLite with Telemetry API.
PiperOrigin-RevId: 395926795
Change-Id: I008b2b5452ee63f364a388af8314478ea6c64787
diff --git a/tensorflow/lite/nnapi/NeuralNetworksTypes.h b/tensorflow/lite/nnapi/NeuralNetworksTypes.h
index 18dfd30..2f400ef 100644
--- a/tensorflow/lite/nnapi/NeuralNetworksTypes.h
+++ b/tensorflow/lite/nnapi/NeuralNetworksTypes.h
@@ -498,6 +498,65 @@
*/
typedef struct ANeuralNetworksDevice ANeuralNetworksDevice;
+/**
+ * Diagnostic result codes.
+ */
+typedef enum {
+ ANNDIAG_NO_ERROR = 0,
+
+ /**
+ * Failure caused by failure to load support library driver.
+ */
+ ANNDIAG_FAILED_TO_LOAD_SL = 1,
+
+ /**
+ * Failure caused by failure to register HAL service.
+ */
+ ANNDIAG_FAILED_TO_REGISTER_SERVICE = 2,
+
+ /**
+ * General failure.
+ */
+ ANNDIAG_GENERAL_ERROR = 3,
+
+ /**
+   * Invalid argument.
+ */
+ ANNDIAG_INVALID_ARGUMENT = 4,
+} ANeuralNetworksDiagnosticResultCode;
+
+/**
+ * Diagnostic data class.
+ */
+typedef enum {
+ ANNDIAG_DATA_CLASS_UNKNOWN = 0,
+ ANNDIAG_DATA_CLASS_OTHER = 1,
+ ANNDIAG_DATA_CLASS_FLOAT32 = 2,
+ ANNDIAG_DATA_CLASS_FLOAT16 = 3,
+ ANNDIAG_DATA_CLASS_QUANT = 4,
+ ANNDIAG_DATA_CLASS_MIXED = 5
+} ANeuralNetworksDiagnosticDataClass;
+
+/**
+ * Diagnostic execution mode.
+ */
+typedef enum {
+ ANNDIAG_EXECUTION_MODE_UNKNOWN = 0,
+ ANNDIAG_EXECUTION_MODE_ASYNC = 1,
+ ANNDIAG_EXECUTION_MODE_SYNC = 2,
+ ANNDIAG_EXECUTION_MODE_BURST = 3,
+ ANNDIAG_EXECUTION_MODE_ASYNC_WITH_DEPS = 4,
+} ANeuralNetworksDiagnosticExecutionMode;
+
+typedef struct ANeuralNetworksDiagnosticCompilationInfo
+ ANeuralNetworksDiagnosticCompilationInfo;
+typedef struct ANeuralNetworksDiagnosticExecutionInfo
+ ANeuralNetworksDiagnosticExecutionInfo;
+typedef void (*ANeuralNetworksDiagnosticCompilationFinishedCallback)(
+ const void* context, const ANeuralNetworksDiagnosticCompilationInfo* info);
+typedef void (*ANeuralNetworksDiagnosticExecutionFinishedCallback)(
+ const void* context, const ANeuralNetworksDiagnosticExecutionInfo* info);
+
// nn api function types
typedef int (*ANeuralNetworksMemory_createFromFd_fn)(
@@ -745,4 +804,102 @@
ANeuralNetworksExecution* execution, bool reusable);
typedef int64_t (*ANeuralNetworks_getRuntimeFeatureLevel_fn)();
+
+typedef int32_t (*SL_ANeuralNetworksDiagnosticCompilationInfo_getSessionId_fn)(
+ const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
+
+typedef int64_t (
+ *SL_ANeuralNetworksDiagnosticCompilationInfo_getNnApiVersion_fn)(
+ const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
+
+typedef const uint8_t* (
+ *SL_ANeuralNetworksDiagnosticCompilationInfo_getModelArchHash_fn)(
+ const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
+
+typedef const char* (
+ *SL_ANeuralNetworksDiagnosticCompilationInfo_getDeviceIds_fn)(
+ const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
+
+typedef int32_t (*SL_ANeuralNetworksDiagnosticCompilationInfo_getErrorCode_fn)(
+ const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
+
+typedef ANeuralNetworksDiagnosticDataClass (
+ *SL_ANeuralNetworksDiagnosticCompilationInfo_getInputDataClass_fn)(
+ const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
+
+typedef ANeuralNetworksDiagnosticDataClass (
+ *SL_ANeuralNetworksDiagnosticCompilationInfo_getOutputDataClass_fn)(
+ const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
+
+typedef uint64_t (
+ *SL_ANeuralNetworksDiagnosticCompilationInfo_getCompilationTimeNanos_fn)(
+ const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
+
+typedef bool (*SL_ANeuralNetworksDiagnosticCompilationInfo_isCachingEnabled_fn)(
+ const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
+
+typedef bool (
+ *SL_ANeuralNetworksDiagnosticCompilationInfo_isControlFlowUsed_fn)(
+ const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
+
+typedef bool (
+ *SL_ANeuralNetworksDiagnosticCompilationInfo_areDynamicTensorsUsed_fn)(
+ const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo);
+
+typedef int32_t (*SL_ANeuralNetworksDiagnosticExecutionInfo_getSessionId_fn)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+typedef int64_t (*SL_ANeuralNetworksDiagnosticExecutionInfo_getNnApiVersion_fn)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+typedef const uint8_t* (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getModelArchHash_fn)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+typedef const char* (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getDeviceIds_fn)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+typedef ANeuralNetworksDiagnosticExecutionMode (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getExecutionMode_fn)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+typedef ANeuralNetworksDiagnosticDataClass (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getInputDataClass_fn)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+typedef ANeuralNetworksDiagnosticDataClass (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getOutputDataClass_fn)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+typedef uint32_t (*SL_ANeuralNetworksDiagnosticExecutionInfo_getErrorCode_fn)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+typedef uint64_t (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getRuntimeExecutionTimeNanos_fn)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+typedef uint64_t (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getDriverExecutionTimeNanos_fn)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+typedef uint64_t (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getHardwareExecutionTimeNanos_fn)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+typedef bool (*SL_ANeuralNetworksDiagnosticExecutionInfo_isCachingEnabled_fn)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+typedef bool (*SL_ANeuralNetworksDiagnosticExecutionInfo_isControlFlowUsed_fn)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+typedef bool (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_areDynamicTensorsUsed_fn)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+typedef void (*SL_ANeuralNetworksDiagnostic_registerCallbacks_fn)(
+ ANeuralNetworksDiagnosticCompilationFinishedCallback compilationCallback,
+ ANeuralNetworksDiagnosticExecutionFinishedCallback executionCallback,
+ void* callbackContext);
+
#endif // TENSORFLOW_LITE_NNAPI_NEURALNETWORKSTYPES_H_
diff --git a/tensorflow/lite/nnapi/nnapi_implementation.cc b/tensorflow/lite/nnapi/nnapi_implementation.cc
index 3be2916..2722be7 100644
--- a/tensorflow/lite/nnapi/nnapi_implementation.cc
+++ b/tensorflow/lite/nnapi/nnapi_implementation.cc
@@ -23,7 +23,6 @@
#include <algorithm>
#include <cstdlib>
-#include "tensorflow/lite/nnapi/NeuralNetworksTypes.h"
#include "tensorflow/lite/nnapi/sl/public/NeuralNetworksSupportLibraryImpl.h"
#ifdef __ANDROID__
@@ -314,6 +313,83 @@
ANeuralNetworksExecution_enableInputAndOutputPadding);
LOAD_FUNCTION_OPTIONAL(libneuralnetworks,
ANeuralNetworksExecution_setReusable);
+
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticCompilationInfo_getSessionId);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticCompilationInfo_getNnApiVersion);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticCompilationInfo_getModelArchHash);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticCompilationInfo_getDeviceIds);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticCompilationInfo_getErrorCode);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticCompilationInfo_getInputDataClass);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticCompilationInfo_getOutputDataClass);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticCompilationInfo_getCompilationTimeNanos);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticCompilationInfo_isCachingEnabled);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticCompilationInfo_isControlFlowUsed);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticCompilationInfo_areDynamicTensorsUsed);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getSessionId);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getNnApiVersion);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getModelArchHash);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getDeviceIds);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getExecutionMode);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getInputDataClass);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getOutputDataClass);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getErrorCode);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getRuntimeExecutionTimeNanos);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getDriverExecutionTimeNanos);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getHardwareExecutionTimeNanos);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticExecutionInfo_isCachingEnabled);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticExecutionInfo_isControlFlowUsed);
+ LOAD_FUNCTION_OPTIONAL(
+ libneuralnetworks,
+ SL_ANeuralNetworksDiagnosticExecutionInfo_areDynamicTensorsUsed);
+
#ifndef __ANDROID__
// If libneuralnetworks.so is loaded, but android_sdk_version is not set,
// then determine android_sdk_version by testing which functions are
@@ -435,6 +511,57 @@
ASSIGN_SL_FUNCTION_TO_NNAPI(ANeuralNetworks_getRuntimeFeatureLevel);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticCompilationInfo_getSessionId);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticCompilationInfo_getNnApiVersion);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticCompilationInfo_getModelArchHash);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticCompilationInfo_getDeviceIds);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticCompilationInfo_getErrorCode);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticCompilationInfo_getInputDataClass);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticCompilationInfo_getOutputDataClass);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticCompilationInfo_getCompilationTimeNanos);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticCompilationInfo_isCachingEnabled);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticCompilationInfo_isControlFlowUsed);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticCompilationInfo_areDynamicTensorsUsed);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getSessionId);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getNnApiVersion);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getModelArchHash);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getDeviceIds);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getExecutionMode);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getInputDataClass);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getOutputDataClass);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getErrorCode);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getRuntimeExecutionTimeNanos);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getDriverExecutionTimeNanos);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticExecutionInfo_getHardwareExecutionTimeNanos);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticExecutionInfo_isCachingEnabled);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticExecutionInfo_isControlFlowUsed);
+ ASSIGN_SL_FUNCTION_TO_NNAPI(
+ SL_ANeuralNetworksDiagnosticExecutionInfo_areDynamicTensorsUsed);
+
// There are several functions that are defined in the SL but are not yet used
// in the delegate:
// * ANeuralNetworksDevice_wait
diff --git a/tensorflow/lite/nnapi/nnapi_implementation.h b/tensorflow/lite/nnapi/nnapi_implementation.h
index f5f1818..005ec01 100644
--- a/tensorflow/lite/nnapi/nnapi_implementation.h
+++ b/tensorflow/lite/nnapi/nnapi_implementation.h
@@ -1784,6 +1784,326 @@
* Available since NNAPI feature level 5.
*/
int64_t (*ANeuralNetworks_getRuntimeFeatureLevel)();
+
+ /**
+ * Gets the ID that identifies a single session of client interacting with
+ * NNAPI runtime.
+ *
+ * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
+ * object.
+ * @return Session info id.
+ */
+ int32_t (*SL_ANeuralNetworksDiagnosticCompilationInfo_getSessionId)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * Gets NNAPI version.
+ *
+ * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
+ * object.
+ * @return NNAPI version.
+ */
+ int64_t (*SL_ANeuralNetworksDiagnosticCompilationInfo_getNnApiVersion)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * Gets the hash of the model architecture (without weights).
+ *
+ * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
+ * object.
+ * @return Model hash.
+ */
+ const uint8_t* (
+ *SL_ANeuralNetworksDiagnosticCompilationInfo_getModelArchHash)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * Gets the device IDs as a comma-concatenated string.
+ *
+ * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
+ * object.
+ * @return Device ID.
+ */
+ const char* (*SL_ANeuralNetworksDiagnosticCompilationInfo_getDeviceIds)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * Gets the error code.
+ *
+ * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
+ * object.
+ * @return Error code.
+ */
+ int32_t (*SL_ANeuralNetworksDiagnosticCompilationInfo_getErrorCode)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * Gets the type of tensors used for inputs.
+ *
+ * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
+ * object.
+ * @return Input data class.
+ */
+ ANeuralNetworksDiagnosticDataClass (
+ *SL_ANeuralNetworksDiagnosticCompilationInfo_getInputDataClass)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * Gets the type of tensors used for outputs.
+ *
+ * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
+ * object.
+ * @return Output data class.
+ */
+ ANeuralNetworksDiagnosticDataClass (
+ *SL_ANeuralNetworksDiagnosticCompilationInfo_getOutputDataClass)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * Gets how many nanoseconds elapsed when compiling the model.
+ *
+ * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
+ * object.
+ * @return Time to compile the model in nanoseconds. UINT64_MAX indicates that
+ * timing information is not available.
+ */
+ uint64_t (
+ *SL_ANeuralNetworksDiagnosticCompilationInfo_getCompilationTimeNanos)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * Is caching enabled?
+ *
+ * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
+ * object.
+ * @return Whether caching is enabled.
+ */
+ bool (*SL_ANeuralNetworksDiagnosticCompilationInfo_isCachingEnabled)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * Is control flow used?
+ *
+ * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
+ * object.
+ * @return Whether control flow was used.
+ */
+ bool (*SL_ANeuralNetworksDiagnosticCompilationInfo_isControlFlowUsed)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * Are dynamic tensors used?
+ *
+ * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info
+ * object.
+ * @return Whether dynamic tensors were used.
+ */
+ bool (*SL_ANeuralNetworksDiagnosticCompilationInfo_areDynamicTensorsUsed)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * Gets the ID that identifies a single session of client interacting with
+ * NNAPI runtime.
+ *
+   * @param diagnosticExecutionInfo The NNAPI diagnostic execution info
+ * object.
+ * @return Session info id.
+ */
+ int32_t (*SL_ANeuralNetworksDiagnosticExecutionInfo_getSessionId)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * Gets NNAPI version.
+ *
+   * @param diagnosticExecutionInfo The NNAPI diagnostic execution info
+ * object.
+ * @return NNAPI version.
+ */
+ int64_t (*SL_ANeuralNetworksDiagnosticExecutionInfo_getNnApiVersion)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * Gets the hash of the model architecture (without weights).
+ *
+   * @param diagnosticExecutionInfo The NNAPI diagnostic execution info
+ * object.
+ * @return Model hash.
+ */
+ const uint8_t* (*SL_ANeuralNetworksDiagnosticExecutionInfo_getModelArchHash)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * Gets the device IDs as a comma-concatenated string.
+ *
+   * @param diagnosticExecutionInfo The NNAPI diagnostic execution info
+ * object.
+ * @return Device ID.
+ */
+ const char* (*SL_ANeuralNetworksDiagnosticExecutionInfo_getDeviceIds)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * Gets the execution mode.
+ *
+   * @param diagnosticExecutionInfo The NNAPI diagnostic execution info
+ * object.
+ * @return Execution mode.
+ */
+ ANeuralNetworksDiagnosticExecutionMode (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getExecutionMode)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * Gets the input data class.
+ *
+   * @param diagnosticExecutionInfo The NNAPI diagnostic execution info
+ * object.
+ * @return Input data class.
+ */
+ ANeuralNetworksDiagnosticDataClass (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getInputDataClass)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * Gets the output data class.
+ *
+   * @param diagnosticExecutionInfo The NNAPI diagnostic execution info
+ * object.
+ * @return Output data class.
+ */
+ ANeuralNetworksDiagnosticDataClass (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getOutputDataClass)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * Gets the error code.
+ *
+   * @param diagnosticExecutionInfo The NNAPI diagnostic execution info
+ * object.
+ * @return Error code.
+ */
+ uint32_t (*SL_ANeuralNetworksDiagnosticExecutionInfo_getErrorCode)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * Gets the time taken to execute from runtime, including runtime/ipc
+ * overhead.
+ *
+   * @param diagnosticExecutionInfo The NNAPI diagnostic execution info
+ * object.
+ * @return Time taken to execute as measured by the runtime in nanoseconds.
+ * UINT64_MAX indicates that timing information is not available.
+ */
+ uint64_t (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getRuntimeExecutionTimeNanos)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * Gets the time taken to execute in the driver, excluding runtime/ipc
+ * overhead.
+ *
+   * @param diagnosticExecutionInfo The NNAPI diagnostic execution info
+ * object.
+ * @return Time taken to execute on the driver in nanoseconds. UINT64_MAX
+ * indicates that timing information is not available.
+ */
+ uint64_t (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getDriverExecutionTimeNanos)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * Gets the time taken to execute on the hardware, excluding driver overhead.
+ *
+   * @param diagnosticExecutionInfo The NNAPI diagnostic execution info
+ * object.
+ * @return Time taken to execute on the hardware in nanoseconds. UINT64_MAX
+ * indicates that timing information is not available.
+ */
+ uint64_t (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getHardwareExecutionTimeNanos)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * Is caching enabled?
+ *
+   * @param diagnosticExecutionInfo The NNAPI diagnostic execution info
+ * object.
+ * @return Whether caching is enabled.
+ */
+ bool (*SL_ANeuralNetworksDiagnosticExecutionInfo_isCachingEnabled)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * Is control flow used?
+ *
+   * @param diagnosticExecutionInfo The NNAPI diagnostic execution info
+ * object.
+ * @return Whether control flow was used.
+ */
+ bool (*SL_ANeuralNetworksDiagnosticExecutionInfo_isControlFlowUsed)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * Are dynamic tensors used?
+ *
+   * @param diagnosticExecutionInfo The NNAPI diagnostic execution info
+ * object.
+ * @return Whether dynamic tensors were used.
+ */
+ bool (*SL_ANeuralNetworksDiagnosticExecutionInfo_areDynamicTensorsUsed)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * Sets the callbacks to be called when compilations or executions finish.
+ *
+ * Example usage:
+ *
+ * // Callback to be invoked whenever a compilation has completed.
+ * void compilationCallback(void* context,
+ * ANeuralNetworksDiagnosticCompilationInfo* info) {
+ * // The context object can be used to store state without the use of a
+ * global variable. ExampleLoggerObject* logger =
+ * static_cast<ExampleLoggerObject*>(context);
+ *
+ * // Calls to getters to get the details...
+ * const int32_t sessionId =
+ * ANeuralNetworksDiagnosticCompilationInfo_getSessionId(info);
+ *
+ * ...
+ *
+ * logger->write(...);
+ * }
+ *
+ * void executionCallback(void* context,
+ * ANeuralNetworksDiagnosticExecutionInfo* info) {
+ * ...
+ * }
+ *
+ * ExampleLoggerObject exampleLoggerObject;
+ * ANeuralNetworksDiagnostic_registerCallbacks(&compilationCallback,
+ * &executionCallback, static_cast<void*>(&exampleLoggerObject));
+ *
+ * @param compilationCallback The compilation callback to set.
+ * @param executionCallback The execution callback to set.
+ * @param callbackContext The context to be passed to the callbacks when they
+ * are invoked. The context object may be used by multiple threads
+   * simultaneously, so it must be thread-safe.
+ */
+ void (*SL_ANeuralNetworksDiagnostic_registerCallbacks)(
+ ANeuralNetworksDiagnosticCompilationFinishedCallback compilationCallback,
+ ANeuralNetworksDiagnosticExecutionFinishedCallback executionCallback,
+ void* callbackContext);
};
/**
diff --git a/tensorflow/lite/nnapi/sl/public/NeuralNetworksSupportLibraryImpl.h b/tensorflow/lite/nnapi/sl/public/NeuralNetworksSupportLibraryImpl.h
index d55374c..d39ed3d 100644
--- a/tensorflow/lite/nnapi/sl/public/NeuralNetworksSupportLibraryImpl.h
+++ b/tensorflow/lite/nnapi/sl/public/NeuralNetworksSupportLibraryImpl.h
@@ -32,6 +32,7 @@
* - DO NOT CHANGE THE LAYOUT OR SIZE OF STRUCTURES
*/
+#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
@@ -56,13 +57,13 @@
* a strict superset of NnApiSLDriverImplFL(N), and NnApiSLDriverImplFL(M)* can
* be reinterpret_cast to NnApiSLDriverImplFL(N)* safely.
*/
-struct NnApiSLDriverImpl {
+typedef struct NnApiSLDriverImpl {
/**
* Version of the NnApiSLDriverImpl struct. Uses {@link FeatureLevelCode}
* values for versioning.
*/
int64_t implFeatureLevel;
-};
+} NnApiSLDriverImpl;
/**
* NnApiSLDriverImpl for an Updatable SL Driver implementing {@link
@@ -71,7 +72,7 @@
* This struct must set its implFeatureLevel to {@link
* ANEURALNETWORKS_FEATURE_LEVEL_5}.
*/
-struct NnApiSLDriverImplFL5 {
+typedef struct NnApiSLDriverImplFL5 {
/**
* Base type with version information. Allows to cast a pointer of this type
* to NnApiSLDriverImpl* with valid version information.
@@ -771,10 +772,288 @@
int64_t (*ANeuralNetworks_getRuntimeFeatureLevel)();
/**
- * 72nd pointer required to align to 8 bytes on 32bit archs.
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_getSessionId}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_getSessionId}, at the feature
+ * level of this NnApiSLDriver struct.
*/
- void (*reserved_placeholder1)();
-};
+ int32_t (*SL_ANeuralNetworksDiagnosticCompilationInfo_getSessionId)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_getNnApiVersion}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_getNnApiVersion}, at the
+ * feature level of this NnApiSLDriver struct.
+ */
+ int64_t (*SL_ANeuralNetworksDiagnosticCompilationInfo_getNnApiVersion)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_getModelArchHash}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_getModelArchHash}, at the
+ * feature level of this NnApiSLDriver struct.
+ */
+ const uint8_t* (
+ *SL_ANeuralNetworksDiagnosticCompilationInfo_getModelArchHash)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_getDeviceIds}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_getDeviceIds}, at the feature
+ * level of this NnApiSLDriver struct.
+ */
+ const char* (*SL_ANeuralNetworksDiagnosticCompilationInfo_getDeviceIds)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_getErrorCode}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_getErrorCode}, at the feature
+ * level of this NnApiSLDriver struct.
+ */
+ int32_t (*SL_ANeuralNetworksDiagnosticCompilationInfo_getErrorCode)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_getInputDataClass}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_getInputDataClass}, at the
+ * feature level of this NnApiSLDriver struct.
+ */
+ ANeuralNetworksDiagnosticDataClass (
+ *SL_ANeuralNetworksDiagnosticCompilationInfo_getInputDataClass)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_getOutputDataClass}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_getOutputDataClass}, at the
+ * feature level of this NnApiSLDriver struct.
+ */
+ ANeuralNetworksDiagnosticDataClass (
+ *SL_ANeuralNetworksDiagnosticCompilationInfo_getOutputDataClass)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_getCompilationTimeNanos}.
+ * Behavior, arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_getCompilationTimeNanos}, at
+ * the feature level of this NnApiSLDriver struct.
+ */
+ uint64_t (
+ *SL_ANeuralNetworksDiagnosticCompilationInfo_getCompilationTimeNanos)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_isCachingEnabled}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_isCachingEnabled}, at the
+ * feature level of this NnApiSLDriver struct.
+ */
+ bool (*SL_ANeuralNetworksDiagnosticCompilationInfo_isCachingEnabled)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_isControlFlowUsed}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_isControlFlowUsed}, at the
+ * feature level of this NnApiSLDriver struct.
+ */
+ bool (*SL_ANeuralNetworksDiagnosticCompilationInfo_isControlFlowUsed)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_areDynamicTensorsUsed}.
+ * Behavior, arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticCompilationInfo_areDynamicTensorsUsed}, at the
+ * feature level of this NnApiSLDriver struct.
+ */
+ bool (*SL_ANeuralNetworksDiagnosticCompilationInfo_areDynamicTensorsUsed)(
+ const ANeuralNetworksDiagnosticCompilationInfo*
+ diagnosticCompilationInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getSessionId}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getSessionId}, at the feature
+ * level of this NnApiSLDriver struct.
+ */
+ int32_t (*SL_ANeuralNetworksDiagnosticExecutionInfo_getSessionId)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getNnApiVersion}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getNnApiVersion}, at the feature
+ * level of this NnApiSLDriver struct.
+ */
+ int64_t (*SL_ANeuralNetworksDiagnosticExecutionInfo_getNnApiVersion)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getModelArchHash}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getModelArchHash}, at the feature
+ * level of this NnApiSLDriver struct.
+ */
+ const uint8_t* (*SL_ANeuralNetworksDiagnosticExecutionInfo_getModelArchHash)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getDeviceIds}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getDeviceIds}, at the feature
+ * level of this NnApiSLDriver struct.
+ *
+ * NOTE(review): plural "Ids" returned through a single const char* —
+ * presumably a delimiter-separated string of device identifiers; format,
+ * ownership, and lifetime are not documented here — confirm.
+ */
+ const char* (*SL_ANeuralNetworksDiagnosticExecutionInfo_getDeviceIds)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getExecutionMode}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getExecutionMode}, at the feature
+ * level of this NnApiSLDriver struct.
+ *
+ * Returns a value of the ANeuralNetworksDiagnosticExecutionMode enum
+ * (sync / async / burst / async-with-deps, or unknown).
+ */
+ ANeuralNetworksDiagnosticExecutionMode (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getExecutionMode)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getInputDataClass}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getInputDataClass}, at the
+ * feature level of this NnApiSLDriver struct.
+ *
+ * Returns a value of the ANeuralNetworksDiagnosticDataClass enum
+ * (float32 / float16 / quant / mixed / other / unknown) for the inputs.
+ */
+ ANeuralNetworksDiagnosticDataClass (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getInputDataClass)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getOutputDataClass}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getOutputDataClass}, at the
+ * feature level of this NnApiSLDriver struct.
+ *
+ * Returns a value of the ANeuralNetworksDiagnosticDataClass enum
+ * (float32 / float16 / quant / mixed / other / unknown) for the outputs.
+ */
+ ANeuralNetworksDiagnosticDataClass (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getOutputDataClass)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getErrorCode}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getErrorCode}, at the feature
+ * level of this NnApiSLDriver struct.
+ *
+ * NOTE(review): which error-code namespace this uint32_t belongs to
+ * (ANEURALNETWORKS_* result codes vs. something driver-specific) is not
+ * stated here — confirm against the NNAPI runtime documentation.
+ */
+ uint32_t (*SL_ANeuralNetworksDiagnosticExecutionInfo_getErrorCode)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getRuntimeExecutionTimeNanos}.
+ * Behavior, arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getRuntimeExecutionTimeNanos}, at
+ * the feature level of this NnApiSLDriver struct.
+ *
+ * NOTE(review): per the name, time in nanoseconds measured at the NNAPI
+ * runtime layer; the sentinel used when the value is unavailable is not
+ * documented here — confirm.
+ */
+ uint64_t (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getRuntimeExecutionTimeNanos)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getDriverExecutionTimeNanos}.
+ * Behavior, arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getDriverExecutionTimeNanos}, at
+ * the feature level of this NnApiSLDriver struct.
+ *
+ * NOTE(review): per the name, time in nanoseconds measured at the driver
+ * layer; the sentinel used when the value is unavailable is not documented
+ * here — confirm.
+ */
+ uint64_t (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getDriverExecutionTimeNanos)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getHardwareExecutionTimeNanos}.
+ * Behavior, arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_getHardwareExecutionTimeNanos},
+ * at the feature level of this NnApiSLDriver struct.
+ *
+ * NOTE(review): per the name, time in nanoseconds spent on the accelerator
+ * hardware itself; the sentinel used when the value is unavailable is not
+ * documented here — confirm.
+ */
+ uint64_t (
+ *SL_ANeuralNetworksDiagnosticExecutionInfo_getHardwareExecutionTimeNanos)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_isCachingEnabled}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_isCachingEnabled}, at the feature
+ * level of this NnApiSLDriver struct.
+ *
+ * NOTE(review): presumably refers to NNAPI compilation caching for the
+ * executed model — confirm against NNAPI docs.
+ */
+ bool (*SL_ANeuralNetworksDiagnosticExecutionInfo_isCachingEnabled)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_isControlFlowUsed}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_isControlFlowUsed}, at the
+ * feature level of this NnApiSLDriver struct.
+ *
+ * NOTE(review): presumably true when the model contains control-flow
+ * operations (e.g. IF / WHILE) — confirm against NNAPI docs.
+ */
+ bool (*SL_ANeuralNetworksDiagnosticExecutionInfo_isControlFlowUsed)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_areDynamicTensorsUsed}. Behavior,
+ * arguments, and outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnosticExecutionInfo_areDynamicTensorsUsed}, at the
+ * feature level of this NnApiSLDriver struct.
+ *
+ * NOTE(review): presumably reports whether the executed model contains
+ * tensors of dynamic (not fully specified) shape — confirm against NNAPI
+ * docs.
+ */
+ bool (*SL_ANeuralNetworksDiagnosticExecutionInfo_areDynamicTensorsUsed)(
+ const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo);
+
+ /**
+ * SL Driver implementation of {@link
+ * SL_ANeuralNetworksDiagnostic_registerCallbacks}. Behavior, arguments, and
+ * outputs match NNAPI Runtime function {@link
+ * SL_ANeuralNetworksDiagnostic_registerCallbacks}, at the feature level of
+ * this NnApiSLDriver struct.
+ *
+ * NOTE(review): callbackContext is presumably forwarded as the `context`
+ * argument of the two callback typedefs on every invocation; whether NULL
+ * callbacks are permitted (to unregister) is not documented here — confirm
+ * against the NNAPI runtime contract.
+ */
+ void (*SL_ANeuralNetworksDiagnostic_registerCallbacks)(
+ ANeuralNetworksDiagnosticCompilationFinishedCallback compilationCallback,
+ ANeuralNetworksDiagnosticExecutionFinishedCallback executionCallback,
+ void* callbackContext);
+} NnApiSLDriverImplFL5;
#ifdef __cplusplus
} // extern "C"