Merge "Add MobileNet v3 groups"
diff --git a/Android.mk b/Android.mk
index 8b08f69..d69fa86 100644
--- a/Android.mk
+++ b/Android.mk
@@ -17,7 +17,6 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
-
LOCAL_STATIC_JAVA_LIBRARIES := androidx.test.rules
LOCAL_JAVA_LIBRARIES := android.test.runner.stubs android.test.base.stubs
@@ -48,7 +47,7 @@
LOCAL_COMPATIBILITY_SUITE += device-tests
LOCAL_SRC_FILES := $(call all-java-files-under, src)
-LOCAL_JNI_SHARED_LIBRARIES := libnnbenchmark_jni
+LOCAL_JNI_SHARED_LIBRARIES := libnnbenchmark_jni librandom_graph_test_jni
# need fread_unlocked in version 28
LOCAL_SDK_VERSION := 28
diff --git a/AndroidManifest.xml b/AndroidManifest.xml
index 777a005..4ac42c5 100644
--- a/AndroidManifest.xml
+++ b/AndroidManifest.xml
@@ -51,6 +51,11 @@
<action android:name="android.intent.action.MAIN" />
</intent-filter>
</activity>
+ <activity android:name="com.android.nn.crashtest.app.NNRandomGraphTestActivity">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN" />
+ </intent-filter>
+ </activity>
<service android:name="com.android.nn.crashtest.core.OutOfProcessCrashTestService"
android:process=":CrashTest" />
diff --git a/README.txt b/README.txt
index 0a91f48..99042b9 100644
--- a/README.txt
+++ b/README.txt
@@ -67,4 +67,8 @@
early termination
* memory-mapped-model-load-stress: runs a series of parallel model compilation with memory mapped
-TFLite models
\ No newline at end of file
+TFLite models
+
+* model-load-random-stress: test compiling a large set of randomly generated models
+
+* inference-random-stress: test running a large set of randomly generated models
\ No newline at end of file
diff --git a/build_and_run_benchmark.sh b/build_and_run_benchmark.sh
index 491ad4b..b1ccec7 100755
--- a/build_and_run_benchmark.sh
+++ b/build_and_run_benchmark.sh
@@ -7,13 +7,14 @@
# which is not logged.
-OPTS="$(getopt -o f:r -l filter-driver:,include-nnapi-reference,nnapi-reference-only -- "$@")"
+OPTS="$(getopt -o f:rb -l filter-driver:,include-nnapi-reference,nnapi-reference-only,skip-build -- "$@")"
if [ $? -ne 0 ]; then
echo "Invalid arguments, accepted options are"
echo " -f <regex> | --filter-driver <regex> : to run crash tests only on the drivers (ignoring nnapi-reference) matching the specified regular expression"
echo " -r | --include-nnapi-reference : to include nnapi-reference in target drivers"
echo " --nnapi-reference-only : to run tests only vs nnapi-reference"
+ echo " -b | --skip-build : skip build and installation of tests"
exit
fi
@@ -21,6 +22,7 @@
DRIVER_FILTER_OPT=""
INCLUDE_NNAPI_REF_OPT=""
+BUILD_AND_INSTALL=true
while [ $# -gt 0 ] ; do
case "$1" in
-f|--filter-driver)
@@ -36,6 +38,10 @@
INCLUDE_NNAPI_REF_OPT="-e nnCrashtestIncludeNnapiReference true"
shift
;;
+ -b|--skip-build)
+ BUILD_AND_INSTALL=false
+ shift
+ ;;
--)
shift
break
@@ -48,6 +54,7 @@
MODE="${1:-scoring}"
INSTALL_NATIVE_TESTS=false
+CRASH_TEST_APP="NeuralNetworksApiCrashTest"
APP="NeuralNetworksApiBenchmark"
case "$MODE" in
scoring)
@@ -61,36 +68,45 @@
;;
parallel-inference-stress)
CLASS=com.android.nn.crashtest.app.NNParallelCrashResistantInferenceTest
- APP="NeuralNetworksApiCrashTest"
+ APP="$CRASH_TEST_APP"
;;
parallel-inference-stress-in-process)
CLASS=com.android.nn.crashtest.app.NNParallelInProcessInferenceTest
- APP="NeuralNetworksApiCrashTest"
+ APP="$CRASH_TEST_APP"
;;
client-early-termination-stress)
CLASS=com.android.nn.crashtest.app.NNClientEarlyTerminationTest
- APP="NeuralNetworksApiCrashTest"
+ APP="$CRASH_TEST_APP"
;;
multi-process-inference-stress)
CLASS=com.android.nn.crashtest.app.NNMultipleProcessInferenceTest
- APP="NeuralNetworksApiCrashTest"
+ APP="$CRASH_TEST_APP"
INSTALL_NATIVE_TESTS=true
;;
multi-process-model-load-stress)
CLASS=com.android.nn.crashtest.app.NNMultipleProcessModelLoadTest
- APP="NeuralNetworksApiCrashTest"
+ APP="$CRASH_TEST_APP"
INSTALL_NATIVE_TESTS=true
;;
memory-mapped-model-load-stress)
CLASS=com.android.nn.crashtest.app.NNMemoryMappedModelCompilationTest
- APP="NeuralNetworksApiCrashTest"
+ APP="$CRASH_TEST_APP"
+ ;;
+ model-load-random-stress)
+ APP="$CRASH_TEST_APP"
+ CLASS=com.android.nn.crashtest.app.NNRandomGraphLoadTest
+ ;;
+ inference-random-stress)
+ APP="$CRASH_TEST_APP"
+ CLASS=com.android.nn.crashtest.app.NNRandomGraphExecutionTest
;;
*)
echo "Unknown execution mode: $1"
echo "Known modes: scoring (default), inference-stress, model-loading-stress, " \
"parallel-inference-stress, parallel-inference-stress-in-process, " \
"client-early-termination-stress, multi-process-inference-stress, " \
- "multi-process-model-load-stress memory-mapped-model-load-stress"
+      "multi-process-model-load-stress, memory-mapped-model-load-stress, " \
+      "model-load-random-stress, inference-random-stress"
exit 1
;;
esac
@@ -104,23 +120,25 @@
set -e
cd $ANDROID_BUILD_TOP
-# Build and install benchmark app
-TMPFILE=$(mktemp)
-build/soong/soong_ui.bash --make-mode ${APP} 2>&1 | tee ${TMPFILE}
-TARGET_ARCH=$(cat ${TMPFILE} | grep TARGET_ARCH= | sed -e 's/TARGET_ARCH=//')
-if [ "${TARGET_ARCH}" = "aarch64" ]; then
- APK_DIR=arm64
-else
- APK_DIR=${TARGET_ARCH}
-fi
-if ! adb install -r $OUT/testcases/${APP}/${APK_DIR}/${APP}.apk; then
- adb uninstall com.android.nn.benchmark.app
- adb install -r $OUT/testcases/${APP}/${APK_DIR}/${APP}.apk
-fi
+if [ "$BUILD_AND_INSTALL" = true ]; then
+ # Build and install benchmark app
+ TMPFILE=$(mktemp)
+ build/soong/soong_ui.bash --make-mode ${APP} 2>&1 | tee ${TMPFILE}
+ TARGET_ARCH=$(cat ${TMPFILE} | grep TARGET_ARCH= | sed -e 's/TARGET_ARCH=//')
+ if [ "${TARGET_ARCH}" = "aarch64" ]; then
+ APK_DIR=arm64
+ else
+ APK_DIR=${TARGET_ARCH}
+ fi
+ if ! adb install -r $OUT/testcases/${APP}/${APK_DIR}/${APP}.apk; then
+ adb uninstall com.android.nn.benchmark.app
+ adb install -r $OUT/testcases/${APP}/${APK_DIR}/${APP}.apk
+ fi
-if [ "$INSTALL_NATIVE_TESTS" = true ]; then
- build/soong/soong_ui.bash --make-mode nn_stress_test
- adb push $OUT/system/bin/nn_stress_test /bin/
+ if [ "$INSTALL_NATIVE_TESTS" = true ]; then
+ build/soong/soong_ui.bash --make-mode nn_stress_test
+ adb push $OUT/system/bin/nn_stress_test /bin/
+ fi
fi
# Should we figure out if we run on release device
diff --git a/jni/Android.bp b/jni/Android.bp
index 50d99e8..80872da 100644
--- a/jni/Android.bp
+++ b/jni/Android.bp
@@ -38,6 +38,9 @@
"-Wno-unused-parameter",
],
stl: "libc++_static",
+ strip: {
+ keep_symbols: true,
+ },
}
cc_library_shared {
@@ -48,10 +51,40 @@
cc_library_static {
name: "libnnbenchmark_jni_static",
defaults: ["libnnbenchmark_jni_defaults"],
+ export_include_dirs: ["."],
}
-cc_library_headers {
- name: "libnnbenchmark_jni_headers",
+cc_library {
+ name: "librandom_graph_test_jni",
+ defaults: ["neuralnetworks_float16"],
+ srcs: [
+ "random_graph_test_jni.cpp",
+ ],
+ header_libs: [
+ "jni_headers",
+ "libneuralnetworks_headers_ndk",
+ "libneuralnetworks_generated_test_harness_headers_for_cts",
+ ],
+ shared_libs: [
+ "libandroid",
+ "liblog",
+ "libneuralnetworks",
+ ],
+ whole_static_libs: [
+ "NeuralNetworksTest_random_graph",
+ ],
+ static_libs: [
+ "libbase_ndk",
+ "libgmock_ndk",
+ "libgtest_ndk_c++",
+ ],
+ cflags: [
+ "-Wno-sign-compare",
+ "-Wno-unused-parameter",
+ ],
sdk_version: "current",
- export_include_dirs: ["."],
+ stl: "libc++_static",
+ strip: {
+ keep_symbols: true,
+ },
}
diff --git a/jni/benchmark_jni.cpp b/jni/benchmark_jni.cpp
index 8565e7d..9d741a3 100644
--- a/jni/benchmark_jni.cpp
+++ b/jni/benchmark_jni.cpp
@@ -326,7 +326,6 @@
&rentry.meanSquareErrors[0],
rentry.meanSquareErrors.size() * sizeof(float));
env->ReleaseFloatArrayElements(meanSquareErrorArray, bytes, 0);
- env->DeleteLocalRef(meanSquareErrorArray);
}
{
jfloat *bytes = env->GetFloatArrayElements(maxSingleErrorArray, nullptr);
@@ -334,7 +333,6 @@
&rentry.maxSingleErrors[0],
rentry.maxSingleErrors.size() * sizeof(float));
env->ReleaseFloatArrayElements(maxSingleErrorArray, bytes, 0);
- env->DeleteLocalRef(maxSingleErrorArray);
}
}
@@ -365,8 +363,14 @@
env->CallBooleanMethod(resultList, list_add, object);
if (env->ExceptionCheck()) { return false; }
- // Releasing local reference to object to avoid local reference table overflow
+ // Releasing local references to objects to avoid local reference table overflow
// if tests is set to run for long time.
+ if (meanSquareErrorArray) {
+ env->DeleteLocalRef(meanSquareErrorArray);
+ }
+ if (maxSingleErrorArray) {
+ env->DeleteLocalRef(maxSingleErrorArray);
+ }
env->DeleteLocalRef(object);
}
}
diff --git a/jni/crashtest_jni.cpp b/jni/crashtest_jni.cpp
index 24ea7da..3540a76 100644
--- a/jni/crashtest_jni.cpp
+++ b/jni/crashtest_jni.cpp
@@ -25,7 +25,7 @@
#define LOG_TAG "CrashTest"
extern "C" JNIEXPORT void JNICALL
-Java_com_android_nn_benchmark_crashtest_test_CrashingCrashTest_nativeSegViolation(
+Java_com_android_nn_crashtest_core_test_CrashingCrashTest_nativeSegViolation(
JNIEnv* env, jobject /* this */) {
__android_log_print(ANDROID_LOG_INFO, LOG_TAG, "Causing NATIVE crash");
diff --git a/jni/random_graph_test_jni.cpp b/jni/random_graph_test_jni.cpp
new file mode 100644
index 0000000..0d8558a
--- /dev/null
+++ b/jni/random_graph_test_jni.cpp
@@ -0,0 +1,580 @@
+/**
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "NN_RAND_MODEL"
+
+#include <android-base/logging.h>
+#include <jni.h>
+
+#include <algorithm>
+#include <fstream>
+#include <memory>
+#include <optional>
+#include <random>
+#include <set>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include "GeneratedTestUtils.h"
+#include "fuzzing/OperationManager.h"
+#include "fuzzing/RandomGraphGenerator.h"
+#include "fuzzing/RandomGraphGeneratorUtils.h"
+
+extern "C" JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void* reserved) {
+ android::base::InitLogging(nullptr, android::base::LogdLogger());
+ android::base::SetMinimumLogSeverity(android::base::INFO);
+ return JNI_VERSION_1_6;
+}
+
+enum RandomModelExecutionResult {
+ kSuccess = 0,
+ kFailedCompilation,
+ kFailedExecution,
+ kFailedOtherNnApiCall,
+ // The following conditions are for internal retry
+ kInvalidModelGenerated,
+ kUnsupportedModelGenerated
+};
+
+class FuzzerLogRAII {
+ public:
+ FuzzerLogRAII(const std::string& nnapiLogPath) {
+ using android::nn::fuzzing_test::alignedString;
+ using android::nn::fuzzing_test::Logger;
+ using android::nn::fuzzing_test::LoggerStream;
+
+ NN_FUZZER_LOG_WRITE_FATAL_TO_SYSLOG(LOG_TAG);
+
+ mFuzzerLogOpen = false;
+ if (!nnapiLogPath.empty()) {
+ // Checking if we can write to target file
+ std::ofstream os;
+ os.open(nnapiLogPath);
+
+ if (os.fail()) {
+ LOG(ERROR) << "Opening file " << nnapiLogPath << " failed";
+ } else {
+ NN_FUZZER_LOG_INIT(nnapiLogPath);
+ LOG(INFO) << "Logging NNAPI to file " << nnapiLogPath;
+ mFuzzerLogOpen = true;
+ }
+ }
+ }
+ ~FuzzerLogRAII() {
+ if (mFuzzerLogOpen) {
+ using android::nn::fuzzing_test::alignedString;
+ using android::nn::fuzzing_test::Logger;
+ using android::nn::fuzzing_test::LoggerStream;
+
+ NN_FUZZER_LOG_CLOSE;
+ }
+ }
+
+ private:
+ bool mFuzzerLogOpen;
+};
+
+std::vector<test_helper::TestOperationType> getOperationsInModel(
+ const test_helper::TestModel& testModel) {
+ std::vector<test_helper::TestOperationType> result;
+ testModel.forEachSubgraph(
+ [&result](const test_helper::TestSubgraph& subgraph) {
+ for (const auto& operation : subgraph.operations) {
+ result.push_back(operation.type);
+ }
+ });
+
+ return result;
+}
+
+const ANeuralNetworksDevice* findDeviceByName(const char* deviceName) {
+ if (!deviceName) return nullptr;
+
+ std::string deviceNameStr(deviceName);
+ uint32_t numDevices = 0;
+ ANeuralNetworks_getDeviceCount(&numDevices);
+
+ for (uint32_t i = 0; i < numDevices; i++) {
+ ANeuralNetworksDevice* device = nullptr;
+ const char* buffer = nullptr;
+ int getDeviceResult = ANeuralNetworks_getDevice(i, &device);
+ if (getDeviceResult != ANEURALNETWORKS_NO_ERROR) {
+ LOG(ERROR) << "Unable to get NNAPI device " << i << ": "
+ << getDeviceResult;
+ return nullptr;
+ }
+
+ int getDeviceNameResult = ANeuralNetworksDevice_getName(device, &buffer);
+ if (getDeviceNameResult != ANEURALNETWORKS_NO_ERROR) {
+ LOG(ERROR) << "Unable to get name of NNAPI device " << i << ": "
+ << getDeviceNameResult;
+ return nullptr;
+ }
+
+ if (deviceNameStr == buffer) {
+ return device;
+ }
+ }
+
+ LOG(ERROR) << "No device with name " << deviceNameStr;
+ return nullptr;
+}
+
+const ANeuralNetworksDevice* getNnApiReferenceDevice() {
+ return findDeviceByName("nnapi-reference");
+}
+
+class RandomGraphGenerator {
+ public:
+ RandomGraphGenerator(const ANeuralNetworksDevice* device,
+ const std::string& deviceName,
+ const std::string& testName, uint32_t numOperations,
+ uint32_t dimensionRange, std::string nnapiLogPath,
+ std::string failedModelDumpPath)
+ : mTestName(testName),
+ mDevice(device),
+ mDeviceName(deviceName),
+ mNnApiReference(getNnApiReferenceDevice()),
+ mSupportedOpsFilter(),
+ mNumOperations(numOperations),
+ mDimensionRange(dimensionRange),
+ nnapiFuzzerLogRAII(nnapiLogPath),
+ mFailedModelDumpPath(failedModelDumpPath) {}
+
+ RandomModelExecutionResult init() {
+ // Limiting the ops in the generator to a subset we know the target device
+ // supports to avoid failing the test because we are unable to find a
+ // suitable model to compile.
+ RandomModelExecutionResult filterInitResult;
+ filterInitResult =
+ HalVersionsSupportedByDevice(&mSupportedOpsFilter.versions);
+ if (filterInitResult != kSuccess) return filterInitResult;
+
+ filterInitResult =
+ OperandTypesSupportedByDevice(&mSupportedOpsFilter.dataTypes);
+ if (filterInitResult != kSuccess) return filterInitResult;
+
+ return OperationsSupportedByDevice(mSupportedOpsFilter,
+ &mSupportedOpsFilter.opcodes);
+ }
+
+ RandomModelExecutionResult runRandomModel(bool compilationOnly) {
+ using android::nn::generated_tests::createModel;
+ using android::nn::generated_tests::createRequest;
+ using android::nn::generated_tests::GeneratedModel;
+ using android::nn::test_wrapper::Compilation;
+ using android::nn::test_wrapper::Execution;
+ using android::nn::wrapper::Result;
+
+ std::optional<test_helper::TestModel> testModel =
+ createRandomModel(mSupportedOpsFilter);
+ if (!testModel) {
+ LOG(ERROR) << mTestName << ": No model generated";
+ return kInvalidModelGenerated;
+ }
+
+ GeneratedModel model;
+ createModel(*testModel, &model);
+ if (!model.isValid()) {
+ LOG(ERROR) << mTestName << ": Randomly generated model is not valid";
+ return kInvalidModelGenerated;
+ }
+ auto modelFinishResult = model.finish();
+ if (modelFinishResult != Result::NO_ERROR) {
+ LOG(ERROR) << mTestName << ": Failed to finish model, result is "
+ << static_cast<int>(modelFinishResult);
+ return kInvalidModelGenerated;
+ }
+
+ bool fullySupportedModel = false;
+ if (mDevice) {
+ std::unique_ptr<bool[]> opsSupportedFlags =
+ std::make_unique<bool[]>(mNumOperations);
+ std::fill(opsSupportedFlags.get(),
+ opsSupportedFlags.get() + mNumOperations, false);
+ // Check if the device fully supports the graph.
+ int supportedOpResult =
+ ANeuralNetworksModel_getSupportedOperationsForDevices(
+ model.getHandle(), &mDevice, 1, opsSupportedFlags.get());
+ if (supportedOpResult != ANEURALNETWORKS_NO_ERROR) {
+ return kFailedOtherNnApiCall;
+ }
+
+ // accepting the model even if partially supported since we found that it
+ // is extremely difficult to have fully supported models.
+ // We could consider a minimum number (or percentage of total number) of
+ // operations to be supported to consider the model acceptable. For the
+ // moment we just accept any model that has any supported op.
+ bool supported = std::any_of(opsSupportedFlags.get(),
+ opsSupportedFlags.get() + mNumOperations,
+ [](bool v) { return v; });
+ if (!supported) {
+ return kUnsupportedModelGenerated;
+ }
+
+ fullySupportedModel = std::all_of(
+ opsSupportedFlags.get(), opsSupportedFlags.get() + mNumOperations,
+ [](bool v) { return v; });
+ }
+
+ std::vector<const ANeuralNetworksDevice*> devices;
+ if (mDevice) {
+ devices.push_back(mDevice);
+ if (!fullySupportedModel) {
+ // If model is not fully supported we allow NNAPI to use reference
+ // implementation. This is to avoid having this test constantly
+ // nullified by the inability of finding a fully supported model.
+ LOG(VERBOSE) << "Allowing model to be partially executed on NNAPI reference device";
+ devices.push_back(mNnApiReference);
+ }
+ }
+
+ auto [compilationResult, compilation] = CreateCompilation(model, devices);
+ if (compilationResult != Result::NO_ERROR) {
+ LOG(WARNING) << mTestName << ": Compilation preparation failed with result "
+ << static_cast<int>(compilationResult);
+
+ dumpModel(*testModel);
+ return kFailedCompilation;
+ }
+ compilationResult = compilation.finish();
+ if (compilationResult != Result::NO_ERROR) {
+ LOG(WARNING) << mTestName << ": Compilation failed with result "
+ << static_cast<int>(compilationResult);
+
+ dumpModel(*testModel);
+ return kFailedCompilation;
+ }
+
+ if (!compilationOnly) {
+ Execution execution(&compilation);
+ std::vector<test_helper::TestBuffer> outputs;
+ createRequest(*testModel, &execution, &outputs);
+
+ // Compute result.
+ Result executeReturn = execution.compute();
+ if (executeReturn != Result::NO_ERROR) {
+ LOG(WARNING) << mTestName << ": Execution failed with result "
+ << static_cast<int>(executeReturn);
+
+ dumpModel(*testModel);
+ return kFailedExecution;
+ }
+ }
+
+ return kSuccess;
+ }
+
+ const std::string mTestName;
+
+ private:
+ android::nn::fuzzing_test::RandomGraph mRandomGraph;
+ std::random_device mSeedGenerator;
+ const ANeuralNetworksDevice* mDevice;
+ // empty string if mDevice is null
+ const std::string mDeviceName;
+ const ANeuralNetworksDevice* mNnApiReference;
+ android::nn::fuzzing_test::OperationFilter mSupportedOpsFilter;
+ const uint32_t mNumOperations;
+ const uint32_t mDimensionRange;
+ FuzzerLogRAII nnapiFuzzerLogRAII;
+ const std::string mFailedModelDumpPath;
+
+ std::optional<test_helper::TestModel> createRandomModel(
+ const android::nn::fuzzing_test::OperationFilter& opFilter) {
+ android::nn::fuzzing_test::OperationManager::get()->applyFilter(opFilter);
+
+ auto seed = mSeedGenerator();
+ if (!mRandomGraph.generate(seed, mNumOperations, mDimensionRange)) {
+ return std::nullopt;
+ }
+
+ return {mRandomGraph.createTestModel()};
+ }
+
+ RandomModelExecutionResult HalVersionsSupportedByDevice(
+ std::vector<test_helper::TestHalVersion>* result) {
+ if (!mDevice) {
+ return kSuccess;
+ }
+
+ int64_t featureLevel;
+ auto getDeviceFeatureLevelResult =
+ ANeuralNetworksDevice_getFeatureLevel(mDevice, &featureLevel);
+ if (getDeviceFeatureLevelResult != ANEURALNETWORKS_NO_ERROR) {
+ LOG(ERROR) << mTestName << ": Unable to query device feature level";
+ return kFailedOtherNnApiCall;
+ }
+
+ if (featureLevel == 27) *result = {test_helper::TestHalVersion::V1_0};
+ if (featureLevel == 28) *result = {test_helper::TestHalVersion::V1_1};
+ if (featureLevel == 29) *result = {test_helper::TestHalVersion::V1_2};
+
+ return kSuccess;
+ }
+
+ RandomModelExecutionResult OperandTypesSupportedByDevice(
+ std::vector<test_helper::TestOperandType>* result) {
+ if (!mDevice) {
+ return kSuccess;
+ }
+
+ int32_t deviceType;
+ auto getDeviceTypeResult =
+ ANeuralNetworksDevice_getType(mDevice, &deviceType);
+ if (getDeviceTypeResult != ANEURALNETWORKS_NO_ERROR) {
+ LOG(ERROR) << mTestName << ": Unable to query device type";
+ return kFailedOtherNnApiCall;
+ }
+ using test_helper::TestOperandType;
+ switch (deviceType) {
+ case ANEURALNETWORKS_DEVICE_GPU:
+ // No quantized types
+ *result = {
+ TestOperandType::FLOAT32, TestOperandType::INT32,
+ TestOperandType::UINT32, TestOperandType::TENSOR_FLOAT32,
+ TestOperandType::TENSOR_INT32, TestOperandType::BOOL,
+ TestOperandType::TENSOR_FLOAT16, TestOperandType::TENSOR_BOOL8,
+ TestOperandType::FLOAT16};
+ break;
+ case ANEURALNETWORKS_DEVICE_CPU:
+ case ANEURALNETWORKS_DEVICE_ACCELERATOR:
+ result->clear(); // no filter
+ break;
+ case ANEURALNETWORKS_DEVICE_UNKNOWN:
+ case ANEURALNETWORKS_DEVICE_OTHER:
+ if (mDeviceName.find("dsp") != std::string::npos) {
+ *result = {TestOperandType::INT32,
+ TestOperandType::UINT32,
+ TestOperandType::TENSOR_INT32,
+ TestOperandType::BOOL,
+ TestOperandType::TENSOR_BOOL8,
+ TestOperandType::TENSOR_QUANT8_ASYMM,
+ TestOperandType::TENSOR_QUANT16_SYMM,
+ TestOperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL,
+ TestOperandType::TENSOR_QUANT16_ASYMM,
+ TestOperandType::TENSOR_QUANT8_SYMM,
+ TestOperandType::TENSOR_QUANT8_ASYMM_SIGNED};
+ break;
+ }
+ FALLTHROUGH_INTENDED;
+ default:
+ result->clear(); // no filter
+ }
+ return kSuccess;
+ }
+
+ /// Finds some operations supported by the device
+ RandomModelExecutionResult OperationsSupportedByDevice(
+ const android::nn::fuzzing_test::OperationFilter& basicFilter,
+ std::vector<test_helper::TestOperationType>* result) {
+ if (!mDevice) {
+ return kSuccess;
+ }
+
+ constexpr int kNumOfAttempts = 50;
+ std::set<test_helper::TestOperationType> supportedOps;
+ for (int i = 0; i < kNumOfAttempts; i++) {
+ std::optional<test_helper::TestModel> testModel =
+ createRandomModel(basicFilter);
+ if (!testModel) {
+ LOG(ERROR)
+ << mTestName
+ << ": Unable to generate a model trying to understand the ops "
+ "supported by target device";
+ continue;
+ }
+
+ android::nn::generated_tests::GeneratedModel model;
+ createModel(*testModel, &model);
+ if (!model.isValid()) {
+ LOG(WARNING) << mTestName << ": Randomly generated model is not valid";
+ continue;
+ }
+ auto modelFinishResult = model.finish();
+ if (modelFinishResult != android::nn::wrapper::Result::NO_ERROR) {
+ LOG(WARNING) << "Model::finish call failed, result is "
+ << static_cast<int>(modelFinishResult);
+ continue;
+ }
+
+ std::unique_ptr<bool[]> opsSupportedFlags =
+ std::make_unique<bool[]>(mNumOperations);
+ std::fill(opsSupportedFlags.get(),
+ opsSupportedFlags.get() + mNumOperations, false);
+
+ // Check if the device fully supports the graph.
+ int supportedOpResult =
+ ANeuralNetworksModel_getSupportedOperationsForDevices(
+ model.getHandle(), &mDevice, 1, opsSupportedFlags.get());
+ if (supportedOpResult != ANEURALNETWORKS_NO_ERROR) {
+ return kFailedOtherNnApiCall;
+ }
+
+ std::vector<test_helper::TestOperationType> opsInModel =
+ getOperationsInModel(*testModel);
+ for (int opIndex = 0; opIndex < mNumOperations; opIndex++) {
+ test_helper::TestOperationType currOp = opsInModel[opIndex];
+ if (opsSupportedFlags[opIndex]) {
+ supportedOps.insert(currOp);
+ }
+ }
+ }
+ std::copy(supportedOps.begin(), supportedOps.end(),
+ std::back_inserter(*result));
+
+ if (result->empty()) {
+ LOG(WARNING)
+ << mTestName
+ << ": Could not find any operation supported by target device."
+ << " Returning no filter.";
+ } else {
+ LOG(INFO) << mTestName << ": Filtering to " << result->size()
+ << " supported operations";
+ }
+
+ return kSuccess;
+ }
+
+ void dumpModel(const test_helper::TestModel& testModel) {
+ if (mFailedModelDumpPath.empty()) return;
+
+ LOG(INFO) << mTestName << ": Dumping model failing tests to "
+ << mFailedModelDumpPath;
+
+ std::ofstream os(mFailedModelDumpPath);
+ ASSERT_TRUE(os.is_open());
+ os << "# Generated from " << mTestName << ". Do not edit.\n\n";
+ test_helper::SpecDumper dumper(testModel, os);
+ dumper.dumpTestModel();
+ }
+
+ std::pair<android::nn::wrapper::Result,
+ android::nn::test_wrapper::Compilation>
+ CreateCompilation(const android::nn::generated_tests::GeneratedModel& model,
+ const std::vector<const ANeuralNetworksDevice*>& devices) {
+ using android::nn::test_wrapper::Compilation;
+ if (!devices.empty())
+ return Compilation::createForDevices(&model, devices);
+ else
+ return {android::nn::wrapper::Result::NO_ERROR, Compilation(&model)};
+ }
+};
+
+extern "C" JNIEXPORT jint JNICALL
+Java_com_android_nn_crashtest_core_RandomGraphTest_runRandomModel(
+ JNIEnv* env, jclass /* static method */, jlong _generatorHandle,
+ jboolean _compilationOnly, jlong _maxModelSearchTimeSeconds) {
+ RandomGraphGenerator* graphGenerator =
+ reinterpret_cast<RandomGraphGenerator*>(_generatorHandle);
+
+ std::time_t startTime = std::time(nullptr);
+
+ int result = kSuccess;
+ int modelSearchAttempt = 0;
+ while (std::difftime(std::time(nullptr), startTime) <
+ _maxModelSearchTimeSeconds) {
+ modelSearchAttempt++;
+
+ result = graphGenerator->runRandomModel(_compilationOnly);
+
+ // if by chance we generated an invalid model or a model that couldn't run
+ // on the target accelerator we will try again.
+ if (result != kInvalidModelGenerated &&
+ result != kUnsupportedModelGenerated) {
+ break;
+ }
+ }
+
+ if (result == kInvalidModelGenerated ||
+ result == kUnsupportedModelGenerated) {
+ LOG(WARNING) << graphGenerator->mTestName
+ << ": Max time to search for a model of "
+ << static_cast<long>(_maxModelSearchTimeSeconds)
+                 << " seconds reached. Aborting test at attempt "
+ << modelSearchAttempt;
+ }
+
+ return result;
+}
+
+extern "C" JNIEXPORT jlong JNICALL
+Java_com_android_nn_crashtest_core_RandomGraphTest_createRandomGraphGenerator(
+ JNIEnv* env, jclass /* static method */, jstring _nnApiDeviceName,
+ jint _numOperations, jint _dimensionRange, jstring _testName,
+ jstring _nnapiLogPath, jstring _failedModelDumpPath) {
+ const char* nnApiDeviceName =
+ _nnApiDeviceName ? env->GetStringUTFChars(_nnApiDeviceName, nullptr)
+ : nullptr;
+
+ std::string nnApiDeviceNameStr{nnApiDeviceName ? nnApiDeviceName : ""};
+ const ANeuralNetworksDevice* device = nullptr;
+ if (nnApiDeviceName) {
+ device = findDeviceByName(nnApiDeviceName);
+ if (!device) {
+ LOG(ERROR) << ": Unable to find accelerator " << nnApiDeviceName;
+ env->ReleaseStringUTFChars(_nnApiDeviceName, nnApiDeviceName);
+ return reinterpret_cast<jlong>(nullptr);
+ }
+ env->ReleaseStringUTFChars(_nnApiDeviceName, nnApiDeviceName);
+ }
+
+ std::string testName{"no-test-name"};
+ if (_testName) {
+ const char* testNameBuf = env->GetStringUTFChars(_testName, nullptr);
+ testName = testNameBuf;
+ env->ReleaseStringUTFChars(_testName, testNameBuf);
+ }
+
+ std::string nnapiLogPath;
+ if (_nnapiLogPath) {
+ const char* nnapiLogPathTmp =
+ env->GetStringUTFChars(_nnapiLogPath, nullptr);
+ nnapiLogPath = nnapiLogPathTmp;
+ env->ReleaseStringUTFChars(_nnapiLogPath, nnapiLogPathTmp);
+ }
+
+ std::string failedModelDumpPath;
+ if (_failedModelDumpPath) {
+ const char* failedModelDumpPathTmp =
+ env->GetStringUTFChars(_failedModelDumpPath, nullptr);
+ failedModelDumpPath = failedModelDumpPathTmp;
+ env->ReleaseStringUTFChars(_failedModelDumpPath, failedModelDumpPathTmp);
+ }
+
+ uint32_t numOperations = static_cast<uint32_t>(_numOperations);
+ uint32_t dimensionRange = static_cast<uint32_t>(_dimensionRange);
+
+ RandomGraphGenerator* result = new RandomGraphGenerator(
+ device, nnApiDeviceNameStr, testName, numOperations, dimensionRange,
+ nnapiLogPath, failedModelDumpPath);
+
+ if (result->init() != kSuccess) {
+ delete result;
+ return reinterpret_cast<jlong>(nullptr);
+ }
+
+ return reinterpret_cast<jlong>(result);
+}
+
+extern "C" JNIEXPORT void JNICALL
+Java_com_android_nn_crashtest_core_RandomGraphTest_destroyRandomGraphGenerator(
+ JNIEnv* env, jclass /* static method */, jlong generatorHandle) {
+ RandomGraphGenerator* graphGenerator =
+ reinterpret_cast<RandomGraphGenerator*>(generatorHandle);
+ delete graphGenerator;
+}
diff --git a/native/Android.bp b/native/Android.bp
index e93857b..315537b 100644
--- a/native/Android.bp
+++ b/native/Android.bp
@@ -19,7 +19,6 @@
header_libs: [
"flatbuffer_headers",
"jni_headers",
- "libnnbenchmark_jni_headers",
"tensorflow_headers",
],
shared_libs: [
@@ -31,4 +30,7 @@
],
sdk_version: "current",
stl: "libc++_static",
+ strip: {
+ keep_symbols: true,
+ },
}
diff --git a/src/com/android/nn/benchmark/core/BenchmarkResult.java b/src/com/android/nn/benchmark/core/BenchmarkResult.java
index eeb6d46..6edf824 100644
--- a/src/com/android/nn/benchmark/core/BenchmarkResult.java
+++ b/src/com/android/nn/benchmark/core/BenchmarkResult.java
@@ -232,9 +232,19 @@
return result.toString();
}
+ public boolean hasBenchmarkError() {
+ return !TextUtils.isEmpty(mBenchmarkError);
+ }
+
+ public String getBenchmarkError() {
+ if (!hasBenchmarkError()) return null;
+
+ return mBenchmarkError;
+ }
+
public String getSummary(float baselineSec) {
- if (!TextUtils.isEmpty(mBenchmarkError)) {
- return mBenchmarkError;
+ if (hasBenchmarkError()) {
+ return getBenchmarkError();
}
java.text.DecimalFormat df = new java.text.DecimalFormat("######.##");
diff --git a/src/com/android/nn/benchmark/core/NNTestBase.java b/src/com/android/nn/benchmark/core/NNTestBase.java
index a2cd29e..f354c6f 100644
--- a/src/com/android/nn/benchmark/core/NNTestBase.java
+++ b/src/com/android/nn/benchmark/core/NNTestBase.java
@@ -35,7 +35,7 @@
import java.util.Random;
import java.util.stream.Collectors;
-public class NNTestBase {
+public class NNTestBase implements AutoCloseable {
protected static final String TAG = "NN_TESTBASE";
// Used to load the 'native-lib' library on application startup.
@@ -54,18 +54,6 @@
*/
private static native boolean getAcceleratorNames(List<String> resultList);
- public static List<String> availableAcceleratorNames() {
- List<String> availableAccelerators = new ArrayList<>();
- if (NNTestBase.getAcceleratorNames(availableAccelerators)) {
- return availableAccelerators.stream().filter(
- acceleratorName -> !acceleratorName.equalsIgnoreCase(
- "nnapi-reference")).collect(Collectors.toList());
- } else {
- Log.e(TAG, "Unable to retrieve accelerator names!!");
- return Collections.EMPTY_LIST;
- }
- }
-
private synchronized native long initModel(
String modelFileName,
boolean useNNApi,
@@ -77,17 +65,6 @@
private synchronized native boolean resizeInputTensors(long modelHandle, int[] inputShape);
- /** Discard inference output in inference results. */
- public static final int FLAG_DISCARD_INFERENCE_OUTPUT = 1 << 0;
- /**
- * Do not expect golden outputs with inference inputs.
- *
- * Useful in cases where there's no straightforward golden output values
- * for the benchmark. This will also skip calculating basic (golden
- * output based) error metrics.
- */
- public static final int FLAG_IGNORE_GOLDEN_OUTPUT = 1 << 1;
-
private synchronized native boolean runBenchmark(long modelHandle,
List<InferenceInOutSequence> inOutList,
List<InferenceResult> resultList,
@@ -100,6 +77,30 @@
String dumpPath,
List<InferenceInOutSequence> inOutList);
+ public static List<String> availableAcceleratorNames() {
+ List<String> availableAccelerators = new ArrayList<>();
+ if (NNTestBase.getAcceleratorNames(availableAccelerators)) {
+ return availableAccelerators.stream().filter(
+ acceleratorName -> !acceleratorName.equalsIgnoreCase(
+ "nnapi-reference")).collect(Collectors.toList());
+ } else {
+ Log.e(TAG, "Unable to retrieve accelerator names!!");
+ return Collections.EMPTY_LIST;
+ }
+ }
+
+ /** Discard inference output in inference results. */
+ public static final int FLAG_DISCARD_INFERENCE_OUTPUT = 1 << 0;
+ /**
+ * Do not expect golden outputs with inference inputs.
+ *
+ * Useful in cases where there's no straightforward golden output values
+ * for the benchmark. This will also skip calculating basic (golden
+ * output based) error metrics.
+ */
+ public static final int FLAG_IGNORE_GOLDEN_OUTPUT = 1 << 1;
+
+
protected Context mContext;
protected TextView mText;
private final String mModelName;
@@ -116,6 +117,8 @@
private final int mMinSdkVersion;
private Optional<String> mNNApiDeviceName = Optional.empty();
private boolean mMmapModel = false;
+ // Path where the current model has been stored for execution
+ private String mTemporaryModelFilePath;
public NNTestBase(String modelName, String modelFile, int[] inputShape,
InferenceInOutSequence.FromAssets[] inputOutputAssets,
@@ -169,9 +172,12 @@
public final boolean setupModel(Context ipcxt) throws IOException, NnApiDelegationFailure {
mContext = ipcxt;
- String modelFileName = copyAssetToFile();
+ if (mTemporaryModelFilePath != null) {
+ deleteOrWarn(mTemporaryModelFilePath);
+ }
+ mTemporaryModelFilePath = copyAssetToFile();
mModelHandle = initModel(
- modelFileName, mUseNNApi, mEnableIntermediateTensorsDump,
+ mTemporaryModelFilePath, mUseNNApi, mEnableIntermediateTensorsDump,
mNNApiDeviceName.orElse(null), mMmapModel);
if (mModelHandle == 0) {
Log.e(TAG, "Failed to init the model");
@@ -200,21 +206,30 @@
}
}
+ private void deleteOrWarn(String path) {
+ if (!new File(path).delete()) {
+ Log.w(TAG, String.format(
+ "Unable to delete file '%s'. This might cause device to run out of space.",
+ path));
+ }
+ }
+
+
private List<InferenceInOutSequence> getInputOutputAssets() throws IOException {
// TODO: Caching, don't read inputs for every inference
List<InferenceInOutSequence> inOutList =
- getInputOutputAssets(mContext, mInputOutputAssets, mInputOutputDatasets);
+ getInputOutputAssets(mContext, mInputOutputAssets, mInputOutputDatasets);
Boolean lastGolden = null;
for (InferenceInOutSequence sequence : inOutList) {
mHasGoldenOutputs = sequence.hasGoldenOutput();
if (lastGolden == null) {
- lastGolden = mHasGoldenOutputs;
+ lastGolden = mHasGoldenOutputs;
} else {
- if (lastGolden != mHasGoldenOutputs) {
- throw new IllegalArgumentException(
- "Some inputs for " + mModelName + " have outputs while some don't.");
- }
+ if (lastGolden != mHasGoldenOutputs) {
+ throw new IllegalArgumentException(
+ "Some inputs for " + mModelName + " have outputs while some don't.");
+ }
}
}
return inOutList;
@@ -332,6 +347,10 @@
destroyModel(mModelHandle);
mModelHandle = 0;
}
+ if (mTemporaryModelFilePath != null) {
+ deleteOrWarn(mTemporaryModelFilePath);
+ mTemporaryModelFilePath = null;
+ }
}
private final Random mRandom = new Random(System.currentTimeMillis());
@@ -377,4 +396,9 @@
throw e;
}
}
+
+ @Override
+ public void close() {
+ destroy();
+ }
}
diff --git a/src/com/android/nn/benchmark/core/Processor.java b/src/com/android/nn/benchmark/core/Processor.java
index c1c3b8a..e74084c 100644
--- a/src/com/android/nn/benchmark/core/Processor.java
+++ b/src/com/android/nn/benchmark/core/Processor.java
@@ -111,28 +111,36 @@
TestModels.TestModelEntry t, float warmupTimeSeconds, float runTimeSeconds)
throws IOException, BenchmarkException {
mTest = changeTest(mTest, t);
- BenchmarkResult result = getBenchmark(warmupTimeSeconds, runTimeSeconds);
- mTest.destroy();
- mTest = null;
- return result;
+ try {
+ BenchmarkResult result = getBenchmark(warmupTimeSeconds, runTimeSeconds);
+ return result;
+ } finally {
+ mTest.destroy();
+ mTest = null;
+ }
}
public static boolean isTestModelSupportedByAccelerator(Context context,
TestModels.TestModelEntry testModelEntry, String acceleratorName)
throws NnApiDelegationFailure {
- NNTestBase tb = testModelEntry.createNNTestBase(/*useNnnapi=*/ true,
+ try(NNTestBase tb = testModelEntry.createNNTestBase(/*useNNApi=*/ true,
/*enableIntermediateTensorsDump=*/false,
- /*mmapModel=*/ false);
- tb.setNNApiDeviceName(acceleratorName);
- try {
+ /*mmapModel=*/ false)) {
+ tb.setNNApiDeviceName(acceleratorName);
return tb.setupModel(context);
} catch (IOException e) {
Log.w(TAG,
String.format("Error trying to check support for model %s on accelerator %s",
testModelEntry.mModelName, acceleratorName), e);
return false;
- } finally {
- tb.destroy();
+ } catch (NnApiDelegationFailure nnApiDelegationFailure) {
+ if (nnApiDelegationFailure.getNnApiErrno() == 4 /*ANEURALNETWORKS_BAD_DATA*/) {
+ // Compilation will fail with ANEURALNETWORKS_BAD_DATA if the device does not
+ // support all operations in the model
+ return false;
+ }
+
+ throw nnApiDelegationFailure;
}
}
@@ -231,7 +239,6 @@
while (mRun.get()) {
try {
benchmarkAllModels();
- Log.d(TAG, "Processor completed work");
} catch (IOException | BenchmarkException e) {
Log.e(TAG, "Exception during benchmark run", e);
success = false;
@@ -241,6 +248,7 @@
throw e;
}
}
+ Log.d(TAG, "Processor completed work");
mCallback.onBenchmarkFinish(success);
} finally {
if (mTest != null) {
diff --git a/src/com/android/nn/crashtest/app/AcceleratorSpecificTestSupport.java b/src/com/android/nn/crashtest/app/AcceleratorSpecificTestSupport.java
index 99030be..e38ba0e 100644
--- a/src/com/android/nn/crashtest/app/AcceleratorSpecificTestSupport.java
+++ b/src/com/android/nn/crashtest/app/AcceleratorSpecificTestSupport.java
@@ -22,6 +22,7 @@
import androidx.test.platform.app.InstrumentationRegistry;
import com.android.nn.benchmark.core.BenchmarkException;
+import com.android.nn.benchmark.core.BenchmarkResult;
import com.android.nn.benchmark.core.NNTestBase;
import com.android.nn.benchmark.core.NnApiDelegationFailure;
import com.android.nn.benchmark.core.Processor;
@@ -120,7 +121,13 @@
public Boolean call() throws Exception {
while (mRun.get()) {
try {
- mProcessor.getInstrumentationResult(mTestModelEntry, 0, 3);
+ BenchmarkResult modelExecutionResult = mProcessor.getInstrumentationResult(
+ mTestModelEntry, 0, 3);
+ if (modelExecutionResult.hasBenchmarkError()) {
+ Log.e(TAG, String.format("Benchmark failed with message %s",
+ modelExecutionResult.getBenchmarkError()));
+ return false;
+ }
} catch (IOException | BenchmarkException e) {
Log.e(TAG, String.format("Error running model %s", mTestModelEntry.mModelName));
return false;
diff --git a/src/com/android/nn/crashtest/app/NNMultiProcessTestActivity.java b/src/com/android/nn/crashtest/app/NNMultiProcessTestActivity.java
index 8962e78..fb34ad4 100644
--- a/src/com/android/nn/crashtest/app/NNMultiProcessTestActivity.java
+++ b/src/com/android/nn/crashtest/app/NNMultiProcessTestActivity.java
@@ -26,6 +26,7 @@
public class NNMultiProcessTestActivity extends Activity {
private static final String TAG = "NNMultiProcessTest";
+ public static final Duration MAX_TEST_DELAY_BEFORE_HANG = Duration.ofSeconds(30);
private final CrashTestStatus mTestStatus = new CrashTestStatus(this::logMessage);
private final CrashTestCoordinator mCoordinator = new CrashTestCoordinator(this);
@@ -60,7 +61,7 @@
// successfully
public CrashTestStatus.TestResult testResult() {
try {
- final Duration testTimeout = mDuration.plus(Duration.ofSeconds(15));
+ final Duration testTimeout = mDuration.plus(MAX_TEST_DELAY_BEFORE_HANG);
boolean completed =
mTestStatus.waitForCompletion(testTimeout.toMillis(), TimeUnit.MILLISECONDS);
if (!completed) {
diff --git a/src/com/android/nn/crashtest/app/NNRandomGraphExecutionTest.java b/src/com/android/nn/crashtest/app/NNRandomGraphExecutionTest.java
new file mode 100644
index 0000000..e05cd99
--- /dev/null
+++ b/src/com/android/nn/crashtest/app/NNRandomGraphExecutionTest.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.nn.crashtest.app;
+
+import com.android.nn.crashtest.app.NNRandomGraphTest;
+
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.time.Duration;
+
+@RunWith(Parameterized.class)
+public class NNRandomGraphExecutionTest extends NNRandomGraphTest {
+ public NNRandomGraphExecutionTest(int modelCount, int graphSize, int dimensionRange,
+ Duration duration, String acceleratorName) {
+ super(modelCount, graphSize, dimensionRange, duration,
+ acceleratorName, /*runModelCompilationOnly=*/false);
+ }
+}
\ No newline at end of file
diff --git a/src/com/android/nn/crashtest/app/NNRandomGraphLoadTest.java b/src/com/android/nn/crashtest/app/NNRandomGraphLoadTest.java
new file mode 100644
index 0000000..34962f4
--- /dev/null
+++ b/src/com/android/nn/crashtest/app/NNRandomGraphLoadTest.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.nn.crashtest.app;
+
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.time.Duration;
+
+@RunWith(Parameterized.class)
+public class NNRandomGraphLoadTest extends NNRandomGraphTest {
+ public NNRandomGraphLoadTest(int modelCount, int graphSize, int dimensionRange,
+ Duration duration, String acceleratorName) {
+ super(modelCount, graphSize, dimensionRange, duration,
+ acceleratorName, /*runModelCompilationOnly=*/true);
+ }
+}
\ No newline at end of file
diff --git a/src/com/android/nn/crashtest/app/NNRandomGraphTest.java b/src/com/android/nn/crashtest/app/NNRandomGraphTest.java
new file mode 100644
index 0000000..86e3b0f
--- /dev/null
+++ b/src/com/android/nn/crashtest/app/NNRandomGraphTest.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.nn.crashtest.app;
+
+import static com.android.nn.crashtest.app.CrashTestStatus.TestResult.SUCCESS;
+
+import android.content.Intent;
+import android.test.ActivityInstrumentationTestCase2;
+import android.test.UiThreadTest;
+import android.test.suitebuilder.annotation.LargeTest;
+
+import androidx.test.InstrumentationRegistry;
+
+import com.android.nn.benchmark.app.BenchmarkTestBase;
+import com.android.nn.benchmark.core.NnApiDelegationFailure;
+import com.android.nn.benchmark.core.TestModels;
+import com.android.nn.crashtest.core.test.RandomGraphTest;
+
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import java.time.Duration;
+import java.util.Arrays;
+import java.util.Optional;
+import java.util.concurrent.ExecutionException;
+
+@RunWith(Parameterized.class)
+public abstract class NNRandomGraphTest
+ extends ActivityInstrumentationTestCase2<NNRandomGraphTestActivity>
+ implements AcceleratorSpecificTestSupport {
+ private static final String TAG = "NN_RAND_MODEL";
+
+ protected final String mAcceleratorName;
+ private final int mModelCount;
+ private final int mGraphSize;
+ private final Duration mDuration;
+ private final int mDimensionRange;
+
+ private final static int SMALL_MODEL_SIZE = 10;
+ private final static int LARGE_MODEL_SIZE = 600;
+ private final static int NARROW_DIMENSIONS_RANGE = 10;
+ private final static int WIDE_DIMENSIONS_RANGE = 1000;
+ private final static Duration MAX_TEST_DURATION = Duration.ofMinutes(15);
+ private final static int NUMBER_OF_MODELS = 600;
+ private final boolean mRunModelCompilationOnly;
+
+ @Parameters(name = "{0} models of size {1} and dimensions range {2} for max duration of {3} "
+ + "on accelerator {4}")
+ public static Iterable<Object[]> testConfiguration() {
+ return AcceleratorSpecificTestSupport.perAcceleratorTestConfig(
+ Arrays.asList(
+ new Object[]{NUMBER_OF_MODELS, SMALL_MODEL_SIZE, WIDE_DIMENSIONS_RANGE,
+ MAX_TEST_DURATION},
+ new Object[]{NUMBER_OF_MODELS, LARGE_MODEL_SIZE, NARROW_DIMENSIONS_RANGE,
+ MAX_TEST_DURATION}));
+ }
+
+ @Rule
+ public TestName mTestName = new TestName();
+
+ public NNRandomGraphTest(int modelCount, int graphSize, int dimensionRange,
+ Duration duration, String acceleratorName, boolean runModelCompilationOnly) {
+ super(NNRandomGraphTestActivity.class);
+ mModelCount = modelCount;
+ mGraphSize = graphSize;
+ mDuration = duration;
+ mAcceleratorName = acceleratorName;
+ mDimensionRange = dimensionRange;
+ mRunModelCompilationOnly = runModelCompilationOnly;
+ }
+
+ @Before
+ @Override
+ public void setUp() {
+ injectInstrumentation(InstrumentationRegistry.getInstrumentation());
+ BenchmarkTestBase.waitUntilCharged(getInstrumentation().getTargetContext(), 60);
+ setActivityIntent(getTestModelsOfSizeAndRangeForMaxTimeIntent(mGraphSize, mDimensionRange,
+ mModelCount, mAcceleratorName, mDuration, mTestName.getMethodName()));
+ }
+
+ protected Optional<TestModels.TestModelEntry> findModelForLivenessTest()
+ throws NnApiDelegationFailure {
+ return findTestModelRunningOnAccelerator(
+ getInstrumentation().getTargetContext(), mAcceleratorName);
+ }
+
+ @Test
+ @LargeTest
+ @UiThreadTest
+ public void testDriverDoesNotFailWithParallelWorkload()
+ throws ExecutionException, InterruptedException, NnApiDelegationFailure {
+ final NNRandomGraphTestActivity activity = getActivity();
+
+ assertEquals(SUCCESS, activity.testResult());
+ }
+
+ /**
+ * @return the intent to use to initialise the RandomGraphTest test class
+ */
+ protected Intent getTestModelsOfSizeAndRangeForMaxTimeIntent(int graphSize, int dimensionsRange,
+ int modelsCount, String deviceName, Duration maxTestDuration, String testName) {
+ Intent result = new Intent();
+ RandomGraphTest
+ .intentInitializer(graphSize, dimensionsRange, modelsCount,
+ RandomGraphTest.DEFAULT_PAUSE_BETWEEN_MODELS_MILLIS,
+ mRunModelCompilationOnly, deviceName, maxTestDuration.toMillis(), testName)
+ .addIntentParams(result);
+ return result;
+ }
+}
\ No newline at end of file
diff --git a/src/com/android/nn/crashtest/app/NNRandomGraphTestActivity.java b/src/com/android/nn/crashtest/app/NNRandomGraphTestActivity.java
new file mode 100644
index 0000000..76e2f64
--- /dev/null
+++ b/src/com/android/nn/crashtest/app/NNRandomGraphTestActivity.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.nn.crashtest.app;
+
+import android.app.Activity;
+import android.content.Intent;
+import android.util.Log;
+
+
+import com.android.nn.crashtest.core.CrashTestCoordinator;
+import com.android.nn.crashtest.core.test.RandomGraphTest;
+
+import java.time.Duration;
+import java.util.concurrent.TimeUnit;
+
+public class NNRandomGraphTestActivity extends Activity {
+ private static final String TAG = "NN_RAND_MODEL";
+
+ private final CrashTestStatus mTestStatus = new CrashTestStatus(this::logMessage);
+ private final CrashTestCoordinator mCoordinator = new CrashTestCoordinator(this);
+ private Duration mDuration;
+
+ protected void logMessage(String msg) {
+ Log.i(TAG, msg);
+ }
+
+ @Override
+ protected void onResume() {
+ super.onResume();
+
+ final Intent intent = getIntent();
+
+ mDuration = Duration.ofMillis(intent.getLongExtra(
+ RandomGraphTest.MAX_TEST_DURATION, RandomGraphTest.DEFAULT_MAX_TEST_DURATION_MILLIS));
+ mCoordinator.startTest(RandomGraphTest.class,
+ RandomGraphTest.intentInitializer(
+ intent.getIntExtra(RandomGraphTest.GRAPH_SIZE, RandomGraphTest.DEFAULT_GRAPH_SIZE),
+ intent.getIntExtra(
+ RandomGraphTest.DIMENSIONS_RANGE, RandomGraphTest.DEFAULT_DIMENSIONS_RANGE),
+ intent.getIntExtra(RandomGraphTest.MODELS_COUNT, RandomGraphTest.DEFAULT_MODELS_COUNT),
+ intent.getLongExtra(RandomGraphTest.PAUSE_BETWEEN_MODELS_MS,
+ RandomGraphTest.DEFAULT_PAUSE_BETWEEN_MODELS_MILLIS),
+ intent.getBooleanExtra(
+ RandomGraphTest.COMPILATION_ONLY, RandomGraphTest.DEFAULT_COMPILATION_ONLY),
+ intent.getStringExtra(RandomGraphTest.DEVICE_NAME),
+ mDuration.toMillis(),
+ intent.getStringExtra(RandomGraphTest.TEST_NAME)),
+ mTestStatus,
+ /*separateProcess=*/true, intent.getStringExtra(RandomGraphTest.TEST_NAME));
+ }
+
+ // This method blocks until the tests complete and returns the test result (SUCCESS if
+ // all tests completed successfully)
+ public CrashTestStatus.TestResult testResult() {
+ try {
+ final Duration testTimeout = mDuration.plus(Duration.ofSeconds(15));
+ boolean completed =
+ mTestStatus.waitForCompletion(testTimeout.toMillis(), TimeUnit.MILLISECONDS);
+ if (!completed) {
+ Log.w(TAG, String.format("Test didn't complete within %s. Returning HANG", testTimeout));
+ return CrashTestStatus.TestResult.HANG;
+ }
+ return mTestStatus.result();
+ } catch (InterruptedException e) {
+ Log.w(TAG, "Interrupted while waiting for test completion. Returning HANG");
+ return CrashTestStatus.TestResult.HANG;
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/com/android/nn/crashtest/core/CrashTestService.java b/src/com/android/nn/crashtest/core/CrashTestService.java
index f6f4680..63c2cf0 100644
--- a/src/com/android/nn/crashtest/core/CrashTestService.java
+++ b/src/com/android/nn/crashtest/core/CrashTestService.java
@@ -28,8 +28,10 @@
import java.util.Objects;
import java.util.Optional;
+import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
public class CrashTestService extends Service {
@@ -43,11 +45,16 @@
public static final int SET_COMM_CHANNEL = 4;
public static final int KILL_PROCESS = 5;
+ // Start tests only after the crash test coordinator has set the communication
+ // channel, in order to avoid losing event notifications
+ private final CountDownLatch startTest = new CountDownLatch(1);
+
Messenger lifecycleListener = null;
final Messenger mMessenger = new Messenger(new Handler(message -> {
switch (message.what) {
case SET_COMM_CHANNEL:
lifecycleListener = message.replyTo;
+ startTest.countDown();
break;
case KILL_PROCESS:
@@ -95,11 +102,20 @@
executor.submit(() -> {
try {
+ startTest.await(3, TimeUnit.SECONDS);
+ } catch (InterruptedException e) {
+ Thread.interrupted();
+ Log.e(TAG, "Interrupted before starting test", e);
+ stopSelf();
+ return;
+ }
+
+ try {
final Optional<String> testResult = crashTest.call();
Log.d(TAG, String.format("Test '%s' completed with result: %s", testClassName,
testResult.orElse("success")));
notify(testResult.isPresent() ? FAILURE : SUCCESS, testResult.orElse(null));
- } catch (Exception e) {
+ } catch (Throwable e) {
Log.e(TAG, "Exception in crash test", e);
notify(FAILURE, "Exception in crash test: " + e);
stopSelf();
@@ -108,6 +124,9 @@
} catch (Exception e) {
Log.e(TAG, "Exception starting test ", e);
stopSelf();
+ } catch (Error error) {
+ Log.e(TAG, "Error starting test ", error);
+ throw error;
}
return mMessenger.getBinder();
diff --git a/src/com/android/nn/crashtest/core/test/RandomGraphTest.java b/src/com/android/nn/crashtest/core/test/RandomGraphTest.java
new file mode 100644
index 0000000..c5403ca
--- /dev/null
+++ b/src/com/android/nn/crashtest/core/test/RandomGraphTest.java
@@ -0,0 +1,233 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.nn.crashtest.core.test;
+
+import android.annotation.SuppressLint;
+import android.content.Context;
+import android.content.Intent;
+import android.text.TextUtils;
+import android.util.Log;
+
+
+import com.android.nn.crashtest.core.CrashTest;
+import com.android.nn.crashtest.core.CrashTestCoordinator;
+
+import java.io.File;
+import java.time.Duration;
+import java.time.LocalDateTime;
+import java.util.Optional;
+
+public class RandomGraphTest implements CrashTest {
+ private static final String TAG = "NN_RAND_MODEL";
+
+ private static final boolean ENABLE_NNAPI_LOGS = false;
+
+ private String getGeneratorOutFilePath(String fileExtension) {
+ return mContext.getExternalFilesDir(null).getAbsolutePath() + "/"
+ + mTestName.hashCode() + "." + fileExtension;
+ }
+
+ private String getNnapiLogFilePath() {
+ if (ENABLE_NNAPI_LOGS) {
+ String logFile = getGeneratorOutFilePath("model.py");
+ Log.d(TAG, String.format("Writing NNAPI Fuzzer logs to %s", logFile));
+ return logFile;
+ } else {
+ return "";
+ }
+ }
+
+ private String getFailedModelDumpPath() {
+ return getGeneratorOutFilePath("log");
+ }
+
+ static {
+ System.loadLibrary("random_graph_test_jni");
+ }
+
+ private enum RandomModelExecutionResult {
+ // This is the java translation of the RandomModelExecutionResult c++ enum in
+ // random_graph_test_jni.cpp
+ kSuccess(0, ""),
+ kFailedCompilation(1, "Compilation failed"),
+ kFailedExecution(2, "Execution failed"),
+ kFailedOtherNnApiCall(3,
+ "Failure trying to interact with the driver"),
+ kInvalidModelGenerated(4, "Unable to generate a valid model"),
+ kUnsupportedModelGenerated(5, "Unable to generate a model supported by the driver");
+
+
+ private final int mValue;
+ private final String mDescription;
+
+ RandomModelExecutionResult(int value, String description) {
+ mValue = value;
+ mDescription = description;
+ }
+
+ public static RandomModelExecutionResult fromNativeResult(int nativeResult) {
+ for (RandomModelExecutionResult currValue : RandomModelExecutionResult.values()) {
+ if (currValue.mValue == nativeResult) {
+ return currValue;
+ }
+ }
+ throw new IllegalArgumentException(
+ String.format("Invalid native result value %d", nativeResult));
+ }
+ }
+
+ public static final String MAX_TEST_DURATION = "max_test_duration";
+ public static final String GRAPH_SIZE = "graph_size";
+ public static final String DIMENSIONS_RANGE = "dimensions_range";
+ public static final String MODELS_COUNT = "models_count";
+ public static final String PAUSE_BETWEEN_MODELS_MS = "pause_between_models_ms";
+ public static final String COMPILATION_ONLY = "compilation_only";
+ public static final String DEVICE_NAME = "device_name";
+ public static final String TEST_NAME = "test_name";
+
+ public static final int DEFAULT_GRAPH_SIZE = 100;
+ public static final int DEFAULT_DIMENSIONS_RANGE = 100;
+ public static final int DEFAULT_MODELS_COUNT = 100;
+ public static final long DEFAULT_PAUSE_BETWEEN_MODELS_MILLIS = 300;
+ public static final boolean DEFAULT_COMPILATION_ONLY = false;
+ public static final long DEFAULT_MAX_TEST_DURATION_MILLIS = Duration.ofMinutes(2).toMillis();
+ private static final long MAX_TIME_TO_LOOK_FOR_SUITABLE_MODEL_SECONDS = 30;
+
+ static public CrashTestCoordinator.CrashTestIntentInitializer intentInitializer(int graphSize,
+ int dimensionsRange, int modelsCount, long pauseBetweenModelsMillis,
+ boolean compilationOnly, String deviceName, long maxTestDurationMillis,
+ String testName) {
+ return intent -> {
+ intent.putExtra(GRAPH_SIZE, graphSize);
+ intent.putExtra(DIMENSIONS_RANGE, dimensionsRange);
+ intent.putExtra(MODELS_COUNT, modelsCount);
+ intent.putExtra(PAUSE_BETWEEN_MODELS_MS, pauseBetweenModelsMillis);
+ intent.putExtra(COMPILATION_ONLY, compilationOnly);
+ intent.putExtra(DEVICE_NAME, deviceName);
+ intent.putExtra(MAX_TEST_DURATION, maxTestDurationMillis);
+ intent.putExtra(TEST_NAME, testName);
+ };
+ }
+
+
+ private Context mContext;
+ private String mDeviceName;
+ private boolean mCompilationOnly;
+ private int mGraphSize;
+ private int mDimensionsRange;
+ private int mModelsCount;
+ private long mPauseBetweenModelsMillis;
+ private Duration mMaxTestDuration;
+ private String mTestName;
+
+ public static native long createRandomGraphGenerator(String nnApiDeviceName, int numOperations,
+ int dimensionRange,
+ String testName, String nnapiLogPath, String failedModelDumpPath);
+
+ public static native long destroyRandomGraphGenerator(long generatorHandle);
+
+ private static native int runRandomModel(long generatorHandle,
+ boolean compilationOnly, long maxModelSearchTimeSeconds);
+
+ @Override
+ public void init(Context context, Intent configParams,
+ Optional<ProgressListener> progressListener) {
+ mContext = context;
+ mDeviceName = configParams.getStringExtra(DEVICE_NAME);
+ mCompilationOnly = configParams.getBooleanExtra(COMPILATION_ONLY, DEFAULT_COMPILATION_ONLY);
+ mGraphSize = configParams.getIntExtra(GRAPH_SIZE, DEFAULT_GRAPH_SIZE);
+ mDimensionsRange = configParams.getIntExtra(DIMENSIONS_RANGE, DEFAULT_DIMENSIONS_RANGE);
+ mModelsCount = configParams.getIntExtra(MODELS_COUNT, DEFAULT_MODELS_COUNT);
+ mPauseBetweenModelsMillis =
+ configParams.getLongExtra(PAUSE_BETWEEN_MODELS_MS,
+ DEFAULT_PAUSE_BETWEEN_MODELS_MILLIS);
+ mMaxTestDuration =
+ Duration.ofMillis(configParams.getLongExtra(MAX_TEST_DURATION,
+ DEFAULT_MAX_TEST_DURATION_MILLIS));
+ mTestName = configParams.getStringExtra(TEST_NAME) != null
+ ? configParams.getStringExtra(TEST_NAME)
+ : "no-name";
+ }
+
+ @SuppressLint("DefaultLocale")
+ @Override
+ public Optional<String> call() throws Exception {
+ LocalDateTime testStart = LocalDateTime.now();
+ Log.i(TAG,
+ String.format(String.format(
+ "Starting test '%s', testing %d models of size %d and dimension range %d "
+ + "for a max duration of %s on device %s.",
+ mTestName, mModelsCount, mGraphSize, mDimensionsRange, mMaxTestDuration,
+ mDeviceName != null ? mDeviceName : "no-device")));
+
+ final long generatorHandle = RandomGraphTest.createRandomGraphGenerator(mDeviceName,
+ mGraphSize, mDimensionsRange, mTestName, getNnapiLogFilePath(),
+ getFailedModelDumpPath());
+ if (generatorHandle == 0) {
+ Log.e(TAG, "Unable to initialize random graph generator, failing test");
+ return failure("Unable to initialize random graph generator");
+ }
+ try {
+ for (int i = 0; i < mModelsCount; i++) {
+ if (Duration.between(testStart, LocalDateTime.now()).plus(
+ Duration.ofSeconds(MAX_TIME_TO_LOOK_FOR_SUITABLE_MODEL_SECONDS)).compareTo(
+ mMaxTestDuration)
+ >= 0) {
+ Log.d(TAG, "Max test duration reached, ending test");
+ break;
+ }
+
+ int nativeExecutionResult = runRandomModel(generatorHandle,
+ mCompilationOnly, MAX_TIME_TO_LOOK_FOR_SUITABLE_MODEL_SECONDS);
+
+ RandomModelExecutionResult executionResult =
+ RandomModelExecutionResult.fromNativeResult(nativeExecutionResult);
+
+ if (executionResult != RandomModelExecutionResult.kSuccess) {
+ Log.w(TAG, String.format(
+ "Received failure result '%s' at iteration %d, failing",
+ executionResult.mDescription, i));
+ if (executionResult == RandomModelExecutionResult.kFailedExecution ||
+ executionResult == RandomModelExecutionResult.kFailedCompilation) {
+ Log.i(TAG, String.format("Model has been dumped at path '%s'",
+ getFailedModelDumpPath()));
+ } else if (
+ executionResult == RandomModelExecutionResult.kUnsupportedModelGenerated
+ || executionResult
+ == RandomModelExecutionResult.kInvalidModelGenerated) {
+ Log.w(TAG, String.format(
+ "Unable to find a valid model for test '%s', returning success "
+ + "anyway",
+ mTestName));
+
+ return success();
+ }
+
+ return failure(executionResult.mDescription);
+ } else if (!TextUtils.isEmpty(getNnapiLogFilePath())) {
+ (new File(getNnapiLogFilePath())).delete();
+ }
+
+ Thread.sleep(mPauseBetweenModelsMillis);
+ }
+
+ return success();
+ } finally {
+ RandomGraphTest.destroyRandomGraphGenerator(generatorHandle);
+ }
+ }
+}