Revert "Fix memory leak"

This reverts commit 90f51e0c85db8ff8b9855b05c68bb867d464e5c9.

Reason for revert: b/166682761 - build failures on the rvc-dev branch
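
For context, a minimal standalone sketch of why the reverted shape can fail to
compile (assumption: the rvc-dev copy of NNTestBase does not implement
AutoCloseable, so try-with-resources cannot be used with it; the class and
names below are hypothetical stand-ins, not the real benchmark code):

    // Sketch.java -- illustrative only, not part of this change.
    public final class Sketch {
        // Hypothetical stand-in for NNTestBase. The reverted fix assumed the
        // real class implements AutoCloseable (i.e. provides close()).
        static final class FakeTestBase implements AutoCloseable {
            @Override
            public void close() {
                // Release model/native resources here; after this revert the
                // tests no longer call close() automatically.
            }
        }

        public static void main(String[] args) {
            // try-with-resources only compiles when the resource type
            // implements AutoCloseable; if NNTestBase on rvc-dev does not,
            // code of this shape fails the build there.
            try (FakeTestBase test = new FakeTestBase()) {
                System.out.println("using " + test);
            } // close() runs automatically here, even if an exception is thrown
        }
    }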

Bug: 166073521
Bug: 166682761
Change-Id: Ieefdd10b96e9e2301d06b88abb377e0367ee882d
Test: TreeHugger
diff --git a/neuralnetworks/V1_2/benchmark/java/src/com/android/nn/benchmark/vts/v1_2/NNAccuracyTest.java b/neuralnetworks/V1_2/benchmark/java/src/com/android/nn/benchmark/vts/v1_2/NNAccuracyTest.java
index 6c1aaf6..17ca0ba 100644
--- a/neuralnetworks/V1_2/benchmark/java/src/com/android/nn/benchmark/vts/v1_2/NNAccuracyTest.java
+++ b/neuralnetworks/V1_2/benchmark/java/src/com/android/nn/benchmark/vts/v1_2/NNAccuracyTest.java
@@ -117,18 +117,17 @@
     @Test
     @LargeTest
     public void testDriver() throws BenchmarkException, IOException {
-        try (NNTestBase test = mModel.mEntry.createNNTestBase()) {
-            test.useNNApi();
-            test.setNNApiDeviceName(mModel.mInstance);
-            if (!test.setupModel(mActivity)) {
-                throw new AssumptionViolatedException("The driver rejected the model.");
-            }
-            Pair<List<InferenceInOutSequence>, List<InferenceResult>> inferenceResults =
-                    test.runBenchmarkCompleteInputSet(/*setRepeat=*/1, /*timeoutSec=*/3600);
-            BenchmarkResult benchmarkResult = BenchmarkResult.fromInferenceResults(
-                    mModel.mEntry.mModelName, BenchmarkResult.BACKEND_TFLITE_NNAPI,
-                    inferenceResults.first, inferenceResults.second, test.getEvaluator());
-            assertFalse(benchmarkResult.hasValidationErrors());
+        NNTestBase test = mModel.mEntry.createNNTestBase();
+        test.useNNApi();
+        test.setNNApiDeviceName(mModel.mInstance);
+        if (!test.setupModel(mActivity)) {
+            throw new AssumptionViolatedException("The driver rejected the model.");
         }
+        Pair<List<InferenceInOutSequence>, List<InferenceResult>> inferenceResults =
+                test.runBenchmarkCompleteInputSet(/*setRepeat=*/1, /*timeoutSec=*/3600);
+        BenchmarkResult benchmarkResult = BenchmarkResult.fromInferenceResults(
+                mModel.mEntry.mModelName, BenchmarkResult.BACKEND_TFLITE_NNAPI,
+                inferenceResults.first, inferenceResults.second, test.getEvaluator());
+        assertFalse(benchmarkResult.hasValidationErrors());
     }
 }
diff --git a/neuralnetworks/V1_3/benchmark/java/src/com/android/nn/benchmark/vts/v1_3/NNAccuracyTest.java b/neuralnetworks/V1_3/benchmark/java/src/com/android/nn/benchmark/vts/v1_3/NNAccuracyTest.java
index d4a05fc..bdc4311 100644
--- a/neuralnetworks/V1_3/benchmark/java/src/com/android/nn/benchmark/vts/v1_3/NNAccuracyTest.java
+++ b/neuralnetworks/V1_3/benchmark/java/src/com/android/nn/benchmark/vts/v1_3/NNAccuracyTest.java
@@ -117,18 +117,17 @@
     @Test
     @LargeTest
     public void testDriver() throws BenchmarkException, IOException {
-        try (NNTestBase test = mModel.mEntry.createNNTestBase()) {
-            test.useNNApi();
-            test.setNNApiDeviceName(mModel.mInstance);
-            if (!test.setupModel(mActivity)) {
-                throw new AssumptionViolatedException("The driver rejected the model.");
-            }
-            Pair<List<InferenceInOutSequence>, List<InferenceResult>> inferenceResults =
-                    test.runBenchmarkCompleteInputSet(/*setRepeat=*/1, /*timeoutSec=*/3600);
-            BenchmarkResult benchmarkResult = BenchmarkResult.fromInferenceResults(
-                    mModel.mEntry.mModelName, BenchmarkResult.BACKEND_TFLITE_NNAPI,
-                    inferenceResults.first, inferenceResults.second, test.getEvaluator());
-            assertFalse(benchmarkResult.hasValidationErrors());
+        NNTestBase test = mModel.mEntry.createNNTestBase();
+        test.useNNApi();
+        test.setNNApiDeviceName(mModel.mInstance);
+        if (!test.setupModel(mActivity)) {
+            throw new AssumptionViolatedException("The driver rejected the model.");
         }
+        Pair<List<InferenceInOutSequence>, List<InferenceResult>> inferenceResults =
+                test.runBenchmarkCompleteInputSet(/*setRepeat=*/1, /*timeoutSec=*/3600);
+        BenchmarkResult benchmarkResult = BenchmarkResult.fromInferenceResults(
+                mModel.mEntry.mModelName, BenchmarkResult.BACKEND_TFLITE_NNAPI,
+                inferenceResults.first, inferenceResults.second, test.getEvaluator());
+        assertFalse(benchmarkResult.hasValidationErrors());
     }
 }