DO NOT MERGE: Merge Oreo MR1 into master
Exempt-From-Owner-Approval: Changes already landed internally
Change-Id: Ieda80ac262bb51fb11bb740ecf38f5bd67c56325
diff --git a/OWNERS b/OWNERS
index 1d67f35..fb73083 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,3 +1,8 @@
-sbasi@google.com
-kevcheng@google.com
+erowe@google.com
+guangzhu@google.com
jdesprez@google.com
+kevcheng@google.com
+moonk@google.com
+sbasi@google.com
+sosa@google.com
+tsu@google.com
diff --git a/atest/__init__.py b/atest/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/atest/__init__.py
diff --git a/atest/atest.py b/atest/atest.py
new file mode 100755
index 0000000..aa2f02b
--- /dev/null
+++ b/atest/atest.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+#
+# Copyright 2017, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Command line utility for running Android tests through TradeFederation.
+
+atest helps automate the flow of building test modules across the Android
+code base and executing the tests via the TradeFederation test harness.
+
+atest is designed to support any test types that can be run by TradeFederation.
+"""
+
+import os
+import sys
+
+
+TARGET_TESTCASES_ENV_VARIBLE = "ANDROID_TARGET_OUT_TESTCASES"
+
+
+def _has_environment_variables():
+ """Verify the local environment has been setup to run atest.
+
+ @returns True if the environment has the correct variables initialized,
+ False otherwise.
+ """
+ return bool(os.environ.get(TARGET_TESTCASES_ENV_VARIBLE))
+
+
+def main(argv):
+ """Entry point atest script.
+
+ @param argv: arguments list
+ """
+ if not _has_environment_variables():
+ print >> sys.stderr, ("Local environment doesn't appear to have been "
+ "initialized. Did you remember to run lunch?")
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/atest/atest_unittest.py b/atest/atest_unittest.py
new file mode 100755
index 0000000..be432f4
--- /dev/null
+++ b/atest/atest_unittest.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+#
+# Copyright 2017, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Unittests for atest."""
+
+import mock
+import os
+import unittest
+
+import atest
+
+
+class AtestUnittests(unittest.TestCase):
+
+ @mock.patch('os.environ.get', return_value=None)
+ def test_uninitalized_check_environment(self, mock_os_env_get):
+ self.assertFalse(atest._has_environment_variables())
+
+
+ @mock.patch('os.environ.get', return_value='out/testcases/')
+ def test_initalized_check_environment(self, mock_os_env_get):
+ self.assertTrue(atest._has_environment_variables())
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/prod-tests/res/config/template/local_min.xml b/prod-tests/res/config/template/local_min.xml
index 006074e..c222faa 100644
--- a/prod-tests/res/config/template/local_min.xml
+++ b/prod-tests/res/config/template/local_min.xml
@@ -28,5 +28,6 @@
</logger>
<log_saver class="com.android.tradefed.result.FileSystemLogSaver" />
<result_reporter class="com.android.tradefed.result.ConsoleResultReporter" />
+ <result_reporter class="com.android.tradefed.result.suite.SuiteResultReporter" />
<template-include name="reporters" default="empty" />
</configuration>
diff --git a/prod-tests/src/com/android/app/tests/AppLaunchTest.java b/prod-tests/src/com/android/app/tests/AppLaunchTest.java
index 3269faa..4161791 100644
--- a/prod-tests/src/com/android/app/tests/AppLaunchTest.java
+++ b/prod-tests/src/com/android/app/tests/AppLaunchTest.java
@@ -133,8 +133,8 @@
i.setDevice(getDevice());
i.addInstrumentationArg("packageName", packageName);
i.run(listener);
- InputStreamSource s = getDevice().getScreenshot();
- listener.testLog("screenshot", LogDataType.PNG, s);
- s.cancel();
+ try (InputStreamSource s = getDevice().getScreenshot()) {
+ listener.testLog("screenshot", LogDataType.PNG, s);
+ }
}
}
diff --git a/prod-tests/src/com/android/framework/tests/BandwidthMicroBenchMarkTest.java b/prod-tests/src/com/android/framework/tests/BandwidthMicroBenchMarkTest.java
index 0ef0084..e0865a5 100644
--- a/prod-tests/src/com/android/framework/tests/BandwidthMicroBenchMarkTest.java
+++ b/prod-tests/src/com/android/framework/tests/BandwidthMicroBenchMarkTest.java
@@ -177,9 +177,9 @@
}
private void saveFile(String spongeName, ITestInvocationListener listener, File file) {
- InputStreamSource stream = new FileInputStreamSource(file);
- listener.testLog(spongeName, LogDataType.TEXT, stream);
- stream.cancel();
+ try (InputStreamSource stream = new FileInputStreamSource(file)) {
+ listener.testLog(spongeName, LogDataType.TEXT, stream);
+ }
}
/**
diff --git a/prod-tests/src/com/android/framework/tests/DataIdleTest.java b/prod-tests/src/com/android/framework/tests/DataIdleTest.java
index 82c2674..eeccbf5 100644
--- a/prod-tests/src/com/android/framework/tests/DataIdleTest.java
+++ b/prod-tests/src/com/android/framework/tests/DataIdleTest.java
@@ -119,9 +119,9 @@
* @param listener {@link ITestInvocationListener}
*/
void logBugReport(ITestInvocationListener listener) {
- InputStreamSource bugreport = mTestDevice.getBugreport();
- listener.testLog(BUG_REPORT_LABEL, LogDataType.BUGREPORT, bugreport);
- bugreport.cancel();
+ try (InputStreamSource bugreport = mTestDevice.getBugreport()) {
+ listener.testLog(BUG_REPORT_LABEL, LogDataType.BUGREPORT, bugreport);
+ }
}
/**
diff --git a/prod-tests/src/com/android/framework/tests/DownloadManagerHostTests.java b/prod-tests/src/com/android/framework/tests/DownloadManagerHostTests.java
index 9d2c978..38838fc 100644
--- a/prod-tests/src/com/android/framework/tests/DownloadManagerHostTests.java
+++ b/prod-tests/src/com/android/framework/tests/DownloadManagerHostTests.java
@@ -230,10 +230,10 @@
CLog.w("dumpsys wifi did not return output");
} else {
String name = test.getTestName() +"-dumpsys-wifi";
- ByteArrayInputStreamSource stream =
- new ByteArrayInputStreamSource(output.getBytes());
- super.testLog(name, LogDataType.TEXT, stream);
- stream.cancel();
+ try (ByteArrayInputStreamSource stream =
+ new ByteArrayInputStreamSource(output.getBytes())) {
+ super.testLog(name, LogDataType.TEXT, stream);
+ }
}
} catch (DeviceNotAvailableException e) {
CLog.e("Error getting dumpsys wifi");
diff --git a/prod-tests/src/com/android/framework/tests/FrameworkStressTest.java b/prod-tests/src/com/android/framework/tests/FrameworkStressTest.java
index 21f085c..41e601b 100644
--- a/prod-tests/src/com/android/framework/tests/FrameworkStressTest.java
+++ b/prod-tests/src/com/android/framework/tests/FrameworkStressTest.java
@@ -144,8 +144,7 @@
// Retrieve bugreport
BugreportParser parser = new BugreportParser();
BugreportItem bugreport = null;
- InputStreamSource bugSource = mTestDevice.getBugreport();
- try {
+ try (InputStreamSource bugSource = mTestDevice.getBugreport()) {
mListener.testLog(BUGREPORT_LOG_NAME, LogDataType.BUGREPORT, bugSource);
bugreport = parser.parse(new BufferedReader(new InputStreamReader(
bugSource.createInputStream())));
@@ -154,8 +153,6 @@
} catch (IOException e) {
Assert.fail(String.format("Failed to fetch and parse bugreport for device %s: "
+ "%s", mTestDevice.getSerialNumber(), e));
- } finally {
- bugSource.cancel();
}
LogcatItem systemLog = bugreport.getSystemLog();
// We only add errors found since last test run.
diff --git a/prod-tests/src/com/android/framework/tests/PreloadedClassesTest.java b/prod-tests/src/com/android/framework/tests/PreloadedClassesTest.java
index 7f58f28..04ab7b7 100644
--- a/prod-tests/src/com/android/framework/tests/PreloadedClassesTest.java
+++ b/prod-tests/src/com/android/framework/tests/PreloadedClassesTest.java
@@ -138,11 +138,11 @@
// Generate the corresponding preloaded classes
File classes = writePreloadedClasses(threshold);
if (classes != null) {
- FileInputStreamSource stream = new FileInputStreamSource(classes);
- String name = String.format("preloaded-classes-threshold-%s", thresholdStr);
- listener.testLog(name, LogDataType.TEXT, stream);
+ try (FileInputStreamSource stream = new FileInputStreamSource(classes)) {
+ String name = String.format("preloaded-classes-threshold-%s", thresholdStr);
+ listener.testLog(name, LogDataType.TEXT, stream);
+ }
// Clean up after uploading
- stream.cancel();
FileUtil.deleteFile(classes);
} else {
String msg = String.format(
diff --git a/prod-tests/src/com/android/graphics/tests/ImageProcessingTest.java b/prod-tests/src/com/android/graphics/tests/ImageProcessingTest.java
index a6909cd..e3d0382 100644
--- a/prod-tests/src/com/android/graphics/tests/ImageProcessingTest.java
+++ b/prod-tests/src/com/android/graphics/tests/ImageProcessingTest.java
@@ -86,9 +86,9 @@
mTestDevice.runInstrumentationTests(runner, collectListener, bugListener, standardListener);
// Capture a bugreport after the test
- InputStreamSource bugreport = mTestDevice.getBugreport();
- standardListener.testLog("bugreport.txt", LogDataType.BUGREPORT, bugreport);
- bugreport.cancel();
+ try (InputStreamSource bugreport = mTestDevice.getBugreport()) {
+ standardListener.testLog("bugreport.txt", LogDataType.BUGREPORT, bugreport);
+ }
// Collect test metrics from the instrumentation test output.
Map<String, String> resultMetrics = new HashMap<String, String>();
diff --git a/prod-tests/src/com/android/graphics/tests/OpenGlPerformanceTest.java b/prod-tests/src/com/android/graphics/tests/OpenGlPerformanceTest.java
index d81ed7b..d2340ee 100644
--- a/prod-tests/src/com/android/graphics/tests/OpenGlPerformanceTest.java
+++ b/prod-tests/src/com/android/graphics/tests/OpenGlPerformanceTest.java
@@ -189,9 +189,9 @@
private void logOutputFile(ITestInvocationListener listener)
throws DeviceNotAvailableException {
// take a bug report, it is possible the system crashed
- InputStreamSource bugreport = mTestDevice.getBugreport();
- listener.testLog("bugreport.txt", LogDataType.BUGREPORT, bugreport);
- bugreport.cancel();
+ try (InputStreamSource bugreport = mTestDevice.getBugreport()) {
+ listener.testLog("bugreport.txt", LogDataType.BUGREPORT, bugreport);
+ }
File resFile = null;
InputStreamSource outputSource = null;
diff --git a/prod-tests/src/com/android/graphics/tests/SkiaTest.java b/prod-tests/src/com/android/graphics/tests/SkiaTest.java
index 7d0dc18..900f2ae 100644
--- a/prod-tests/src/com/android/graphics/tests/SkiaTest.java
+++ b/prod-tests/src/com/android/graphics/tests/SkiaTest.java
@@ -189,10 +189,10 @@
listener.testFailed(testId, "Failed to pull " + remotePath);
} else {
CLog.v("pulled result file to " + localFile.getPath());
- FileInputStreamSource source = new FileInputStreamSource(localFile);
- // Use the original name, for clarity.
- listener.testLog(remoteFile.getName(), type, source);
- source.cancel();
+ try (FileInputStreamSource source = new FileInputStreamSource(localFile)) {
+ // Use the original name, for clarity.
+ listener.testLog(remoteFile.getName(), type, source);
+ }
if (!localFile.delete()) {
CLog.w("Failed to delete temporary file %s", localFile.getPath());
}
diff --git a/prod-tests/src/com/android/graphics/tests/UiPerformanceTest.java b/prod-tests/src/com/android/graphics/tests/UiPerformanceTest.java
index 166a779..d258f1e 100644
--- a/prod-tests/src/com/android/graphics/tests/UiPerformanceTest.java
+++ b/prod-tests/src/com/android/graphics/tests/UiPerformanceTest.java
@@ -151,9 +151,9 @@
private void logOutputFile(ITestInvocationListener listener)
throws DeviceNotAvailableException {
// catch a bugreport after the test
- InputStreamSource bugreport = mTestDevice.getBugreport();
- listener.testLog("bugreport", LogDataType.BUGREPORT, bugreport);
- bugreport.cancel();
+ try (InputStreamSource bugreport = mTestDevice.getBugreport()) {
+ listener.testLog("bugreport", LogDataType.BUGREPORT, bugreport);
+ }
File resFile = null;
InputStreamSource outputSource = null;
diff --git a/prod-tests/src/com/android/media/tests/AdbScreenrecordTest.java b/prod-tests/src/com/android/media/tests/AdbScreenrecordTest.java
new file mode 100644
index 0000000..eb50f50
--- /dev/null
+++ b/prod-tests/src/com/android/media/tests/AdbScreenrecordTest.java
@@ -0,0 +1,433 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.tests;
+
+import com.android.ddmlib.CollectingOutputReceiver;
+import com.android.ddmlib.testrunner.TestIdentifier;
+import com.android.tradefed.config.Option;
+import com.android.tradefed.device.DeviceNotAvailableException;
+import com.android.tradefed.device.IFileEntry;
+import com.android.tradefed.device.ITestDevice;
+import com.android.tradefed.log.LogUtil.CLog;
+import com.android.tradefed.result.ITestInvocationListener;
+import com.android.tradefed.testtype.IDeviceTest;
+import com.android.tradefed.testtype.IRemoteTest;
+import com.android.tradefed.util.CommandResult;
+import com.android.tradefed.util.CommandStatus;
+import com.android.tradefed.util.FileUtil;
+import com.android.tradefed.util.RunUtil;
+
+import java.io.File;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.TimeZone;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Tests adb command "screenrecord", i.e. "adb screenrecord [--size] [--bit-rate] [--time-limit]"
+ *
+ * <p>The test use the above command to record a video of DUT's screen. It then tries to verify that
+ * a video was actually recorded and that the video is a valid video file. It currently uses
+ * 'avprobe' to do the video analysis along with extracting parameters from the adb command's
+ * output.
+ */
+public class AdbScreenrecordTest implements IDeviceTest, IRemoteTest {
+
+ //===================================================================
+ // TEST OPTIONS
+ //===================================================================
+ @Option(name = "run-key", description = "Run key for the test")
+ private String mRunKey = "AdbScreenRecord";
+
+ @Option(name = "time-limit", description = "Recording time in seconds", isTimeVal = true)
+ private long mRecordTimeInSeconds = -1;
+
+ @Option(name = "size", description = "Video Size: 'widthxheight', e.g. '1280x720'")
+ private String mVideoSize = null;
+
+ @Option(name = "bit-rate", description = "Video bit rate in megabits per second, e.g. 4000000")
+ private long mBitRate = -1;
+
+ //===================================================================
+ // CLASS VARIABLES
+ //===================================================================
+ private ITestDevice mDevice;
+ private TestRunHelper mTestRunHelper;
+
+ //===================================================================
+ // CONSTANTS
+ //===================================================================
+ private static final long TEST_TIMEOUT_MS = 5 * 60 * 1000; // 5 min
+ private static final long DEVICE_SYNC_MS = 5 * 60 * 1000; // 5 min
+ private static final long POLLING_INTERVAL_MS = 5 * 1000; // 5 sec
+ private static final long CMD_TIMEOUT_MS = 5 * 1000; // 5 sec
+ private static final String ERR_OPTION_MALFORMED = "Test option %1$s is not correct [%2$s]";
+ private static final String OPTION_TIME_LIMIT = "--time-limit";
+ private static final String OPTION_SIZE = "--size";
+ private static final String OPTION_BITRATE = "--bit-rate";
+ private static final String RESULT_KEY_RECORDED_FRAMES = "recorded_frames";
+ private static final String RESULT_KEY_RECORDED_LENGTH = "recorded_length";
+ private static final String RESULT_KEY_VERIFIED_DURATION = "verified_duration";
+ private static final String RESULT_KEY_VERIFIED_BITRATE = "verified_bitrate";
+ private static final String TEST_FILE = "/sdcard/screenrecord_test.mp4";
+ private static final String AVPROBE_NOT_INSTALLED =
+ "Program 'avprobe' is not installed on host '%1$s'";
+ private static final String REGEX_IS_VIDEO_OK =
+ "Duration: (\\d\\d:\\d\\d:\\d\\d.\\d\\d).+bitrate: (\\d+ .b\\/s)";
+ private static final String AVPROBE_STR = "avprobe";
+
+ //===================================================================
+ // ENUMS
+ //===================================================================
+ enum HOST_SOFTWARE {
+ AVPROBE
+ }
+
+ @Override
+ public void setDevice(ITestDevice device) {
+ mDevice = device;
+ }
+
+ @Override
+ public ITestDevice getDevice() {
+ return mDevice;
+ }
+
+ /** Main test function invoked by test harness */
+ @Override
+ public void run(ITestInvocationListener listener) throws DeviceNotAvailableException {
+ initializeTest(listener);
+
+ CLog.i("Verify required software is installed on host");
+ verifyRequiredSoftwareIsInstalled(HOST_SOFTWARE.AVPROBE);
+
+ mTestRunHelper.startTest(1);
+
+ Map<String, String> resultsDictionary = new HashMap<String, String>();
+ try {
+ CLog.i("Verify that test options are valid");
+ if (!verifyTestParameters()) {
+ return;
+ }
+
+ // "resultDictionary" can be used to post results to dashboards like BlackBox
+ resultsDictionary = runTest(resultsDictionary, TEST_TIMEOUT_MS);
+ } finally {
+ final String metricsStr = Arrays.toString(resultsDictionary.entrySet().toArray());
+ CLog.i("Uploading metrics values:\n" + metricsStr);
+ mTestRunHelper.endTest(resultsDictionary);
+ }
+ }
+
+ /**
+     * Test code that calls "adb screenrecord" and checks for pass/fail criteria
+ *
+ * <p>
+ *
+ * <ul>
+ * <li>1. Run adb screenrecord command
+ * <li>2. Wait until there is a video file; fail if none appears
+ * <li>3. Analyze adb output and extract recorded number of frames and video length
+ * <li>4. Pull recorded video file off device
+ * <li>5. Using avprobe, analyze video file and extract duration and bitrate
+ * <li>6. Return extracted results
+ * </ul>
+ *
+ * @throws DeviceNotAvailableException
+ */
+ private Map<String, String> runTest(Map<String, String> results, final long timeout)
+ throws DeviceNotAvailableException {
+ final CollectingOutputReceiver receiver = new CollectingOutputReceiver();
+ final String cmd = generateAdbScreenRecordCommand();
+ final String deviceFileName = getAbsoluteFilename();
+
+ CLog.i("START Execute device shell command: '" + cmd + "'");
+ getDevice().executeShellCommand(cmd, receiver, timeout, TimeUnit.MILLISECONDS, 3);
+ String adbOutput = receiver.getOutput();
+ CLog.i(adbOutput);
+ CLog.i("END Execute device shell command");
+
+ CLog.i("Wait for recorded file: " + deviceFileName);
+ if (!waitForFile(getDevice(), timeout, deviceFileName)) {
+ mTestRunHelper.reportFailure("Recorded test file not found");
+ // Since we don't have a file, no need to delete it; we can return here
+ return results;
+ }
+
+ CLog.i("Get number of recorded frames and recorded length from adb output");
+ if (!extractVideoDataFromAdbOutput(adbOutput, results)) {
+ deleteFileFromDevice(deviceFileName);
+ return results;
+ }
+
+ CLog.i("Get duration and bitrate info from video file using '" + AVPROBE_STR + "'");
+ try {
+ extractDurationAndBitrateFromVideoFileUsingAvprobe(deviceFileName, results);
+ } catch (ParseException e) {
+ throw new RuntimeException(e);
+ }
+ deleteFileFromDevice(deviceFileName);
+ return results;
+ }
+
+    /** Convert a bitrate string of the form "&lt;number&gt; &lt;K|M|G&gt;b/s" to kilobits */
+ private long convertBitrateToKilobits(String bitrate) {
+ Matcher m = Pattern.compile("(\\d+) (.)b\\/s").matcher(bitrate);
+ if (!m.matches()) {
+ return -1;
+ }
+
+ final String unit = m.group(2).toUpperCase();
+ long factor = 1;
+ switch (unit) {
+ case "K":
+ factor = 1;
+ break;
+ case "M":
+ factor = 1000;
+ break;
+ case "G":
+ factor = 1000000;
+ break;
+ }
+
+ long rate = Long.parseLong(m.group(1));
+
+ return rate * factor;
+ }
+
+ /**
+ * Convert a string on form HH:mm:ss.SS to nearest number of seconds
+ *
+ * @throws ParseException
+ */
+ private long convertDurationToMilliseconds(String duration) throws ParseException {
+ SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SS");
+ sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
+ Date convertedDate = sdf.parse("1970-01-01 " + duration);
+ return convertedDate.getTime();
+ }
+
+ /**
+ * Deletes a file off a device
+ *
+ * @param deviceFileName - path and filename to file to be deleted
+ * @throws DeviceNotAvailableException
+ */
+ private void deleteFileFromDevice(String deviceFileName) throws DeviceNotAvailableException {
+ CLog.i("Delete file from device: " + deviceFileName);
+ getDevice().executeShellCommand("rm -f " + deviceFileName);
+ }
+
+ /**
+ * Extracts duration and bitrate data from a video file
+ *
+ * @throws DeviceNotAvailableException
+ * @throws ParseException
+ */
+ private boolean extractDurationAndBitrateFromVideoFileUsingAvprobe(
+ String deviceFileName, Map<String, String> results)
+ throws DeviceNotAvailableException, ParseException {
+ CLog.i("Check if the recorded file has some data in it: " + deviceFileName);
+ IFileEntry video = getDevice().getFileEntry(deviceFileName);
+ if (video == null || video.getFileEntry().getSizeValue() < 1) {
+ mTestRunHelper.reportFailure("Video Entry info failed");
+ return false;
+ }
+
+ final File recordedVideo = getDevice().pullFile(deviceFileName);
+ CLog.i("Recorded video file: " + recordedVideo.getAbsolutePath());
+
+ CommandResult result =
+ RunUtil.getDefault()
+ .runTimedCmd(
+ CMD_TIMEOUT_MS,
+ AVPROBE_STR,
+ "-loglevel",
+ "info",
+ recordedVideo.getAbsolutePath());
+
+ // Remove file from host machine
+ FileUtil.deleteFile(recordedVideo);
+
+ if (result.getStatus() != CommandStatus.SUCCESS) {
+ mTestRunHelper.reportFailure(AVPROBE_STR + " command failed");
+ return false;
+ }
+
+ String data = result.getStderr();
+ CLog.i("data: " + data);
+ if (data == null || data.isEmpty()) {
+ mTestRunHelper.reportFailure(AVPROBE_STR + " output data is empty");
+ return false;
+ }
+
+ Matcher m = Pattern.compile(REGEX_IS_VIDEO_OK).matcher(data);
+ if (!m.find()) {
+ final String errMsg =
+ "Video verification failed; no matching verification pattern found";
+ mTestRunHelper.reportFailure(errMsg);
+ return false;
+ }
+
+ String duration = m.group(1);
+ long durationInMilliseconds = convertDurationToMilliseconds(duration);
+ String bitrate = m.group(2);
+ long bitrateInKilobits = convertBitrateToKilobits(bitrate);
+
+ results.put(RESULT_KEY_VERIFIED_DURATION, Long.toString(durationInMilliseconds / 1000));
+ results.put(RESULT_KEY_VERIFIED_BITRATE, Long.toString(bitrateInKilobits));
+ return true;
+ }
+
+ /** Extracts recorded number of frames and recorded video length from adb output */
+ private boolean extractVideoDataFromAdbOutput(String adbOutput, Map<String, String> results) {
+ final String regEx = "recorded (\\d+) frames in (\\d+) second";
+ Matcher m = Pattern.compile(regEx).matcher(adbOutput);
+ if (!m.find()) {
+ mTestRunHelper.reportFailure("Regular Expression did not find recorded frames");
+ return false;
+ }
+
+ int recordedFrames = Integer.parseInt(m.group(1));
+ int recordedLength = Integer.parseInt(m.group(2));
+ CLog.i("Recorded frames: " + recordedFrames);
+ CLog.i("Recorded length: " + recordedLength);
+ if (recordedFrames <= 0) {
+ mTestRunHelper.reportFailure("No recorded frames detected");
+ return false;
+ }
+
+ results.put(RESULT_KEY_RECORDED_FRAMES, Integer.toString(recordedFrames));
+ results.put(RESULT_KEY_RECORDED_LENGTH, Integer.toString(recordedLength));
+ return true;
+ }
+
+ /** Generates an adb command from passed in test options */
+ private String generateAdbScreenRecordCommand() {
+ final String SPACE = " ";
+ StringBuilder sb = new StringBuilder(128);
+ sb.append("screenrecord --verbose ").append(getAbsoluteFilename());
+
+ // Add test options if they have been passed in to the test
+ if (mRecordTimeInSeconds != -1) {
+ final long timeLimit = TimeUnit.MILLISECONDS.toSeconds(mRecordTimeInSeconds);
+ sb.append(SPACE).append(OPTION_TIME_LIMIT).append(SPACE).append(timeLimit);
+ }
+
+ if (mVideoSize != null) {
+ sb.append(SPACE).append(OPTION_SIZE).append(SPACE).append(mVideoSize);
+ }
+
+ if (mBitRate != -1) {
+ sb.append(SPACE).append(OPTION_BITRATE).append(SPACE).append(mBitRate);
+ }
+
+ return sb.toString();
+ }
+
+ /** Returns absolute path to device recorded video file */
+ private String getAbsoluteFilename() {
+ return TEST_FILE;
+ }
+
+ /** Performs test initialization steps */
+ private void initializeTest(ITestInvocationListener listener)
+ throws UnsupportedOperationException, DeviceNotAvailableException {
+ TestIdentifier testId = new TestIdentifier(getClass().getCanonicalName(), mRunKey);
+
+ // Allocate helpers
+ mTestRunHelper = new TestRunHelper(listener, testId);
+
+ getDevice().disableKeyguard();
+ getDevice().waitForDeviceAvailable(DEVICE_SYNC_MS);
+
+ CLog.i("Sync device time to host time");
+ getDevice().setDate(new Date());
+ }
+
+ /** Verifies that required software is installed on host machine */
+ private void verifyRequiredSoftwareIsInstalled(HOST_SOFTWARE software) {
+ String swName = "";
+ switch (software) {
+ case AVPROBE:
+ swName = AVPROBE_STR;
+ CommandResult result =
+ RunUtil.getDefault().runTimedCmd(CMD_TIMEOUT_MS, swName, "-version");
+ String output = result.getStdout();
+ if (result.getStatus() == CommandStatus.SUCCESS && output.startsWith(swName)) {
+ return;
+ }
+ break;
+ }
+
+ CLog.i("Program '" + swName + "' not found, report test failure");
+ String hostname = RunUtil.getDefault().runTimedCmd(CMD_TIMEOUT_MS, "hostname").getStdout();
+
+ String err = String.format(AVPROBE_NOT_INSTALLED, (hostname == null) ? "" : hostname);
+ throw new RuntimeException(err);
+ }
+
+ /** Verifies that passed in test parameters are legitimate */
+ private boolean verifyTestParameters() {
+ if (mRecordTimeInSeconds != -1 && mRecordTimeInSeconds < 1) {
+ final String error =
+ String.format(ERR_OPTION_MALFORMED, OPTION_TIME_LIMIT, mRecordTimeInSeconds);
+ mTestRunHelper.reportFailure(error);
+ return false;
+ }
+
+ if (mVideoSize != null) {
+ final String videoSizeRegEx = "\\d+x\\d+";
+ Matcher m = Pattern.compile(videoSizeRegEx).matcher(mVideoSize);
+ if (!m.matches()) {
+ final String error = String.format(ERR_OPTION_MALFORMED, OPTION_SIZE, mVideoSize);
+ mTestRunHelper.reportFailure(error);
+ return false;
+ }
+ }
+
+ if (mBitRate != -1 && mBitRate < 1) {
+ final String error = String.format(ERR_OPTION_MALFORMED, OPTION_BITRATE, mBitRate);
+ mTestRunHelper.reportFailure(error);
+ return false;
+ }
+
+ return true;
+ }
+
+ /** Checks for existence of a file on the device */
+ private static boolean waitForFile(
+ ITestDevice device, final long timeout, final String absoluteFilename)
+ throws DeviceNotAvailableException {
+ final long checkFileStartTime = System.currentTimeMillis();
+
+ do {
+ RunUtil.getDefault().sleep(POLLING_INTERVAL_MS);
+ if (device.doesFileExist(absoluteFilename)) {
+ return true;
+ }
+ } while (System.currentTimeMillis() - checkFileStartTime < timeout);
+
+ return false;
+ }
+}
diff --git a/prod-tests/src/com/android/media/tests/AudioLevelUtility.java b/prod-tests/src/com/android/media/tests/AudioLevelUtility.java
new file mode 100644
index 0000000..3aee1fb
--- /dev/null
+++ b/prod-tests/src/com/android/media/tests/AudioLevelUtility.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.tests;
+
+import com.android.ddmlib.CollectingOutputReceiver;
+import com.android.tradefed.device.DeviceNotAvailableException;
+import com.android.tradefed.device.ITestDevice;
+import com.android.tradefed.log.LogUtil.CLog;
+
+import java.util.concurrent.TimeUnit;
+
+/** Class to provide audio level utility functions for a test device */
+public class AudioLevelUtility {
+
+ public static int extractDeviceAudioLevelFromAdbShell(ITestDevice device)
+ throws DeviceNotAvailableException {
+
+ final String ADB_SHELL_DUMPSYS_AUDIO = "dumpsys audio";
+ final String STREAM_MUSIC = "- STREAM_MUSIC:";
+ final String HEADSET = "(headset): ";
+
+ final CollectingOutputReceiver receiver = new CollectingOutputReceiver();
+
+ device.executeShellCommand(
+ ADB_SHELL_DUMPSYS_AUDIO, receiver, 300, TimeUnit.MILLISECONDS, 1);
+ final String shellOutput = receiver.getOutput();
+ if (shellOutput == null || shellOutput.isEmpty()) {
+ return -1;
+ }
+
+ int audioLevel = -1;
+ int pos = shellOutput.indexOf(STREAM_MUSIC);
+ if (pos != -1) {
+ pos = shellOutput.indexOf(HEADSET, pos);
+ if (pos != -1) {
+ final int start = pos + HEADSET.length();
+ final int stop = shellOutput.indexOf(",", start);
+ if (stop != -1) {
+ final String audioLevelStr = shellOutput.substring(start, stop);
+ try {
+ audioLevel = Integer.parseInt(audioLevelStr);
+ } catch (final NumberFormatException e) {
+ CLog.e(e.getMessage());
+ audioLevel = 1;
+ }
+ }
+ }
+ }
+
+ return audioLevel;
+ }
+}
diff --git a/prod-tests/src/com/android/media/tests/AudioLoopbackImageAnalyzer.java b/prod-tests/src/com/android/media/tests/AudioLoopbackImageAnalyzer.java
new file mode 100644
index 0000000..3b6d767
--- /dev/null
+++ b/prod-tests/src/com/android/media/tests/AudioLoopbackImageAnalyzer.java
@@ -0,0 +1,475 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.tests;
+
+import com.android.tradefed.log.LogUtil.CLog;
+import com.android.tradefed.util.Pair;
+
+import java.awt.image.BufferedImage;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+
+import javax.imageio.ImageIO;
+
+/**
+ * Class that analyzes a screenshot captured from AudioLoopback test. There is a wave form in the
+ * screenshot that has specific colors (TARGET_COLOR). This class extracts those colors and analyzes
+ * wave amplitude, duration and form and make a decision if it's a legitimate wave form or not.
+ */
+public class AudioLoopbackImageAnalyzer {
+
+ // General
+ private static final int HORIZONTAL_THRESHOLD = 10;
+ private static final int VERTICAL_THRESHOLD = 0;
+ private static final int PRIMARY_WAVE_COLOR = 0xFF1E4A99;
+ private static final int SECONDARY_WAVE_COLOR = 0xFF1D4998;
+ private static final int[] TARGET_COLORS_TABLET =
+ new int[] {PRIMARY_WAVE_COLOR, SECONDARY_WAVE_COLOR};
+ private static final int[] TARGET_COLORS_PHONE = new int[] {PRIMARY_WAVE_COLOR};
+
+ private static final float EXPERIMENTAL_WAVE_MAX_TABLET = 69.0f; // In percent of image height
+ private static final float EXPERIMENTAL_WAVE_MAX_PHONE = 32.0f; // In percent of image height
+
+ // Image
+ private static final int TABLET_SCREEN_MIN_WIDTH = 1700;
+ private static final int TABLET_SCREEN_MIN_HEIGHT = 2300;
+
+ // Duration parameters
+ // Max duration should not span more than 2 of the 11 sections in the graph
+ // Min duration should not be less than 1/4 of a section
+ private static final float SECTION_WIDTH_IN_PERCENT = 100 * 1 / 11; // In percent of image width
+ private static final float DURATION_MIN = SECTION_WIDTH_IN_PERCENT / 4;
+
+ // Amplitude
+ // Required numbers of column for a response
+ private static final int MIN_NUMBER_OF_COLUMNS = 4;
+ // The difference between two amplitude columns should not be more than this
+ private static final float MAX_ALLOWED_COLUMN_DECREASE = 0.50f;
+ // Only check MAX_ALLOWED_COLUMN_DECREASE up to this number
+ private static final float MIN_NUMBER_OF_DECREASING_COLUMNS = 8;
+
+ enum Result {
+ PASS,
+ FAIL,
+ UNKNOWN
+ }
+
+ private static class Amplitude {
+ public int maxHeight = -1;
+ public int zeroCounter = 0;
+ }
+
+ public static Pair<Result, String> analyzeImage(String imgFile) {
+ final String FN_TAG = "AudioLoopbackImageAnalyzer.analyzeImage";
+
+ BufferedImage img = null;
+ try {
+ final File f = new File(imgFile);
+ img = ImageIO.read(f);
+ } catch (final IOException e) {
+ CLog.e(e);
+ throw new RuntimeException("Error loading image file '" + imgFile + "'");
+ }
+
+ final int width = img.getWidth();
+ final int height = img.getHeight();
+
+ CLog.i("image width=" + width + ", height=" + height);
+
+ // Compute thresholds and min/max values based on image witdh, height
+ final float waveMax;
+ final int[] targetColors;
+ final int amplitudeCenterMaxDiff;
+ final float maxDuration;
+ final int minNrOfZeroesBetweenAmplitudes;
+
+ if (width >= TABLET_SCREEN_MIN_WIDTH && height >= TABLET_SCREEN_MIN_HEIGHT) {
+ CLog.i("Apply TABLET config values");
+ waveMax = EXPERIMENTAL_WAVE_MAX_TABLET;
+ amplitudeCenterMaxDiff = 40;
+ minNrOfZeroesBetweenAmplitudes = 8;
+ maxDuration = 3 * SECTION_WIDTH_IN_PERCENT;
+ targetColors = TARGET_COLORS_TABLET;
+ } else {
+ waveMax = EXPERIMENTAL_WAVE_MAX_PHONE;
+ amplitudeCenterMaxDiff = 20;
+ minNrOfZeroesBetweenAmplitudes = 5;
+ maxDuration = 2.5f * SECTION_WIDTH_IN_PERCENT;
+ targetColors = TARGET_COLORS_PHONE;
+ }
+
+ // Amplitude
+ // Max height should be about 80% of wave max.
+ // Min height should be about 40% of wave max.
+ final float AMPLITUDE_MAX_VALUE = waveMax * 0.8f;
+ final float AMPLITUDE_MIN_VALUE = waveMax * 0.4f;
+
+ final int[] vertical = new int[height];
+ final int[] horizontal = new int[width];
+
+ projectPixelsToXAxis(img, targetColors, horizontal, width, height);
+ filter(horizontal, HORIZONTAL_THRESHOLD);
+ final Pair<Integer, Integer> durationBounds = getBounds(horizontal);
+ if (!boundsWithinRange(durationBounds, 0, width)) {
+ final String fmt = "%1$s Upper/Lower bound along horizontal axis not found";
+ final String err = String.format(fmt, FN_TAG);
+ CLog.w(err);
+ return new Pair<Result, String>(Result.FAIL, err);
+ }
+
+ projectPixelsToYAxis(img, targetColors, vertical, height, durationBounds);
+ filter(vertical, VERTICAL_THRESHOLD);
+ final Pair<Integer, Integer> amplitudeBounds = getBounds(vertical);
+ if (!boundsWithinRange(durationBounds, 0, height)) {
+ final String fmt = "%1$s: Upper/Lower bound along vertical axis not found";
+ final String err = String.format(fmt, FN_TAG);
+ CLog.w(err);
+ return new Pair<Result, String>(Result.FAIL, err);
+ }
+
+ final int durationLeft = durationBounds.first.intValue();
+ final int durationRight = durationBounds.second.intValue();
+ final int amplitudeTop = amplitudeBounds.first.intValue();
+ final int amplitudeBottom = amplitudeBounds.second.intValue();
+
+ final float amplitude = (amplitudeBottom - amplitudeTop) * 100.0f / height;
+ final float duration = (durationRight - durationLeft) * 100.0f / width;
+
+ CLog.i("AudioLoopbackImageAnalyzer: Amplitude=" + amplitude + ", Duration=" + duration);
+
+ Pair<Result, String> amplResult =
+ analyzeAmplitude(
+ vertical,
+ amplitude,
+ amplitudeTop,
+ amplitudeBottom,
+ AMPLITUDE_MIN_VALUE,
+ AMPLITUDE_MAX_VALUE,
+ amplitudeCenterMaxDiff);
+ if (amplResult.first != Result.PASS) {
+ return amplResult;
+ }
+
+ amplResult =
+ analyzeDuration(
+ horizontal,
+ duration,
+ durationLeft,
+ durationRight,
+ DURATION_MIN,
+ maxDuration,
+ MIN_NUMBER_OF_COLUMNS,
+ minNrOfZeroesBetweenAmplitudes);
+ if (amplResult.first != Result.PASS) {
+ return amplResult;
+ }
+
+ return new Pair<Result, String>(Result.PASS, "");
+ }
+
+ /**
+ * Function to analyze the waveforms duration (how wide it stretches along x-axis) and to make
+ * sure the waveform degrades nicely, i.e. the amplitude columns becomes smaller and smaller
+ * over time.
+ *
+ * @param horizontal - int array with waveforms amplitude values
+ * @param duration - calculated length of duration in percent of screen width
+ * @param durationLeft - index for "horizontal" where waveform starts
+ * @param durationRight - index for "horizontal" where waveform ends
+ * @param durationMin - if duration is below this value, return FAIL and failure reason
+ * @param durationMax - if duration exceed this value, return FAIL and failure reason
+ * @param minNumberOfAmplitudes - min number of amplitudes (columns) in waveform to pass test
+ * @param minNrOfZeroesBetweenAmplitudes - min number of required zeroes between amplitudes
+ * @return - returns result status and failure reason, if any
+ */
+ private static Pair<Result, String> analyzeDuration(
+ int[] horizontal,
+ float duration,
+ int durationLeft,
+ int durationRight,
+ final float durationMin,
+ final float durationMax,
+ final int minNumberOfAmplitudes,
+ final int minNrOfZeroesBetweenAmplitudes) {
+ // This is the tricky one; basically, there should be "columns" that starts
+ // at "durationLeft", with the tallest column to the left and then column
+ // height will drop until it fades completely after "durationRight".
+ final String FN_TAG = "AudioLoopbackImageAnalyzer.analyzeDuration";
+
+ if (duration < durationMin || duration > durationMax) {
+ final String fmt = "%1$s: Duration outside range, value=%2$f, range=(%3$f,%4$f)";
+ return handleError(fmt, FN_TAG, duration, durationMin, durationMax);
+ }
+
+ final ArrayList<Amplitude> amplitudes = new ArrayList<Amplitude>();
+ Amplitude currentAmplitude = null;
+ int zeroCounter = 0;
+
+ for (int i = durationLeft; i < durationRight; i++) {
+ final int v = horizontal[i];
+ if (v == 0) {
+ zeroCounter++;
+ } else {
+ CLog.i("index=" + i + ", v=" + v);
+
+ if (zeroCounter > minNrOfZeroesBetweenAmplitudes) {
+ // Found a new amplitude; update old amplitude
+ // with the "gap" count - i.e. nr of zeroes between the amplitudes
+ if (currentAmplitude != null) {
+ currentAmplitude.zeroCounter = zeroCounter;
+ }
+
+ // Create new Amplitude object
+ currentAmplitude = new Amplitude();
+ amplitudes.add(currentAmplitude);
+ }
+
+ // Reset counter
+ zeroCounter = 0;
+
+ if (currentAmplitude != null && v > currentAmplitude.maxHeight) {
+ currentAmplitude.maxHeight = horizontal[i];
+ }
+ }
+ }
+
+ StringBuilder sb = new StringBuilder(128);
+ int counter = 0;
+ for (final Amplitude a : amplitudes) {
+ CLog.i(
+ sb.append("Amplitude=")
+ .append(counter)
+ .append(", MaxHeight=")
+ .append(a.maxHeight)
+ .append(", ZeroesToNextColumn=")
+ .append(a.zeroCounter)
+ .toString());
+ counter++;
+ sb.setLength(0);
+ }
+
+ if (amplitudes.size() < minNumberOfAmplitudes) {
+ final String fmt = "%1$s: Not enough amplitude columns, value=%2$d";
+ return handleError(fmt, FN_TAG, amplitudes.size());
+ }
+
+ int currentColumnHeight = -1;
+ int oldColumnHeight = -1;
+ for (int i = 0; i < amplitudes.size(); i++) {
+ if (i == 0) {
+ oldColumnHeight = amplitudes.get(i).maxHeight;
+ continue;
+ }
+
+ currentColumnHeight = amplitudes.get(i).maxHeight;
+ if (oldColumnHeight > currentColumnHeight) {
+ // We want at least a good number of columns that declines nicely.
+ // After MIN_NUMBER_OF_DECREASING_COLUMNS, we don't really care that much
+ if (i < MIN_NUMBER_OF_DECREASING_COLUMNS
+ && currentColumnHeight < (oldColumnHeight * MAX_ALLOWED_COLUMN_DECREASE)) {
+ final String fmt =
+ "%1$s: Amplitude column heights declined too much, "
+ + "old=%2$d, new=%3$d, column=%4$d";
+ return handleError(fmt, FN_TAG, oldColumnHeight, currentColumnHeight, i);
+ }
+ oldColumnHeight = currentColumnHeight;
+ } else if (oldColumnHeight == currentColumnHeight) {
+ if (i < MIN_NUMBER_OF_DECREASING_COLUMNS) {
+ final String fmt =
+ "%1$s: Amplitude column heights are same, "
+ + "old=%2$d, new=%3$d, column=%4$d";
+ return handleError(fmt, FN_TAG, oldColumnHeight, currentColumnHeight, i);
+ }
+ } else {
+ final String fmt =
+ "%1$s: Amplitude column heights don't decline, "
+ + "old=%2$d, new=%3$d, column=%4$d";
+ return handleError(fmt, FN_TAG, oldColumnHeight, currentColumnHeight, i);
+ }
+ }
+
+ return new Pair<Result, String>(Result.PASS, "");
+ }
+
+ /**
+ * Function to analyze the waveforms duration (how wide it stretches along x-axis) and to make
+ * sure the waveform degrades nicely, i.e. the amplitude columns becomes smaller and smaller
+ * over time.
+ *
+ * @param vertical - integer array with waveforms amplitude accumulated values
+ * @param amplitude - calculated height of amplitude in percent of screen height
+ * @param amplitudeTop - index in "vertical" array where waveform starts
+ * @param amplitudeBottom - index in "vertical" array where waveform ends
+ * @param amplitudeMin - if amplitude is below this value, return FAIL and failure reason
+ * @param amplitudeMax - if amplitude exceed this value, return FAIL and failure reason
+ * @param amplitudeCenterDiffThreshold - threshold to check that waveform is centered
+ * @return - returns result status and failure reason, if any
+ */
+ private static Pair<Result, String> analyzeAmplitude(
+ int[] vertical,
+ float amplitude,
+ int amplitudeTop,
+ int amplitudeBottom,
+ final float amplitudeMin,
+ final float amplitudeMax,
+ final int amplitudeCenterDiffThreshold) {
+ final String FN_TAG = "AudioLoopbackImageAnalyzer.analyzeAmplitude";
+
+ if (amplitude < amplitudeMin || amplitude > amplitudeMax) {
+ final String fmt = "%1$s: Amplitude outside range, value=%2$f, range=(%3$f,%4$f)";
+ final String err = String.format(fmt, FN_TAG, amplitude, amplitudeMin, amplitudeMax);
+ CLog.w(err);
+ return new Pair<Result, String>(Result.FAIL, err);
+ }
+
+ // Are the amplitude top/bottom centered around the centerline?
+ final int amplitudeCenter = getAmplitudeCenter(vertical, amplitudeTop, amplitudeBottom);
+ final int topDiff = amplitudeCenter - amplitudeTop;
+ final int bottomDiff = amplitudeBottom - amplitudeCenter;
+ final int diff = Math.abs(topDiff - bottomDiff);
+
+ if (diff < amplitudeCenterDiffThreshold) {
+ return new Pair<Result, String>(Result.PASS, "");
+ }
+
+ final String fmt =
+ "%1$s: Amplitude not centered topDiff=%2$d, bottomDiff=%3$d, "
+ + "center=%4$d, diff=%5$d";
+ final String err = String.format(fmt, FN_TAG, topDiff, bottomDiff, amplitudeCenter, diff);
+ CLog.w(err);
+ return new Pair<Result, String>(Result.FAIL, err);
+ }
+
+ private static int getAmplitudeCenter(int[] vertical, int amplitudeTop, int amplitudeBottom) {
+ int max = -1;
+ int center = -1;
+ for (int i = amplitudeTop; i < amplitudeBottom; i++) {
+ if (vertical[i] > max) {
+ max = vertical[i];
+ center = i;
+ }
+ }
+
+ return center;
+ }
+
+ private static void projectPixelsToXAxis(
+ BufferedImage img,
+ final int[] targetColors,
+ int[] horizontal,
+ final int width,
+ final int height) {
+ // "Flatten image" by projecting target colors horizontally,
+ // counting number of found pixels in each column
+ for (int y = 0; y < height; y++) {
+ for (int x = 0; x < width; x++) {
+ final int color = img.getRGB(x, y);
+ for (final int targetColor : targetColors) {
+ if (color == targetColor) {
+ horizontal[x]++;
+ break;
+ }
+ }
+ }
+ }
+ }
+
+ private static void projectPixelsToYAxis(
+ BufferedImage img,
+ final int[] targetColors,
+ int[] vertical,
+ int height,
+ Pair<Integer, Integer> horizontalMinMax) {
+
+ final int min = horizontalMinMax.first.intValue();
+ final int max = horizontalMinMax.second.intValue();
+
+ // "Flatten image" by projecting target colors (between min/max) vertically,
+ // counting number of found pixels in each row
+
+ // Pass over y-axis, restricted to horizontalMin, horizontalMax
+ for (int y = 0; y < height; y++) {
+ for (int x = min; x <= max; x++) {
+ final int color = img.getRGB(x, y);
+ for (final int targetColor : targetColors) {
+ if (color == targetColor) {
+ vertical[y]++;
+ break;
+ }
+ }
+ }
+ }
+ }
+
+ private static Pair<Integer, Integer> getBounds(int[] array) {
+ // Determine min, max
+ int min = -1;
+ for (int i = 0; i < array.length; i++) {
+ if (array[i] > 0) {
+ min = i;
+ break;
+ }
+ }
+
+ int max = -1;
+ for (int i = array.length - 1; i >= 0; i--) {
+ if (array[i] > 0) {
+ max = i;
+ break;
+ }
+ }
+
+ return new Pair<Integer, Integer>(Integer.valueOf(min), Integer.valueOf(max));
+ }
+
+ private static void filter(int[] array, final int threshold) {
+ // Filter horizontal array; set all values < threshold to 0
+ for (int i = 0; i < array.length; i++) {
+ final int v = array[i];
+ if (v != 0 && v <= threshold) {
+ array[i] = 0;
+ }
+ }
+ }
+
+ private static boolean boundsWithinRange(Pair<Integer, Integer> bounds, int low, int high) {
+ return low <= bounds.first.intValue()
+ && bounds.first.intValue() < high
+ && low <= bounds.second.intValue()
+ && bounds.second.intValue() < high;
+ }
+
+ private static Pair<Result, String> handleError(String fmt, String tag, int arg1) {
+ final String err = String.format(fmt, tag, arg1);
+ CLog.w(err);
+ return new Pair<Result, String>(Result.FAIL, err);
+ }
+
+ private static Pair<Result, String> handleError(
+ String fmt, String tag, int arg1, int arg2, int arg3) {
+ final String err = String.format(fmt, tag, arg1, arg2, arg3);
+ CLog.w(err);
+ return new Pair<Result, String>(Result.FAIL, err);
+ }
+
+ private static Pair<Result, String> handleError(
+ String fmt, String tag, float arg1, float arg2, float arg3) {
+ final String err = String.format(fmt, tag, arg1, arg2, arg3);
+ CLog.w(err);
+ return new Pair<Result, String>(Result.FAIL, err);
+ }
+}
diff --git a/prod-tests/src/com/android/media/tests/AudioLoopbackTest.java b/prod-tests/src/com/android/media/tests/AudioLoopbackTest.java
index ce737fd..1d9bab5 100644
--- a/prod-tests/src/com/android/media/tests/AudioLoopbackTest.java
+++ b/prod-tests/src/com/android/media/tests/AudioLoopbackTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2015 The Android Open Source Project
+ * Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -13,47 +13,58 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-
package com.android.media.tests;
-import com.android.ddmlib.CollectingOutputReceiver;
+import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
+
+import com.android.ddmlib.NullOutputReceiver;
import com.android.ddmlib.testrunner.TestIdentifier;
+import com.android.media.tests.AudioLoopbackTestHelper.LogFileType;
+import com.android.media.tests.AudioLoopbackTestHelper.ResultData;
import com.android.tradefed.config.Option;
import com.android.tradefed.device.DeviceNotAvailableException;
import com.android.tradefed.device.ITestDevice;
import com.android.tradefed.log.LogUtil.CLog;
import com.android.tradefed.result.FileInputStreamSource;
import com.android.tradefed.result.ITestInvocationListener;
+import com.android.tradefed.result.InputStreamSource;
import com.android.tradefed.result.LogDataType;
import com.android.tradefed.testtype.IDeviceTest;
import com.android.tradefed.testtype.IRemoteTest;
+import com.android.tradefed.util.FileUtil;
import com.android.tradefed.util.RunUtil;
-import java.io.BufferedReader;
import java.io.File;
-import java.io.FileReader;
import java.io.IOException;
+import java.nio.file.Files;
+import java.util.Arrays;
import java.util.Collections;
+import java.util.Date;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
- * A harness that launches Audio Loopback tool and reports result.
+ * Runs Audio Latency and Audio Glitch test and reports result.
+ *
+ * <p>Strategy for Audio Latency Stress test: RUN test 1000 times. In each iteration, collect result
+ * files from device, parse and collect data in a ResultData object that also keeps track of
+ * location to test files for a particular iteration.
+ *
+ * <p>ANALYZE test results to produce statistics for 1. Latency and Confidence (Min, Max, Mean,
+ * Median) 2. Create CSV file with test run data 3. Print bad test data to host log file 4. Get
+ * number of test runs with valid data to send to dashboard 5. Produce histogram in host log file;
+ * count number of test results that fall into 1 ms wide buckets.
+ *
+ * <p>UPLOAD test results + log files from “bad” runs; i.e. runs that is missing some or all result
+ * data.
*/
public class AudioLoopbackTest implements IDeviceTest, IRemoteTest {
- private static final long TIMEOUT_MS = 5 * 60 * 1000; // 5 min
- private static final long DEVICE_SYNC_MS = 5 * 60 * 1000; // 5 min
- private static final long POLLING_INTERVAL_MS = 5 * 1000;
- private static final int MAX_ATTEMPTS = 3;
- private static final String TESTTYPE_LATENCY = "222";
- private static final String TESTTYPE_BUFFER = "223";
-
- private static final Map<String, String> METRICS_KEY_MAP = createMetricsKeyMap();
-
- private ITestDevice mDevice;
-
+ //===================================================================
+ // TEST OPTIONS
+ //===================================================================
@Option(name = "run-key", description = "Run key for the test")
private String mRunKey = "AudioLoopback";
@@ -66,12 +77,16 @@
@Option(name = "audio-thread", description = "Audio Thread for Loopback app")
private String mAudioThread = "1";
- @Option(name = "audio-level", description = "Audio Level for Loopback app. " +
- "A device specific param which makes waveform in loopback test hit 60% to 80% range")
- private String mAudioLevel = "12";
+ @Option(
+ name = "audio-level",
+ description =
+ "Audio Level for Loopback app. A device specific"
+ + "param which makes waveform in loopback test hit 60% to 80% range"
+ )
+ private String mAudioLevel = "-1";
@Option(name = "test-type", description = "Test type to be executed")
- private String mTestType = TESTTYPE_LATENCY;
+ private String mTestType = TESTTYPE_LATENCY_STR;
@Option(name = "buffer-test-duration", description = "Buffer test duration in seconds")
private String mBufferTestDuration = "10";
@@ -79,212 +94,656 @@
@Option(name = "key-prefix", description = "Key Prefix for reporting")
private String mKeyPrefix = "48000_Mic3_";
+ @Option(name = "iterations", description = "Number of test iterations")
+ private int mIterations = 1;
+
+ @Option(name = "baseline_latency", description = "")
+ private float mBaselineLatency = 0f;
+
+ //===================================================================
+ // CLASS VARIABLES
+ //===================================================================
+ private static final Map<String, String> METRICS_KEY_MAP = createMetricsKeyMap();
+ private Map<LogFileType, LogFileData> mFileDataKeyMap;
+ private ITestDevice mDevice;
+ private TestRunHelper mTestRunHelper;
+ private AudioLoopbackTestHelper mLoopbackTestHelper;
+
+ //===================================================================
+ // CONSTANTS
+ //===================================================================
+ private static final String TESTTYPE_LATENCY_STR = "222";
+ private static final String TESTTYPE_GLITCH_STR = "223";
+ private static final long TIMEOUT_MS = 5 * 60 * 1000; // 5 min
+ private static final long DEVICE_SYNC_MS = 5 * 60 * 1000; // 5 min
+ private static final long POLLING_INTERVAL_MS = 5 * 1000;
+ private static final int MAX_ATTEMPTS = 3;
+ private static final int MAX_NR_OF_LOG_UPLOADS = 100;
+
+ private static final int LATENCY_ITERATIONS_LOWER_BOUND = 1;
+ private static final int LATENCY_ITERATIONS_UPPER_BOUND = 10000;
+ private static final int GLITCH_ITERATIONS_LOWER_BOUND = 1;
+ private static final int GLITCH_ITERATIONS_UPPER_BOUND = 1;
+
private static final String DEVICE_TEMP_DIR_PATH = "/sdcard/";
- private static final String OUTPUT_FILENAME = "output_" + System.currentTimeMillis();
- private static final String OUTPUT_RESULT_TXT_PATH =
- DEVICE_TEMP_DIR_PATH + OUTPUT_FILENAME + ".txt";
- private static final String OUTPUT_PNG_PATH = DEVICE_TEMP_DIR_PATH + OUTPUT_FILENAME + ".png";
- private static final String OUTPUT_WAV_PATH = DEVICE_TEMP_DIR_PATH + OUTPUT_FILENAME + ".wav";
- private static final String OUTPUT_PLAYER_BUFFER_PATH =
- DEVICE_TEMP_DIR_PATH + OUTPUT_FILENAME + "_playerBufferPeriod.txt";
- private static final String OUTPUT_PLAYER_BUFFER_PNG_PATH =
- DEVICE_TEMP_DIR_PATH + OUTPUT_FILENAME + "_playerBufferPeriod.png";
- private static final String OUTPUT_RECORDER_BUFFER_PATH =
- DEVICE_TEMP_DIR_PATH + OUTPUT_FILENAME + "_recorderBufferPeriod.txt";
- private static final String OUTPUT_RECORDER_BUFFER_PNG_PATH =
- DEVICE_TEMP_DIR_PATH + OUTPUT_FILENAME + "_recorderBufferPeriod.png";
- private static final String OUTPUT_GLITCH_PATH =
- DEVICE_TEMP_DIR_PATH + OUTPUT_FILENAME + "_glitchMillis.txt";
+ private static final String FMT_OUTPUT_PREFIX = "output_%1$d_" + System.currentTimeMillis();
+ private static final String FMT_DEVICE_FILENAME = FMT_OUTPUT_PREFIX + "%2$s";
+ private static final String FMT_DEVICE_PATH = DEVICE_TEMP_DIR_PATH + FMT_DEVICE_FILENAME;
+
private static final String AM_CMD =
"am start -n org.drrickorang.loopback/.LoopbackActivity"
+ " --ei SF %s --es FileName %s --ei MicSource %s --ei AudioThread %s"
+ " --ei AudioLevel %s --ei TestType %s --ei BufferTestDuration %s";
- private static Map<String, String> createMetricsKeyMap() {
- Map<String, String> result = new HashMap<>();
- result.put("LatencyMs", "latency_ms");
- result.put("LatencyConfidence", "latency_confidence");
- result.put("Recorder Benchmark", "recorder_benchmark");
- result.put("Recorder Number of Outliers", "recorder_outliers");
- result.put("Player Benchmark", "player_benchmark");
- result.put("Player Number of Outliers", "player_outliers");
- result.put("Total Number of Glitches", "number_of_glitches");
- result.put("kth% Late Recorder Buffer Callbacks", "late_recorder_callbacks");
- result.put("kth% Late Player Buffer Callbacks", "late_player_callbacks");
- result.put("Glitches Per Hour", "glitches_per_hour");
+ private static final String ERR_PARAMETER_OUT_OF_BOUNDS =
+ "Test parameter '%1$s' is out of bounds. Lower limit = %2$d, upper limit = %3$d";
+
+ private static final String KEY_RESULT_LATENCY_MS = "latency_ms";
+ private static final String KEY_RESULT_LATENCY_CONFIDENCE = "latency_confidence";
+ private static final String KEY_RESULT_RECORDER_BENCHMARK = "recorder_benchmark";
+ private static final String KEY_RESULT_RECORDER_OUTLIER = "recorder_outliers";
+ private static final String KEY_RESULT_PLAYER_BENCHMARK = "player_benchmark";
+ private static final String KEY_RESULT_PLAYER_OUTLIER = "player_outliers";
+ private static final String KEY_RESULT_NUMBER_OF_GLITCHES = "number_of_glitches";
+ private static final String KEY_RESULT_RECORDER_BUFFER_CALLBACK = "late_recorder_callbacks";
+ private static final String KEY_RESULT_PLAYER_BUFFER_CALLBACK = "late_player_callbacks";
+ private static final String KEY_RESULT_GLITCHES_PER_HOUR = "glitches_per_hour";
+ private static final String KEY_RESULT_TEST_STATUS = "test_status";
+ private static final String KEY_RESULT_AUDIO_LEVEL = "audio_level";
+ private static final String KEY_RESULT_RMS = "rms";
+ private static final String KEY_RESULT_RMS_AVERAGE = "rms_average";
+ private static final String KEY_RESULT_SAMPLING_FREQUENCY_CONFIDENCE = "sampling_frequency";
+ private static final String KEY_RESULT_PERIOD_CONFIDENCE = "period_confidence";
+ private static final String KEY_RESULT_SAMPLING_BLOCK_SIZE = "block_size";
+
+ private static final LogFileType[] LATENCY_TEST_LOGS = {
+ LogFileType.RESULT,
+ LogFileType.GRAPH,
+ LogFileType.WAVE,
+ LogFileType.PLAYER_BUFFER,
+ LogFileType.PLAYER_BUFFER_HISTOGRAM,
+ LogFileType.PLAYER_BUFFER_PERIOD_TIMES,
+ LogFileType.RECORDER_BUFFER,
+ LogFileType.RECORDER_BUFFER_HISTOGRAM,
+ LogFileType.RECORDER_BUFFER_PERIOD_TIMES,
+ LogFileType.LOGCAT
+ };
+
+ private static final LogFileType[] GLITCH_TEST_LOGS = {
+ LogFileType.RESULT,
+ LogFileType.GRAPH,
+ LogFileType.WAVE,
+ LogFileType.PLAYER_BUFFER,
+ LogFileType.PLAYER_BUFFER_HISTOGRAM,
+ LogFileType.PLAYER_BUFFER_PERIOD_TIMES,
+ LogFileType.RECORDER_BUFFER,
+ LogFileType.RECORDER_BUFFER_HISTOGRAM,
+ LogFileType.RECORDER_BUFFER_PERIOD_TIMES,
+ LogFileType.GLITCHES_MILLIS,
+ LogFileType.HEAT_MAP,
+ LogFileType.LOGCAT
+ };
+
+ /**
+ * The Audio Latency and Audio Glitch test deals with many various types of log files. To be
+ * able to generate log files in a generic manner, this map is provided to get access to log
+ * file properties like log name prefix, log name file extension and log type (leveraging
+ * tradefed class LogDataType, used when uploading log).
+ */
+ private final synchronized Map<LogFileType, LogFileData> getLogFileDataKeyMap() {
+ if (mFileDataKeyMap != null) {
+ return mFileDataKeyMap;
+ }
+
+ final Map<LogFileType, LogFileData> result = new HashMap<LogFileType, LogFileData>();
+
+ // Populate dictionary with info about various types of logfiles
+ LogFileData l = new LogFileData(".txt", "result", LogDataType.TEXT);
+ result.put(LogFileType.RESULT, l);
+
+ l = new LogFileData(".png", "graph", LogDataType.PNG);
+ result.put(LogFileType.GRAPH, l);
+
+ l = new LogFileData(".wav", "wave", LogDataType.UNKNOWN);
+ result.put(LogFileType.WAVE, l);
+
+ l = new LogFileData("_playerBufferPeriod.txt", "player_buffer", LogDataType.TEXT);
+ result.put(LogFileType.PLAYER_BUFFER, l);
+
+ l = new LogFileData("_playerBufferPeriod.png", "player_buffer_histogram", LogDataType.PNG);
+ result.put(LogFileType.PLAYER_BUFFER_HISTOGRAM, l);
+
+ String fileExtension = "_playerBufferPeriodTimes.txt";
+ String uploadName = "player_buffer_period_times";
+ l = new LogFileData(fileExtension, uploadName, LogDataType.TEXT);
+ result.put(LogFileType.PLAYER_BUFFER_PERIOD_TIMES, l);
+
+ l = new LogFileData("_recorderBufferPeriod.txt", "recorder_buffer", LogDataType.TEXT);
+ result.put(LogFileType.RECORDER_BUFFER, l);
+
+ fileExtension = "_recorderBufferPeriod.png";
+ uploadName = "recorder_buffer_histogram";
+ l = new LogFileData(fileExtension, uploadName, LogDataType.PNG);
+ result.put(LogFileType.RECORDER_BUFFER_HISTOGRAM, l);
+
+ fileExtension = "_recorderBufferPeriodTimes.txt";
+ uploadName = "recorder_buffer_period_times";
+ l = new LogFileData(fileExtension, uploadName, LogDataType.TEXT);
+ result.put(LogFileType.RECORDER_BUFFER_PERIOD_TIMES, l);
+
+ l = new LogFileData("_glitchMillis.txt", "glitches_millis", LogDataType.TEXT);
+ result.put(LogFileType.GLITCHES_MILLIS, l);
+
+
+ l = new LogFileData("_heatMap.png", "heat_map", LogDataType.PNG);
+ result.put(LogFileType.HEAT_MAP, l);
+
+ l = new LogFileData(".txt", "logcat", LogDataType.TEXT);
+ result.put(LogFileType.LOGCAT, l);
+
+ mFileDataKeyMap = Collections.unmodifiableMap(result);
+ return mFileDataKeyMap;
+ }
+
+ private static final Map<String, String> createMetricsKeyMap() {
+ final Map<String, String> result = new HashMap<String, String>();
+
+ result.put("LatencyMs", KEY_RESULT_LATENCY_MS);
+ result.put("LatencyConfidence", KEY_RESULT_LATENCY_CONFIDENCE);
+ result.put("SF", KEY_RESULT_SAMPLING_FREQUENCY_CONFIDENCE);
+ result.put("Recorder Benchmark", KEY_RESULT_RECORDER_BENCHMARK);
+ result.put("Recorder Number of Outliers", KEY_RESULT_RECORDER_OUTLIER);
+ result.put("Player Benchmark", KEY_RESULT_PLAYER_BENCHMARK);
+ result.put("Player Number of Outliers", KEY_RESULT_PLAYER_OUTLIER);
+ result.put("Total Number of Glitches", KEY_RESULT_NUMBER_OF_GLITCHES);
+ result.put("kth% Late Recorder Buffer Callbacks", KEY_RESULT_RECORDER_BUFFER_CALLBACK);
+ result.put("kth% Late Player Buffer Callbacks", KEY_RESULT_PLAYER_BUFFER_CALLBACK);
+ result.put("Glitches Per Hour", KEY_RESULT_GLITCHES_PER_HOUR);
+ result.put("Test Status", KEY_RESULT_TEST_STATUS);
+ result.put("AudioLevel", KEY_RESULT_AUDIO_LEVEL);
+ result.put("RMS", KEY_RESULT_RMS);
+ result.put("Average", KEY_RESULT_RMS_AVERAGE);
+ result.put("PeriodConfidence", KEY_RESULT_PERIOD_CONFIDENCE);
+ result.put("BS", KEY_RESULT_SAMPLING_BLOCK_SIZE);
+
return Collections.unmodifiableMap(result);
}
- /**
- * {@inheritDoc}
- */
+ //===================================================================
+ // ENUMS
+ //===================================================================
+ public enum TestType {
+ GLITCH,
+ LATENCY,
+ LATENCY_STRESS,
+ NONE
+ }
+
+ //===================================================================
+ // INNER CLASSES
+ //===================================================================
+ public final class LogFileData {
+ private String fileExtension;
+ private String filePrefix;
+ private LogDataType logDataType;
+
+ private LogFileData(String fileExtension, String filePrefix, LogDataType logDataType) {
+ this.fileExtension = fileExtension;
+ this.filePrefix = filePrefix;
+ this.logDataType = logDataType;
+ }
+ }
+
+ //===================================================================
+ // FUNCTIONS
+ //===================================================================
+
+ /** {@inheritDoc} */
@Override
public void setDevice(ITestDevice device) {
mDevice = device;
}
- /**
- * {@inheritDoc}
- */
+ /** {@inheritDoc} */
@Override
public ITestDevice getDevice() {
return mDevice;
}
/**
- * {@inheritDoc}
+ * Test Entry Point
+ *
+ * <p>{@inheritDoc}
*/
@Override
public void run(ITestInvocationListener listener) throws DeviceNotAvailableException {
- TestIdentifier testId = new TestIdentifier(getClass().getCanonicalName(), mRunKey);
- ITestDevice device = getDevice();
- // Wait device to settle
- RunUtil.getDefault().sleep(DEVICE_SYNC_MS);
- listener.testRunStarted(mRunKey, 0);
- listener.testStarted(testId);
+ initializeTest(listener);
- long testStartTime = System.currentTimeMillis();
- Map<String, String> metrics = new HashMap<>();
+ mTestRunHelper.startTest(1);
- // start measurement and wait for result file
- CollectingOutputReceiver receiver = new CollectingOutputReceiver();
- device.unlockDevice();
- String loopbackCmd = String.format(
- AM_CMD, mSamplingFreq, OUTPUT_FILENAME, mMicSource, mAudioThread,
- mAudioLevel, mTestType, mBufferTestDuration);
- CLog.i("Running cmd: " + loopbackCmd);
- device.executeShellCommand(loopbackCmd, receiver,
- TIMEOUT_MS, TimeUnit.MILLISECONDS, MAX_ATTEMPTS);
- long timeout = Long.parseLong(mBufferTestDuration) * 1000 + TIMEOUT_MS;
- long loopbackStartTime = System.currentTimeMillis();
- boolean isTimedOut = false;
- boolean isResultGenerated = false;
- File loopbackReport = null;
- while (!isResultGenerated && !isTimedOut) {
- RunUtil.getDefault().sleep(POLLING_INTERVAL_MS);
- isTimedOut = (System.currentTimeMillis() - loopbackStartTime >= timeout);
- boolean isResultFileFound = device.doesFileExist(OUTPUT_RESULT_TXT_PATH);
- if (isResultFileFound) {
- loopbackReport = device.pullFile(OUTPUT_RESULT_TXT_PATH);
- if (loopbackReport.length() > 0) {
- isResultGenerated = true;
- }
- }
- }
-
- if (isTimedOut) {
- reportFailure(listener, testId, "Loopback result not found, timed out.");
- return;
- }
- // TODO: fail the test or rerun if the confidence level is too low
- // parse result
- CLog.i("== Loopback result ==");
try {
- Map<String, String> loopbackResult = parseResult(loopbackReport);
- if (loopbackResult == null || loopbackResult.size() == 0) {
- reportFailure(listener, testId, "Failed to parse Loopback result.");
+ if (!verifyTestParameters()) {
return;
}
- metrics = loopbackResult;
- listener.testLog(
- mKeyPrefix + "result",
- LogDataType.TEXT,
- new FileInputStreamSource(loopbackReport));
- File loopbackGraphFile = device.pullFile(OUTPUT_PNG_PATH);
- listener.testLog(
- mKeyPrefix + "graph",
- LogDataType.PNG,
- new FileInputStreamSource(loopbackGraphFile));
- File loopbackWaveFile = device.pullFile(OUTPUT_WAV_PATH);
- listener.testLog(
- mKeyPrefix + "wave",
- LogDataType.UNKNOWN,
- new FileInputStreamSource(loopbackWaveFile));
- if (mTestType.equals(TESTTYPE_BUFFER)) {
- File loopbackPlayerBuffer = device.pullFile(OUTPUT_PLAYER_BUFFER_PATH);
- listener.testLog(
- mKeyPrefix + "player_buffer",
- LogDataType.TEXT,
- new FileInputStreamSource(loopbackPlayerBuffer));
- File loopbackPlayerBufferPng = device.pullFile(OUTPUT_PLAYER_BUFFER_PNG_PATH);
- listener.testLog(
- mKeyPrefix + "player_buffer_histogram",
- LogDataType.PNG,
- new FileInputStreamSource(loopbackPlayerBufferPng));
- File loopbackRecorderBuffer = device.pullFile(OUTPUT_RECORDER_BUFFER_PATH);
- listener.testLog(
- mKeyPrefix + "recorder_buffer",
- LogDataType.TEXT,
- new FileInputStreamSource(loopbackRecorderBuffer));
- File loopbackRecorderBufferPng = device.pullFile(OUTPUT_RECORDER_BUFFER_PNG_PATH);
- listener.testLog(
- mKeyPrefix + "recorder_buffer_histogram",
- LogDataType.PNG,
- new FileInputStreamSource(loopbackRecorderBufferPng));
+ // Stop logcat logging so we can record one logcat log per iteration
+ getDevice().stopLogcat();
- File loopbackGlitch = device.pullFile(OUTPUT_GLITCH_PATH);
- listener.testLog(
- mKeyPrefix + "glitches_millis",
- LogDataType.TEXT,
- new FileInputStreamSource(loopbackGlitch));
+ // Run test iterations
+ for (int i = 0; i < mIterations; i++) {
+ CLog.i("---- Iteration " + i + " of " + (mIterations - 1) + " -----");
+
+ final ResultData d = new ResultData();
+ d.setIteration(i);
+ Map<String, String> resultsDictionary = null;
+ resultsDictionary = runTest(d, getSingleTestTimeoutValue());
+
+ mLoopbackTestHelper.addTestData(d, resultsDictionary);
}
- } catch (IOException ioe) {
- CLog.e(ioe.getMessage());
- reportFailure(listener, testId, "I/O error while parsing Loopback result.");
+
+ mLoopbackTestHelper.processTestData();
+ } finally {
+ Map<String, String> metrics = uploadLogsReturnMetrics(listener);
+ CLog.i("Uploading metrics values:\n" + Arrays.toString(metrics.entrySet().toArray()));
+ mTestRunHelper.endTest(metrics);
+ deleteAllTempFiles();
+ getDevice().startLogcat();
+ }
+ }
+
+ private void initializeTest(ITestInvocationListener listener)
+ throws UnsupportedOperationException, DeviceNotAvailableException {
+
+ mFileDataKeyMap = getLogFileDataKeyMap();
+ TestIdentifier testId = new TestIdentifier(getClass().getCanonicalName(), mRunKey);
+
+ // Allocate helpers
+ mTestRunHelper = new TestRunHelper(listener, testId);
+ mLoopbackTestHelper = new AudioLoopbackTestHelper(mIterations);
+
+ getDevice().disableKeyguard();
+ getDevice().waitForDeviceAvailable(DEVICE_SYNC_MS);
+
+ getDevice().setDate(new Date());
+ CLog.i("syncing device time to host time");
+ }
+
+ private Map<String, String> runTest(ResultData data, final long timeout)
+ throws DeviceNotAvailableException {
+
+ // start measurement and wait for result file
+ final NullOutputReceiver receiver = new NullOutputReceiver();
+
+ final String loopbackCmd = getTestCommand(data.getIteration());
+ CLog.i("Loopback cmd: " + loopbackCmd);
+
+ // Clear logcat
+ // Seems like getDevice().clearLogcat(); doesn't do anything?
+ // Do it through ADB
+ getDevice().executeAdbCommand("logcat", "-c");
+ final long deviceTestStartTime = getDevice().getDeviceDate();
+
+ getDevice()
+ .executeShellCommand(
+ loopbackCmd, receiver, TIMEOUT_MS, TimeUnit.MILLISECONDS, MAX_ATTEMPTS);
+
+ final long loopbackStartTime = System.currentTimeMillis();
+ File loopbackReport = null;
+
+ data.setDeviceTestStartTime(deviceTestStartTime);
+
+ // Try to retrieve result file from device.
+ final String resultFilename = getDeviceFilename(LogFileType.RESULT, data.getIteration());
+ do {
+ RunUtil.getDefault().sleep(POLLING_INTERVAL_MS);
+ if (getDevice().doesFileExist(resultFilename)) {
+ // Store device log files in tmp directory on Host and add to ResultData object
+ storeDeviceFilesOnHost(data);
+ final String reportFilename = data.getLogFile(LogFileType.RESULT);
+ if (reportFilename != null && !reportFilename.isEmpty()) {
+ loopbackReport = new File(reportFilename);
+ if (loopbackReport.length() > 0) {
+ break;
+ }
+ }
+ }
+
+ data.setIsTimedOut(System.currentTimeMillis() - loopbackStartTime >= timeout);
+ } while (!data.hasLogFile(LogFileType.RESULT) && !data.isTimedOut());
+
+ // Grab logcat for iteration
+ final InputStreamSource lc = getDevice().getLogcatSince(deviceTestStartTime);
+ saveLogcatForIteration(data, lc, data.getIteration());
+
+ // Check if test timed out. If so, don't fail the test, but return to upper logic.
+ // We accept certain number of individual test timeouts.
+ if (data.isTimedOut()) {
+ // No device result files retrieved, so no need to parse
+ return null;
+ }
+
+ // parse result
+ Map<String, String> loopbackResult = null;
+
+ try {
+ loopbackResult =
+ AudioLoopbackTestHelper.parseKeyValuePairFromFile(
+ loopbackReport, METRICS_KEY_MAP, mKeyPrefix, "=", "%s: %s");
+ populateResultData(loopbackResult, data);
+
+ // Trust but verify, so get Audio Level from ADB and compare to value from app
+ final int adbAudioLevel =
+ AudioLevelUtility.extractDeviceAudioLevelFromAdbShell(getDevice());
+ if (data.getAudioLevel() != adbAudioLevel) {
+ final String errMsg =
+ String.format(
+ "App Audio Level (%1$d)differs from ADB level (%2$d)",
+ data.getAudioLevel(), adbAudioLevel);
+ mTestRunHelper.reportFailure(errMsg);
+ }
+ } catch (final IOException ioe) {
+ CLog.e(ioe);
+ mTestRunHelper.reportFailure("I/O error while parsing Loopback result.");
+ } catch (final NumberFormatException ne) {
+ CLog.e(ne);
+ mTestRunHelper.reportFailure("Number format error parsing Loopback result.");
+ }
+
+ return loopbackResult;
+ }
+
+ private String getMetricsKey(final String key) {
+ return mKeyPrefix + key;
+ }
+ private final long getSingleTestTimeoutValue() {
+ return Long.parseLong(mBufferTestDuration) * 1000 + TIMEOUT_MS;
+ }
+
+ private Map<String, String> uploadLogsReturnMetrics(ITestInvocationListener listener)
+ throws DeviceNotAvailableException {
+
+ // "resultDictionary" is used to post results to dashboards like BlackBox
+ // "results" contains test logs to be uploaded; i.e. to Sponge
+
+ List<ResultData> results = null;
+ Map<String, String> resultDictionary = new HashMap<String, String>();
+
+ switch (getTestType()) {
+ case GLITCH:
+ resultDictionary = mLoopbackTestHelper.getResultDictionaryForIteration(0);
+ // Upload all test files to be backward compatible with old test
+ results = mLoopbackTestHelper.getAllTestData();
+ break;
+ case LATENCY:
+ {
+ final int nrOfValidResults = mLoopbackTestHelper.processTestData();
+ if (nrOfValidResults == 0) {
+ mTestRunHelper.reportFailure("No good data was collected");
+ } else {
+ // use dictionary collected from single test run
+ resultDictionary = mLoopbackTestHelper.getResultDictionaryForIteration(0);
+ }
+
+ // Upload all test files to be backward compatible with old test
+ results = mLoopbackTestHelper.getAllTestData();
+ }
+ break;
+ case LATENCY_STRESS:
+ {
+ final int nrOfValidResults = mLoopbackTestHelper.processTestData();
+ if (nrOfValidResults == 0) {
+ mTestRunHelper.reportFailure("No good data was collected");
+ } else {
+ mLoopbackTestHelper.populateStressTestMetrics(resultDictionary, mKeyPrefix);
+ }
+
+ results = mLoopbackTestHelper.getWorstResults(MAX_NR_OF_LOG_UPLOADS);
+
+ // Save all test data in a spreadsheet style csv file for post test analysis
+ try {
+ saveResultsAsCSVFile(listener);
+ } catch (final IOException e) {
+ CLog.e(e);
+ }
+ }
+ break;
+ default:
+ break;
+ }
+
+ // Upload relevant logs
+ for (final ResultData d : results) {
+ final LogFileType[] logFileTypes = getLogFileTypesForCurrentTest();
+ for (final LogFileType logType : logFileTypes) {
+ uploadLog(listener, logType, d);
+ }
+ }
+
+ return resultDictionary;
+ }
+
+ private TestType getTestType() {
+ if (mTestType.equals(TESTTYPE_GLITCH_STR)) {
+ if (GLITCH_ITERATIONS_LOWER_BOUND <= mIterations
+ && mIterations <= GLITCH_ITERATIONS_UPPER_BOUND) {
+ return TestType.GLITCH;
+ }
+ }
+
+ if (mTestType.equals(TESTTYPE_LATENCY_STR)) {
+ if (mIterations == 1) {
+ return TestType.LATENCY;
+ }
+
+ if (LATENCY_ITERATIONS_LOWER_BOUND <= mIterations
+ && mIterations <= LATENCY_ITERATIONS_UPPER_BOUND) {
+ return TestType.LATENCY_STRESS;
+ }
+ }
+
+ return TestType.NONE;
+ }
+
+ private boolean verifyTestParameters() {
+ if (getTestType() != TestType.NONE) {
+ return true;
+ }
+
+ if (mTestType.equals(TESTTYPE_GLITCH_STR)
+ && (mIterations < GLITCH_ITERATIONS_LOWER_BOUND
+ || mIterations > GLITCH_ITERATIONS_UPPER_BOUND)) {
+ final String error =
+ String.format(
+ ERR_PARAMETER_OUT_OF_BOUNDS,
+ "iterations",
+ GLITCH_ITERATIONS_LOWER_BOUND,
+ GLITCH_ITERATIONS_UPPER_BOUND);
+ mTestRunHelper.reportFailure(error);
+ return false;
+ }
+
+ if (mTestType.equals(TESTTYPE_LATENCY_STR)
+ && (mIterations < LATENCY_ITERATIONS_LOWER_BOUND
+ || mIterations > LATENCY_ITERATIONS_UPPER_BOUND)) {
+ final String error =
+ String.format(
+ ERR_PARAMETER_OUT_OF_BOUNDS,
+ "iterations",
+ LATENCY_ITERATIONS_LOWER_BOUND,
+ LATENCY_ITERATIONS_UPPER_BOUND);
+ mTestRunHelper.reportFailure(error);
+ return false;
+ }
+
+ return true;
+ }
+
+ private void populateResultData(final Map<String, String> results, ResultData data) {
+ if (results == null || results.isEmpty()) {
return;
}
- long durationMs = System.currentTimeMillis() - testStartTime;
- listener.testEnded(testId, metrics);
- listener.testRunEnded(durationMs, metrics);
- }
-
- /**
- * Report failure with error message specified and fail the test.
- *
- * @param listener
- * @param testId
- * @param errMsg
- */
- private void reportFailure(ITestInvocationListener listener, TestIdentifier testId,
- String errMsg) {
- CLog.e(errMsg);
- listener.testFailed(testId, errMsg);
- listener.testEnded(testId, new HashMap<String, String>());
- listener.testRunFailed(errMsg);
- }
-
- /**
- * Parse result.
- * Format: key = value
- *
- * @param result Loopback app result file
- * @return a {@link HashMap} that contains metrics keys and results
- * @throws IOException
- */
- private Map<String, String> parseResult(File result) throws IOException {
- Map<String, String> resultMap = new HashMap<>();
- BufferedReader br = new BufferedReader(new FileReader(result));
- try {
- String line = br.readLine();
- while (line != null) {
- line = line.trim().replaceAll(" +", " ");
- String[] tokens = line.split("=");
- if (tokens.length >= 2) {
- String metricName = tokens[0].trim();
- String metricValue = tokens[1].trim();
- if (METRICS_KEY_MAP.containsKey(metricName)) {
- CLog.i(String.format("%s: %s", metricName, metricValue));
- resultMap.put(mKeyPrefix + METRICS_KEY_MAP.get(metricName), metricValue);
- }
- }
- line = br.readLine();
- }
- } finally {
- br.close();
+ String key = getMetricsKey(KEY_RESULT_LATENCY_MS);
+ if (results.containsKey(key)) {
+ data.setLatency(Float.parseFloat(results.get(key)));
}
- return resultMap;
+
+ key = getMetricsKey(KEY_RESULT_LATENCY_CONFIDENCE);
+ if (results.containsKey(key)) {
+ data.setConfidence(Float.parseFloat(results.get(key)));
+ }
+
+ key = getMetricsKey(KEY_RESULT_AUDIO_LEVEL);
+ if (results.containsKey(key)) {
+ data.setAudioLevel(Integer.parseInt(results.get(key)));
+ }
+
+ key = getMetricsKey(KEY_RESULT_RMS);
+ if (results.containsKey(key)) {
+ data.setRMS(Float.parseFloat(results.get(key)));
+ }
+
+ key = getMetricsKey(KEY_RESULT_RMS_AVERAGE);
+ if (results.containsKey(key)) {
+ data.setRMSAverage(Float.parseFloat(results.get(key)));
+ }
+
+ key = getMetricsKey(KEY_RESULT_PERIOD_CONFIDENCE);
+ if (results.containsKey(key)) {
+ data.setPeriodConfidence(Float.parseFloat(results.get(key)));
+ }
+
+ key = getMetricsKey(KEY_RESULT_SAMPLING_BLOCK_SIZE);
+ if (results.containsKey(key)) {
+ data.setBlockSize(Integer.parseInt(results.get(key)));
+ }
}
-}
\ No newline at end of file
+
+ private void storeDeviceFilesOnHost(ResultData data) throws DeviceNotAvailableException {
+ final int iteration = data.getIteration();
+ for (final LogFileType log : getLogFileTypesForCurrentTest()) {
+ if (getDevice().doesFileExist(getDeviceFilename(log, iteration))) {
+ final String deviceFileName = getDeviceFilename(log, iteration);
+ final File logFile = getDevice().pullFile(deviceFileName);
+ data.setLogFile(log, logFile.getAbsolutePath());
+ CLog.i("Delete file from device: " + deviceFileName);
+ deleteFileFromDevice(deviceFileName);
+ }
+ }
+ }
+
+ private void deleteAllTempFiles() {
+ for (final ResultData d : mLoopbackTestHelper.getAllTestData()) {
+ final LogFileType[] logFileTypes = getLogFileTypesForCurrentTest();
+ for (final LogFileType logType : logFileTypes) {
+ final String logFilename = d.getLogFile(logType);
+ if (logFilename == null || logFilename.isEmpty()) {
+ CLog.e("Logfile not found for LogFileType=" + logType.name());
+ } else {
+ FileUtil.deleteFile(new File(logFilename));
+ }
+ }
+ }
+ }
+
+ private void deleteFileFromDevice(String deviceFileName) throws DeviceNotAvailableException {
+ getDevice().executeShellCommand("rm -f " + deviceFileName);
+ }
+
+ private final LogFileType[] getLogFileTypesForCurrentTest() {
+ switch (getTestType()) {
+ case GLITCH:
+ return GLITCH_TEST_LOGS;
+ case LATENCY:
+ case LATENCY_STRESS:
+ return LATENCY_TEST_LOGS;
+ default:
+ return null;
+ }
+ }
+
+ private String getKeyPrefixForIteration(int iteration) {
+ if (mIterations == 1) {
+ // If only one run, skip the iteration number
+ return mKeyPrefix;
+ }
+ return mKeyPrefix + iteration + "_";
+ }
+
+ private String getDeviceFilename(LogFileType key, int iteration) {
+ final Map<LogFileType, LogFileData> map = getLogFileDataKeyMap();
+ if (map.containsKey(key)) {
+ final LogFileData data = map.get(key);
+ return String.format(FMT_DEVICE_PATH, iteration, data.fileExtension);
+ }
+ return null;
+ }
+
+ private void uploadLog(ITestInvocationListener listener, LogFileType key, ResultData data) {
+ final Map<LogFileType, LogFileData> map = getLogFileDataKeyMap();
+ if (!map.containsKey(key)) {
+ return;
+ }
+
+ final LogFileData logInfo = map.get(key);
+ final String prefix = getKeyPrefixForIteration(data.getIteration()) + logInfo.filePrefix;
+ final LogDataType logDataType = logInfo.logDataType;
+ final String logFilename = data.getLogFile(key);
+ if (logFilename == null || logFilename.isEmpty()) {
+ CLog.e("Logfile not found for LogFileType=" + key.name());
+ } else {
+ File logFile = new File(logFilename);
+ InputStreamSource iss = new FileInputStreamSource(logFile);
+ listener.testLog(prefix, logDataType, iss);
+
+ // cleanup
+ iss.cancel();
+ }
+ }
+
+ private void saveLogcatForIteration(ResultData data, InputStreamSource logcat, int iteration) {
+ if (logcat == null) {
+ CLog.i("Logcat could not be saved for iteration " + iteration);
+ return;
+ }
+
+ //create a temp file
+ File temp;
+ try {
+ temp = FileUtil.createTempFile("logcat_" + iteration + "_", ".txt");
+ data.setLogFile(LogFileType.LOGCAT, temp.getAbsolutePath());
+
+ // Copy logcat data into temp file
+ Files.copy(logcat.createInputStream(), temp.toPath(), REPLACE_EXISTING);
+ logcat.cancel();
+ } catch (final IOException e) {
+ CLog.i("Error when saving logcat for iteration=" + iteration);
+ CLog.e(e);
+ }
+ }
+
+ private void saveResultsAsCSVFile(ITestInvocationListener listener)
+ throws DeviceNotAvailableException, IOException {
+ final File csvTmpFile = File.createTempFile("audio_test_data", "csv");
+ mLoopbackTestHelper.writeAllResultsToCSVFile(csvTmpFile, getDevice());
+ InputStreamSource iss = new FileInputStreamSource(csvTmpFile);
+ listener.testLog("audio_test_data", LogDataType.JACOCO_CSV, iss);
+
+ // cleanup
+ iss.cancel();
+ csvTmpFile.delete();
+ }
+
+ private String getTestCommand(int currentIteration) {
+ return String.format(
+ AM_CMD,
+ mSamplingFreq,
+ String.format(FMT_OUTPUT_PREFIX, currentIteration),
+ mMicSource,
+ mAudioThread,
+ mAudioLevel,
+ mTestType,
+ mBufferTestDuration);
+ }
+}
diff --git a/prod-tests/src/com/android/media/tests/AudioLoopbackTestHelper.java b/prod-tests/src/com/android/media/tests/AudioLoopbackTestHelper.java
new file mode 100644
index 0000000..4e9c2b0
--- /dev/null
+++ b/prod-tests/src/com/android/media/tests/AudioLoopbackTestHelper.java
@@ -0,0 +1,608 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.tests;
+
+import com.android.media.tests.AudioLoopbackImageAnalyzer.Result;
+import com.android.tradefed.device.DeviceNotAvailableException;
+import com.android.tradefed.device.ITestDevice;
+import com.android.tradefed.log.LogUtil.CLog;
+import com.android.tradefed.util.Pair;
+
+import com.google.common.io.Files;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
+import java.time.Instant;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/** Helper class for AudioLoopbackTest. It keeps runtime data and analytics. */
+public class AudioLoopbackTestHelper {
+
+ private StatisticsData mLatencyStats = null;
+ private StatisticsData mConfidenceStats = null;
+ private ArrayList<ResultData> mAllResults;
+ private ArrayList<ResultData> mGoodResults = new ArrayList<ResultData>();
+ private ArrayList<ResultData> mBadResults = new ArrayList<ResultData>();
+ private ArrayList<Map<String, String>> mResultDictionaries =
+ new ArrayList<Map<String, String>>();
+
+ // Controls acceptable tolerance in ms around median latency
+ private static final double TOLERANCE = 2.0;
+
+ //===================================================================
+ // ENUMS
+ //===================================================================
+ public enum LogFileType {
+ RESULT,
+ WAVE,
+ GRAPH,
+ PLAYER_BUFFER,
+ PLAYER_BUFFER_HISTOGRAM,
+ PLAYER_BUFFER_PERIOD_TIMES,
+ RECORDER_BUFFER,
+ RECORDER_BUFFER_HISTOGRAM,
+ RECORDER_BUFFER_PERIOD_TIMES,
+ GLITCHES_MILLIS,
+ HEAT_MAP,
+ LOGCAT
+ }
+
+ //===================================================================
+ // INNER CLASSES
+ //===================================================================
+ private class StatisticsData {
+ double mMin = 0;
+ double mMax = 0;
+ double mMean = 0;
+ double mMedian = 0;
+
+ @Override
+ public String toString() {
+ return String.format(
+ "min = %1$f, max = %2$f, median=%3$f, mean = %4$f", mMin, mMax, mMedian, mMean);
+ }
+ }
+
+ /** ResultData is an inner class that holds results and logfile info from each test run */
+ public static class ResultData {
+ private Float mLatencyMs;
+ private Float mLatencyConfidence;
+ private Integer mAudioLevel;
+ private Integer mIteration;
+ private Long mDeviceTestStartTime;
+ private boolean mIsTimedOut = false;
+ private HashMap<LogFileType, String> mLogs = new HashMap<LogFileType, String>();
+ private Result mImageAnalyzerResult = Result.UNKNOWN;
+ private String mFailureReason = null;
+
+ // Optional
+ private Float mPeriodConfidence = Float.valueOf(0.0f);
+ private Float mRms = Float.valueOf(0.0f);
+ private Float mRmsAverage = Float.valueOf(0.0f);
+ private Integer mBblockSize = Integer.valueOf(0);
+
+ public float getLatency() {
+ return mLatencyMs.floatValue();
+ }
+
+ public void setLatency(float latencyMs) {
+ this.mLatencyMs = Float.valueOf(latencyMs);
+ }
+
+ public float getConfidence() {
+ return mLatencyConfidence.floatValue();
+ }
+
+ public void setConfidence(float latencyConfidence) {
+ this.mLatencyConfidence = Float.valueOf(latencyConfidence);
+ }
+
+ public float getPeriodConfidence() {
+ return mPeriodConfidence.floatValue();
+ }
+
+ public void setPeriodConfidence(float periodConfidence) {
+ this.mPeriodConfidence = Float.valueOf(periodConfidence);
+ }
+
+ public float getRMS() {
+ return mRms.floatValue();
+ }
+
+ public void setRMS(float rms) {
+ this.mRms = Float.valueOf(rms);
+ }
+
+ public float getRMSAverage() {
+ return mRmsAverage.floatValue();
+ }
+
+ public void setRMSAverage(float rmsAverage) {
+ this.mRmsAverage = Float.valueOf(rmsAverage);
+ }
+
+ public int getAudioLevel() {
+ return mAudioLevel.intValue();
+ }
+
+ public void setAudioLevel(int audioLevel) {
+ this.mAudioLevel = Integer.valueOf(audioLevel);
+ }
+
+ public int getBlockSize() {
+ return mBblockSize.intValue();
+ }
+
+ public void setBlockSize(int blockSize) {
+ this.mBblockSize = Integer.valueOf(blockSize);
+ }
+
+ public int getIteration() {
+ return mIteration.intValue();
+ }
+
+ public void setIteration(int iteration) {
+ this.mIteration = Integer.valueOf(iteration);
+ }
+
+ public long getDeviceTestStartTime() {
+ return mDeviceTestStartTime.longValue();
+ }
+
+ public void setDeviceTestStartTime(long deviceTestStartTime) {
+ this.mDeviceTestStartTime = Long.valueOf(deviceTestStartTime);
+ }
+
+ public Result getImageAnalyzerResult() {
+ return mImageAnalyzerResult;
+ }
+
+ public void setImageAnalyzerResult(Result imageAnalyzerResult) {
+ this.mImageAnalyzerResult = imageAnalyzerResult;
+ }
+
+ public String getFailureReason() {
+ return mFailureReason;
+ }
+
+ public void setFailureReason(String failureReason) {
+ this.mFailureReason = failureReason;
+ }
+
+ public boolean isTimedOut() {
+ return mIsTimedOut;
+ }
+
+ public void setIsTimedOut(boolean isTimedOut) {
+ this.mIsTimedOut = isTimedOut;
+ }
+
+ public String getLogFile(LogFileType log) {
+ return mLogs.get(log);
+ }
+
+ public void setLogFile(LogFileType log, String filename) {
+ CLog.i("setLogFile: type=" + log.name() + ", filename=" + filename);
+ if (!mLogs.containsKey(log) && filename != null && !filename.isEmpty()) {
+ mLogs.put(log, filename);
+ }
+ }
+
+ public boolean hasBadResults() {
+ return hasTimedOut()
+ || hasNoTestResults()
+ || hasNoLatencyResult()
+ || hasNoLatencyConfidence()
+ || mImageAnalyzerResult == Result.FAIL;
+ }
+
+ public boolean hasTimedOut() {
+ return mIsTimedOut;
+ }
+
+ public boolean hasLogFile(LogFileType log) {
+ return mLogs.containsKey(log);
+ }
+
+ public boolean hasNoLatencyResult() {
+ return mLatencyMs == null;
+ }
+
+ public boolean hasNoLatencyConfidence() {
+ return mLatencyConfidence == null;
+ }
+
+ public boolean hasNoTestResults() {
+ return hasNoLatencyConfidence() && hasNoLatencyResult();
+ }
+
+ public static Comparator<ResultData> latencyComparator =
+ new Comparator<ResultData>() {
+ @Override
+ public int compare(ResultData o1, ResultData o2) {
+ return o1.mLatencyMs.compareTo(o2.mLatencyMs);
+ }
+ };
+
+ public static Comparator<ResultData> confidenceComparator =
+ new Comparator<ResultData>() {
+ @Override
+ public int compare(ResultData o1, ResultData o2) {
+ return o1.mLatencyConfidence.compareTo(o2.mLatencyConfidence);
+ }
+ };
+
+ public static Comparator<ResultData> iteratorComparator =
+ new Comparator<ResultData>() {
+ @Override
+ public int compare(ResultData o1, ResultData o2) {
+ return Integer.compare(o1.mIteration, o2.mIteration);
+ }
+ };
+
+ @Override
+ public String toString() {
+ final String NL = "\n";
+ final StringBuilder sb = new StringBuilder(512);
+ sb.append("{").append(NL);
+ sb.append("{\nlatencyMs=").append(mLatencyMs).append(NL);
+ sb.append("latencyConfidence=").append(mLatencyConfidence).append(NL);
+ sb.append("isTimedOut=").append(mIsTimedOut).append(NL);
+ sb.append("iteration=").append(mIteration).append(NL);
+ sb.append("logs=").append(Arrays.toString(mLogs.values().toArray())).append(NL);
+ sb.append("audioLevel=").append(mAudioLevel).append(NL);
+ sb.append("deviceTestStartTime=").append(mDeviceTestStartTime).append(NL);
+ sb.append("rms=").append(mRms).append(NL);
+ sb.append("rmsAverage=").append(mRmsAverage).append(NL);
+ sb.append("}").append(NL);
+ return sb.toString();
+ }
+ }
+
+ public AudioLoopbackTestHelper(int iterations) {
+ mAllResults = new ArrayList<ResultData>(iterations);
+ }
+
+ public void addTestData(ResultData data, Map<String, String> resultDictionary) {
+ mResultDictionaries.add(data.getIteration(), resultDictionary);
+ mAllResults.add(data);
+
+ // Analyze captured screenshot to see if wave form is within reason
+ final String screenshot = data.getLogFile(LogFileType.GRAPH);
+ final Pair<Result, String> result = AudioLoopbackImageAnalyzer.analyzeImage(screenshot);
+ data.setImageAnalyzerResult(result.first);
+ data.setFailureReason(result.second);
+ }
+
+ public final List<ResultData> getAllTestData() {
+ return mAllResults;
+ }
+
+ public Map<String, String> getResultDictionaryForIteration(int i) {
+ return mResultDictionaries.get(i);
+ }
+
+ /**
+ * Returns a list of the worst test result objects, up to maxNrOfWorstResults
+ *
+ * <p>
+ *
+ * <ol>
+ * <li> Tests in the bad results list are added first
+ * <li> If still space, add test results based on low confidence and then tests that are
+ * outside tolerance boundaries
+ * </ol>
+ *
+ * @param maxNrOfWorstResults
+ * @return list of worst test result objects
+ */
+ public List<ResultData> getWorstResults(int maxNrOfWorstResults) {
+ int counter = 0;
+ final ArrayList<ResultData> worstResults = new ArrayList<ResultData>(maxNrOfWorstResults);
+
+ for (final ResultData data : mBadResults) {
+ if (counter < maxNrOfWorstResults) {
+ worstResults.add(data);
+ counter++;
+ }
+ }
+
+ for (final ResultData data : mGoodResults) {
+ if (counter < maxNrOfWorstResults) {
+ boolean failed = false;
+ if (data.getConfidence() < 1.0f) {
+ data.setFailureReason("Low confidence");
+ failed = true;
+ } else if (data.getLatency() < (mLatencyStats.mMedian - TOLERANCE)
+ || data.getLatency() > (mLatencyStats.mMedian + TOLERANCE)) {
+ data.setFailureReason("Latency not within tolerance from median");
+ failed = true;
+ }
+
+ if (failed) {
+ worstResults.add(data);
+ counter++;
+ }
+ }
+ }
+
+ return worstResults;
+ }
+
+ public static Map<String, String> parseKeyValuePairFromFile(
+ File result,
+ final Map<String, String> dictionary,
+ final String resultKeyPrefix,
+ final String splitOn,
+ final String keyValueFormat)
+ throws IOException {
+
+ final Map<String, String> resultMap = new HashMap<String, String>();
+ final BufferedReader br = Files.newReader(result, StandardCharsets.UTF_8);
+
+ try {
+ String line = br.readLine();
+ while (line != null) {
+ line = line.trim().replaceAll(" +", " ");
+ final String[] tokens = line.split(splitOn);
+ if (tokens.length >= 2) {
+ final String key = tokens[0].trim();
+ final String value = tokens[1].trim();
+ if (dictionary.containsKey(key)) {
+ CLog.i(String.format(keyValueFormat, key, value));
+ resultMap.put(resultKeyPrefix + dictionary.get(key), value);
+ }
+ }
+ line = br.readLine();
+ }
+ } finally {
+ br.close();
+ }
+ return resultMap;
+ }
+
+ public int processTestData() {
+
+ // Collect statistics about the test run
+ int nrOfValidResults = 0;
+ double sumLatency = 0;
+ double sumConfidence = 0;
+
+ final int totalNrOfTests = mAllResults.size();
+ mLatencyStats = new StatisticsData();
+ mConfidenceStats = new StatisticsData();
+ mBadResults = new ArrayList<ResultData>();
+ mGoodResults = new ArrayList<ResultData>(totalNrOfTests);
+
+ // Copy all results into Good results list
+ mGoodResults.addAll(mAllResults);
+
+ for (final ResultData data : mAllResults) {
+ if (data.hasBadResults()) {
+ mBadResults.add(data);
+ continue;
+ }
+ // Get mean values
+ sumLatency += data.getLatency();
+ sumConfidence += data.getConfidence();
+ }
+
+ if (!mBadResults.isEmpty()) {
+ analyzeBadResults(mBadResults, mAllResults.size());
+ }
+
+ // Remove bad runs from result array
+ mGoodResults.removeAll(mBadResults);
+
+ // Fail test immediately if we don't have ANY good results
+ if (mGoodResults.isEmpty()) {
+ return 0;
+ }
+
+ nrOfValidResults = mGoodResults.size();
+
+ // ---- LATENCY: Get Median, Min and Max values ----
+ Collections.sort(mGoodResults, ResultData.latencyComparator);
+
+ mLatencyStats.mMin = mGoodResults.get(0).mLatencyMs;
+ mLatencyStats.mMax = mGoodResults.get(nrOfValidResults - 1).mLatencyMs;
+ mLatencyStats.mMean = sumLatency / nrOfValidResults;
+ // Is array even or odd numbered
+ if (nrOfValidResults % 2 == 0) {
+ final int middle = nrOfValidResults / 2;
+ final float middleLeft = mGoodResults.get(middle - 1).mLatencyMs;
+ final float middleRight = mGoodResults.get(middle).mLatencyMs;
+ mLatencyStats.mMedian = (middleLeft + middleRight) / 2.0f;
+ } else {
+ // It's an odd-numbered array, just grab the middle value
+ mLatencyStats.mMedian = mGoodResults.get(nrOfValidResults / 2).mLatencyMs;
+ }
+
+ // ---- CONFIDENCE: Get Median, Min and Max values ----
+ Collections.sort(mGoodResults, ResultData.confidenceComparator);
+
+ mConfidenceStats.mMin = mGoodResults.get(0).mLatencyConfidence;
+ mConfidenceStats.mMax = mGoodResults.get(nrOfValidResults - 1).mLatencyConfidence;
+ mConfidenceStats.mMean = sumConfidence / nrOfValidResults;
+ // Is array even or odd numbered
+ if (nrOfValidResults % 2 == 0) {
+ final int middle = nrOfValidResults / 2;
+ final float middleLeft = mGoodResults.get(middle - 1).mLatencyConfidence;
+ final float middleRight = mGoodResults.get(middle).mLatencyConfidence;
+ mConfidenceStats.mMedian = (middleLeft + middleRight) / 2.0f;
+ } else {
+ // It's an odd numbered array, just grab the middle value
+ mConfidenceStats.mMedian = mGoodResults.get(nrOfValidResults / 2).mLatencyConfidence;
+ }
+
+ for (final ResultData data : mGoodResults) {
+ // Check if within Latency Tolerance
+ if (data.getConfidence() < 1.0f) {
+ data.setFailureReason("Low confidence");
+ } else if (data.getLatency() < (mLatencyStats.mMedian - TOLERANCE)
+ || data.getLatency() > (mLatencyStats.mMedian + TOLERANCE)) {
+ data.setFailureReason("Latency not within tolerance from median");
+ }
+ }
+
+ // Create histogram
+ // Strategy: Create buckets based on whole ints, like 16 ms, 17 ms, 18 ms etc. Count how
+ // many tests fall into each bucket. Just cast the float to an int, no rounding up/down
+ // required.
+ final int[] histogram = new int[(int) mLatencyStats.mMax + 1];
+ for (final ResultData rd : mGoodResults) {
+ // Increase value in bucket
+ histogram[(int) (rd.mLatencyMs.floatValue())]++;
+ }
+
+ CLog.i("========== VALID RESULTS ============================================");
+ CLog.i(String.format("Valid tests: %1$d of %2$d", nrOfValidResults, totalNrOfTests));
+ CLog.i("Latency: " + mLatencyStats.toString());
+ CLog.i("Confidence: " + mConfidenceStats.toString());
+ CLog.i("========== HISTOGRAM ================================================");
+ for (int i = 0; i < histogram.length; i++) {
+ if (histogram[i] > 0) {
+ CLog.i(String.format("%1$01d ms => %2$d", i, histogram[i]));
+ }
+ }
+
+ // VERIFY the good results by running image analysis on the
+ // screenshot of the incoming audio waveform
+
+ return nrOfValidResults;
+ }
+
+ public void writeAllResultsToCSVFile(File csvFile, ITestDevice device)
+ throws DeviceNotAvailableException, FileNotFoundException,
+ UnsupportedEncodingException {
+
+ final String deviceType = device.getProperty("ro.build.product");
+ final String buildId = device.getBuildAlias();
+ final String serialNumber = device.getSerialNumber();
+
+ // Sort data on iteration
+ Collections.sort(mAllResults, ResultData.iteratorComparator);
+
+ final StringBuilder sb = new StringBuilder(256);
+ final PrintWriter writer = new PrintWriter(csvFile, StandardCharsets.UTF_8.name());
+ final String SEPARATOR = ",";
+
+ // Write column labels
+ writer.println(
+ "Device Time,Device Type,Build Id,Serial Number,Iteration,Latency,"
+ + "Confidence,Period Confidence,Block Size,Audio Level,RMS,RMS Average,"
+ + "Image Analysis,Failure Reason");
+ for (final ResultData data : mAllResults) {
+ final Instant instant = Instant.ofEpochSecond(data.mDeviceTestStartTime);
+
+ sb.append(instant).append(SEPARATOR);
+ sb.append(deviceType).append(SEPARATOR);
+ sb.append(buildId).append(SEPARATOR);
+ sb.append(serialNumber).append(SEPARATOR);
+ sb.append(data.getIteration()).append(SEPARATOR);
+ sb.append(data.getLatency()).append(SEPARATOR);
+ sb.append(data.getConfidence()).append(SEPARATOR);
+ sb.append(data.getPeriodConfidence()).append(SEPARATOR);
+ sb.append(data.getBlockSize()).append(SEPARATOR);
+ sb.append(data.getAudioLevel()).append(SEPARATOR);
+ sb.append(data.getRMS()).append(SEPARATOR);
+ sb.append(data.getRMSAverage()).append(SEPARATOR);
+ sb.append(data.getImageAnalyzerResult().name()).append(SEPARATOR);
+ sb.append(data.getFailureReason());
+
+ writer.println(sb.toString());
+
+ sb.setLength(0);
+ }
+ writer.close();
+ }
+
+ private void analyzeBadResults(ArrayList<ResultData> badResults, int totalNrOfTests) {
+ int testNoData = 0;
+ int testTimeoutCounts = 0;
+ int testResultsNotFoundCounts = 0;
+ int testWithoutLatencyResultCount = 0;
+ int testWithoutConfidenceResultCount = 0;
+
+ for (final ResultData data : badResults) {
+ if (data.hasTimedOut()) {
+ testTimeoutCounts++;
+ testNoData++;
+ continue;
+ }
+
+ if (data.hasNoTestResults()) {
+ testResultsNotFoundCounts++;
+ testNoData++;
+ continue;
+ }
+
+ if (data.hasNoLatencyResult()) {
+ testWithoutLatencyResultCount++;
+ testNoData++;
+ continue;
+ }
+
+ if (data.hasNoLatencyConfidence()) {
+ testWithoutConfidenceResultCount++;
+ testNoData++;
+ continue;
+ }
+ }
+
+ CLog.i("========== BAD RESULTS ============================================");
+ CLog.i(String.format("No Data: %1$d of %2$d", testNoData, totalNrOfTests));
+ CLog.i(String.format("Timed out: %1$d of %2$d", testTimeoutCounts, totalNrOfTests));
+ CLog.i(
+ String.format(
+ "No results: %1$d of %2$d", testResultsNotFoundCounts, totalNrOfTests));
+ CLog.i(
+ String.format(
+ "No Latency results: %1$d of %2$d",
+ testWithoutLatencyResultCount, totalNrOfTests));
+ CLog.i(
+ String.format(
+ "No Confidence results: %1$d of %2$d",
+ testWithoutConfidenceResultCount, totalNrOfTests));
+ }
+
+ /** Generates metrics dictionary for stress test */
+ public void populateStressTestMetrics(
+ Map<String, String> metrics, final String resultKeyPrefix) {
+ metrics.put(resultKeyPrefix + "total_nr_of_tests", Integer.toString(mAllResults.size()));
+ metrics.put(resultKeyPrefix + "nr_of_good_tests", Integer.toString(mGoodResults.size()));
+ metrics.put(resultKeyPrefix + "latency_max", Double.toString(mLatencyStats.mMax));
+ metrics.put(resultKeyPrefix + "latency_min", Double.toString(mLatencyStats.mMin));
+ metrics.put(resultKeyPrefix + "latency_mean", Double.toString(mLatencyStats.mMean));
+ metrics.put(resultKeyPrefix + "latency_median", Double.toString(mLatencyStats.mMedian));
+ metrics.put(resultKeyPrefix + "confidence_max", Double.toString(mConfidenceStats.mMax));
+ metrics.put(resultKeyPrefix + "confidence_min", Double.toString(mConfidenceStats.mMin));
+ metrics.put(resultKeyPrefix + "confidence_mean", Double.toString(mConfidenceStats.mMean));
+ metrics.put(
+ resultKeyPrefix + "confidence_median", Double.toString(mConfidenceStats.mMedian));
+ }
+}
diff --git a/prod-tests/src/com/android/media/tests/TestRunHelper.java b/prod-tests/src/com/android/media/tests/TestRunHelper.java
new file mode 100644
index 0000000..f752cf3
--- /dev/null
+++ b/prod-tests/src/com/android/media/tests/TestRunHelper.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.media.tests;
+
+import com.android.ddmlib.testrunner.TestIdentifier;
+import com.android.tradefed.log.LogUtil.CLog;
+import com.android.tradefed.result.ITestInvocationListener;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/** Generic helper class for tests */
+public class TestRunHelper {
+
+ private long mTestStartTime = -1;
+ private long mTestStopTime = -1;
+ private ITestInvocationListener mListener;
+ private TestIdentifier mTestId;
+
+ public TestRunHelper(ITestInvocationListener listener, TestIdentifier testId) {
+ mListener = listener;
+ mTestId = testId;
+ }
+
+ public long getTotalTestTime() {
+ return mTestStopTime - mTestStartTime;
+ }
+
+ public void reportFailure(String errMsg) {
+ CLog.e(errMsg);
+ mListener.testFailed(mTestId, errMsg);
+ mListener.testEnded(mTestId, new HashMap<String, String>());
+ mListener.testRunFailed(errMsg);
+ }
+
+ /** @param resultDictionary result metrics reported with the test-ended events */
+ public void endTest(Map<String, String> resultDictionary) {
+ mTestStopTime = System.currentTimeMillis();
+ mListener.testEnded(mTestId, resultDictionary);
+ mListener.testRunEnded(getTotalTestTime(), resultDictionary);
+ }
+
+ public void startTest(int numberOfTests) {
+ mListener.testRunStarted(mTestId.getTestName(), numberOfTests);
+ mListener.testStarted(mTestId);
+ mTestStartTime = System.currentTimeMillis();
+ }
+}
diff --git a/prod-tests/src/com/android/media/tests/VideoMultimeterRunner.java b/prod-tests/src/com/android/media/tests/VideoMultimeterRunner.java
index 4ed903d..13fd4d1 100644
--- a/prod-tests/src/com/android/media/tests/VideoMultimeterRunner.java
+++ b/prod-tests/src/com/android/media/tests/VideoMultimeterRunner.java
@@ -78,7 +78,8 @@
String calibrationValue = (mCalibrationMap.containsKey(deviceSerial) ?
mCalibrationMap.get(deviceSerial) : null);
- if (moveArm(deviceSerial) && setupTestEnv(calibrationValue)) {
+ if (mDebugWithoutHardware
+ || (moveArm(deviceSerial) && setupTestEnv(calibrationValue))) {
runMultimeterTest(listener, metrics);
} else {
listener.testFailed(testId, "Failed to set up environment");
diff --git a/prod-tests/src/com/android/media/tests/VideoMultimeterTest.java b/prod-tests/src/com/android/media/tests/VideoMultimeterTest.java
index c287bed..038803f 100644
--- a/prod-tests/src/com/android/media/tests/VideoMultimeterTest.java
+++ b/prod-tests/src/com/android/media/tests/VideoMultimeterTest.java
@@ -79,6 +79,14 @@
"filename of calibration video")
private String mCaliVideoDevicePath = "video_cali.mp4";
+ @Option(
+ name = "debug-without-hardware",
+ description = "Use option to debug test without having specialized hardware",
+ importance = Importance.NEVER,
+ mandatory = false
+ )
+ protected boolean mDebugWithoutHardware = false;
+
static final String ROTATE_LANDSCAPE = "content insert --uri content://settings/system"
+ " --bind name:s:user_rotation --bind value:i:1";
@@ -105,8 +113,12 @@
static final String VIDEO_FRAME_DATA_PATTERN = "OK\\s+\\d+;\\s*(-?\\d+);\\s*[a-z]+;\\s*(\\d+)";
// Regex for: "OK (time); (frame duration); (marker color); (total dropped frames); (lipsync)"
+ // results in: $1 == ts, $2 == lipsync
static final String LIPSYNC_DATA_PATTERN =
- "OK\\s+\\d+;\\s*\\d+;\\s*[a-z]+;\\s*\\d+;\\s*(-?\\d+)";
+ "OK\\s+(\\d+);\\s*\\d+;\\s*[a-z]+;\\s*\\d+;\\s*(-?\\d+)";
+ // ts dur color missed latency
+ static final int LIPSYNC_SIGNAL = 2000000; // every 2 seconds
+ static final int LIPSYNC_SIGNAL_MIN = 1500000; // must be at least 1.5 seconds after prev
ITestDevice mDevice;
@@ -207,16 +219,15 @@
cr = getRunUtil().runTimedCmd(COMMAND_TIMEOUT_MS, mMeterUtilPath, CMD_STOP_MEASUREMENT);
getRunUtil().sleep(3 * 1000);
CLog.i("Stopping measurement: " + cr.getStdout());
- getDevice().unlockDevice();
- getRunUtil().sleep(3 * 1000);
if (caliValues == null) {
return doCalibration();
} else {
CLog.i("Setting calibration values: " + caliValues);
- cr = getRunUtil().runTimedCmd(COMMAND_TIMEOUT_MS, mMeterUtilPath,
- CMD_SET_CALIBRATION_VALS + " " + caliValues);
- if (cr.getStdout().contains("OK")) {
+ final String calibrationValues = CMD_SET_CALIBRATION_VALS + " " + caliValues;
+ cr = getRunUtil().runTimedCmd(COMMAND_TIMEOUT_MS, mMeterUtilPath, calibrationValues);
+ final String response = mDebugWithoutHardware ? "OK" : cr.getStdout();
+ if (response != null && response.startsWith("OK")) {
CLog.i("Calibration values are set to: " + caliValues);
return true;
} else {
@@ -230,7 +241,6 @@
throws DeviceNotAvailableException {
CommandResult cr;
getDevice().clearErrorDialogs();
- getDevice().unlockDevice();
getRunUtil().sleep(mWaitTimeBetweenRuns);
// play test video
@@ -425,15 +435,25 @@
// parse lipsync results (the audio and video synchronization offset)
// format: "OK (time); (frame duration); (marker color); (total dropped frames); (lipsync)"
- p = Pattern.compile(LIPSYNC_DATA_PATTERN);
if (lipsync) {
ArrayList<Integer> lipsyncVals = new ArrayList<>();
StringBuilder lipsyncValsStr = new StringBuilder("[");
long lipsyncSum = 0;
+ int lipSyncLastTime = -1;
+
+ Pattern pLip = Pattern.compile(LIPSYNC_DATA_PATTERN);
for (int i = 0; i < lines.length; i++) {
- m = p.matcher(lines[i].trim());
+ m = pLip.matcher(lines[i].trim());
if (m.matches()) {
- int lipSyncVal = Integer.parseInt(m.group(1));
+ int lipSyncTime = Integer.parseInt(m.group(1));
+ int lipSyncVal = Integer.parseInt(m.group(2));
+ if (lipSyncLastTime != -1) {
+ if ((lipSyncTime - lipSyncLastTime) < LIPSYNC_SIGNAL_MIN) {
+ continue; // ignore the early/spurious one
+ }
+ }
+ lipSyncLastTime = lipSyncTime;
+
lipsyncVals.add(lipSyncVal);
lipsyncValsStr.append(lipSyncVal);
lipsyncValsStr.append(", ");
diff --git a/prod-tests/src/com/android/monkey/AnrReportGenerator.java b/prod-tests/src/com/android/monkey/AnrReportGenerator.java
index 271bbcb..68525c0 100644
--- a/prod-tests/src/com/android/monkey/AnrReportGenerator.java
+++ b/prod-tests/src/com/android/monkey/AnrReportGenerator.java
@@ -156,10 +156,10 @@
command);
if (cr.getStatus() == CommandStatus.SUCCESS) {
// Test log the generated HTML report
- InputStreamSource source = new FileInputStreamSource(htmlReport);
- logger.testLog("monkey-anr-report", LogDataType.HTML, source);
+ try (InputStreamSource source = new FileInputStreamSource(htmlReport)) {
+ logger.testLog("monkey-anr-report", LogDataType.HTML, source);
+ }
// Clean up and declare success!
- source.cancel();
FileUtil.deleteFile(htmlReport);
return true;
} else {
diff --git a/prod-tests/src/com/android/monkey/MonkeyBase.java b/prod-tests/src/com/android/monkey/MonkeyBase.java
index c4f4f9d..c99c33e 100644
--- a/prod-tests/src/com/android/monkey/MonkeyBase.java
+++ b/prod-tests/src/com/android/monkey/MonkeyBase.java
@@ -445,11 +445,8 @@
protected void takeScreenshot(ITestInvocationListener listener, String screenshotName)
throws DeviceNotAvailableException {
if (mScreenshot) {
- InputStreamSource screenshot = mTestDevice.getScreenshot("JPEG");
- try {
+ try (InputStreamSource screenshot = mTestDevice.getScreenshot("JPEG")) {
listener.testLog(screenshotName, LogDataType.JPEG, screenshot);
- } finally {
- screenshot.cancel();
}
}
}
@@ -505,9 +502,7 @@
*/
protected MonkeyLogItem createMonkeyLog(ITestInvocationListener listener, String monkeyLogName,
String log) {
-
- InputStreamSource source = new ByteArrayInputStreamSource(log.getBytes());
- try {
+ try (InputStreamSource source = new ByteArrayInputStreamSource(log.getBytes())) {
if (mAnrGen != null) {
mAnrGen.setMonkeyLogInfo(source);
}
@@ -518,8 +513,6 @@
CLog.e("Could not process monkey log.");
CLog.e(e);
return null;
- } finally {
- source.cancel();
}
}
diff --git a/prod-tests/src/com/android/monkey/MonkeyPairedBase.java b/prod-tests/src/com/android/monkey/MonkeyPairedBase.java
index d184ca5..1739916 100644
--- a/prod-tests/src/com/android/monkey/MonkeyPairedBase.java
+++ b/prod-tests/src/com/android/monkey/MonkeyPairedBase.java
@@ -45,8 +45,6 @@
private ITestDevice mCompanion;
private List<ITestDevice> mDeviceList = new ArrayList<>();
- private Map<ITestDevice, IBuildInfo> mInfoMap = null;
- private Object mCompanionLock = new Object();
private ScheduledExecutorService mScheduler;
/**
@@ -104,6 +102,5 @@
public void setDeviceInfos(Map<ITestDevice, IBuildInfo> deviceInfos) {
ClockworkUtils cwUtils = new ClockworkUtils();
mCompanion = cwUtils.setUpMultiDevice(deviceInfos, mDeviceList);
- mInfoMap = deviceInfos;
}
}
\ No newline at end of file
diff --git a/prod-tests/src/com/android/performance/tests/AppTransitionTests.java b/prod-tests/src/com/android/performance/tests/AppTransitionTests.java
index 10dab9d..667b037 100644
--- a/prod-tests/src/com/android/performance/tests/AppTransitionTests.java
+++ b/prod-tests/src/com/android/performance/tests/AppTransitionTests.java
@@ -667,12 +667,10 @@
}
/**
- * Reverse the given appName,componentName info map to
- * componenetName,appName info map.
- *
- * @return
+ * Reverse and return the given appName,componentName info map to componentName,appName
+ * info map.
*/
- private Map<String, String> reverseAppCmpInfoMap(Map<String,String> appNameCmpNameMap) {
+ private Map<String, String> reverseAppCmpInfoMap(Map<String, String> appNameCmpNameMap) {
Map<String, String> cmpNameAppNameMap = new HashMap<String, String>();
for (Map.Entry<String, String> entry : appNameCmpNameMap.entrySet()) {
cmpNameAppNameMap.put(entry.getValue(), entry.getKey());
diff --git a/prod-tests/src/com/android/performance/tests/EmmcPerformanceTest.java b/prod-tests/src/com/android/performance/tests/EmmcPerformanceTest.java
index 91473ac..81eebc2 100644
--- a/prod-tests/src/com/android/performance/tests/EmmcPerformanceTest.java
+++ b/prod-tests/src/com/android/performance/tests/EmmcPerformanceTest.java
@@ -376,7 +376,7 @@
if (mTestDevice.enableAdbRoot()) {
String output = mTestDevice.executeShellCommand("vdc dump | grep cache");
CLog.d("Output from shell command 'vdc dump | grep cache': %s", output);
- String[] segments = output.split(" ");
+ String[] segments = output.split("\\s+");
if (segments.length >= 3) {
mCache = segments[2];
} else {
@@ -394,10 +394,10 @@
// Filesystem 1K-blocks Used Available Use% Mounted on
// /dev/block/mmcblk0p34 60400 56 60344 1% /cache
String output = mTestDevice.executeShellCommand("df cache");
- CLog.d(String.format("Output from shell command 'df cache': %s", output));
+ CLog.d(String.format("Output from shell command 'df cache':\n%s", output));
String[] lines = output.split("\r?\n");
if (lines.length >= 2) {
- String[] segments = lines[1].split(" ");
+ String[] segments = lines[1].split("\\s+");
if (segments.length >= 2) {
if (lines[0].toLowerCase().contains("1k-blocks")) {
mCachePartitionSize = Integer.parseInt(segments[1]) / 1024;
diff --git a/prod-tests/src/com/android/performance/tests/HermeticLaunchTest.java b/prod-tests/src/com/android/performance/tests/HermeticLaunchTest.java
index 57758a2..fefc954 100644
--- a/prod-tests/src/com/android/performance/tests/HermeticLaunchTest.java
+++ b/prod-tests/src/com/android/performance/tests/HermeticLaunchTest.java
@@ -43,7 +43,6 @@
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
-import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collection;
@@ -254,10 +253,7 @@
*/
public void analyzeLogCatData(Set<String> activitySet) {
Map<String, List<Integer>> amLaunchTimes = new HashMap<>();
- InputStreamSource input = mLogcat.getLogcatData();
- InputStream inputStream = input.createInputStream();
- InputStreamReader streamReader = new InputStreamReader(inputStream);
- BufferedReader br = new BufferedReader(streamReader);
+
Map<Pattern, String> activityPatternMap = new HashMap<>();
Matcher match = null;
String line;
@@ -279,7 +275,9 @@
activityName);
}
- try {
+ try (InputStreamSource input = mLogcat.getLogcatData();
+ BufferedReader br =
+ new BufferedReader(new InputStreamReader(input.createInputStream()))) {
while ((line = br.readLine()) != null) {
/*
* Launch entry needed otherwise we will end up in comparing all the lines for all
@@ -304,11 +302,6 @@
}
} catch (IOException io) {
CLog.e(io);
- } finally {
- StreamUtil.cancel(input);
- StreamUtil.close(inputStream);
- StreamUtil.close(streamReader);
- StreamUtil.close(br);
}
// Verify logcat data
@@ -341,7 +334,8 @@
/**
* To extract the launch time displayed in given line
- * @param currentLine
+ *
+ * @param duration
* @return
*/
public int extractLaunchTime(String duration) {
@@ -382,9 +376,10 @@
splitName[splitName.length - 1]);
// Upload the file if needed
if (msaveAtrace) {
- FileInputStreamSource stream = new FileInputStreamSource(currentAtraceFile);
- listener.testLog(currentAtraceFile.getName(), LogDataType.TEXT, stream);
- stream.cancel();
+ try (FileInputStreamSource stream =
+ new FileInputStreamSource(currentAtraceFile)) {
+ listener.testLog(currentAtraceFile.getName(), LogDataType.TEXT, stream);
+ }
}
// Remove the atrace files
FileUtil.deleteFile(currentAtraceFile);
diff --git a/prod-tests/src/com/android/performance/tests/StartupMetricsTest.java b/prod-tests/src/com/android/performance/tests/StartupMetricsTest.java
index 6e391f3..195f227 100644
--- a/prod-tests/src/com/android/performance/tests/StartupMetricsTest.java
+++ b/prod-tests/src/com/android/performance/tests/StartupMetricsTest.java
@@ -127,16 +127,13 @@
BugreportParser parser = new BugreportParser();
BugreportItem bugreport = null;
// Retrieve bugreport
- InputStreamSource bugSource = mTestDevice.getBugreport();
- try {
+ try (InputStreamSource bugSource = mTestDevice.getBugreport()) {
listener.testLog(BUGREPORT_LOG_NAME, LogDataType.BUGREPORT, bugSource);
bugreport = parser.parse(new BufferedReader(new InputStreamReader(
bugSource.createInputStream())));
} catch (IOException e) {
Assert.fail(String.format("Failed to fetch and parse bugreport for device %s: %s",
mTestDevice.getSerialNumber(), e));
- } finally {
- bugSource.cancel();
}
if (bugreport != null) {
@@ -187,11 +184,9 @@
* Aggregates the procrank data by the pss, rss, and uss values.
*
* @param listener the {@link ITestInvocationListener} of test results
- * @param procRankMap the {@link Map} parsed from brillopad for the procrank
- * section
+ * @param procrank the {@link Map} parsed from brillopad for the procrank section
*/
- void parseProcRankMap(ITestInvocationListener listener,
- ProcrankItem procrank) {
+ void parseProcRankMap(ITestInvocationListener listener, ProcrankItem procrank) {
// final maps for pss, rss, and uss.
Map<String, String> pssOutput = new HashMap<String, String>();
Map<String, String> rssOutput = new HashMap<String, String>();
diff --git a/prod-tests/src/com/android/performance/tests/VellamoBenchmark.java b/prod-tests/src/com/android/performance/tests/VellamoBenchmark.java
index 450ac7b..f6f5b53 100644
--- a/prod-tests/src/com/android/performance/tests/VellamoBenchmark.java
+++ b/prod-tests/src/com/android/performance/tests/VellamoBenchmark.java
@@ -100,10 +100,9 @@
isTimedOut = (System.currentTimeMillis() - benchmarkStartTime >= TIMEOUT_MS);
// get the logcat and parse
- BufferedReader logcat =
+ try (BufferedReader logcat =
new BufferedReader(
- new InputStreamReader(device.getLogcat().createInputStream()));
- try {
+ new InputStreamReader(device.getLogcat().createInputStream()))) {
while ((line = logcat.readLine()) != null) {
// filter only output from the Vellamo process
if (!line.contains(LOGTAG)) {
diff --git a/prod-tests/src/com/android/security/tests/SELinuxDenialsTests.java b/prod-tests/src/com/android/security/tests/SELinuxDenialsTests.java
index de42824..88330d9 100644
--- a/prod-tests/src/com/android/security/tests/SELinuxDenialsTests.java
+++ b/prod-tests/src/com/android/security/tests/SELinuxDenialsTests.java
@@ -266,7 +266,7 @@
CLog.e(String.format("Failed to fetch and parse bugreport for device %s: %s",
mDevice.getSerialNumber(), e));
} finally {
- bugreportSource.cancel();
+ bugreportSource.close();
}
return kernelLog;
@@ -336,10 +336,10 @@
return;
// in order to attach logs to the listener, they need to be of type InputStreamSource
- InputStreamSource logsInputStream = new ByteArrayInputStreamSource(logsStr.getBytes());
- // attach logs to listener, so the logs will be available to result reporters
- listener.testLog(dataName, dataType, logsInputStream);
- // cleanup the InputStreamSource
- logsInputStream.cancel();
+ try (InputStreamSource logsInputStream =
+ new ByteArrayInputStreamSource(logsStr.getBytes())) {
+ // attach logs to listener, so the logs will be available to result reporters
+ listener.testLog(dataName, dataType, logsInputStream);
+ }
}
}
diff --git a/prod-tests/src/com/android/wireless/tests/TelephonyStabilityTest.java b/prod-tests/src/com/android/wireless/tests/TelephonyStabilityTest.java
index 3891e6e..76f2de0 100644
--- a/prod-tests/src/com/android/wireless/tests/TelephonyStabilityTest.java
+++ b/prod-tests/src/com/android/wireless/tests/TelephonyStabilityTest.java
@@ -155,12 +155,9 @@
if (shouldTakeReport) {
lastBugreportIteration = currentIteration - 1;
- InputStreamSource bugreport = mTestDevice.getBugreport();
- try {
+ try (InputStreamSource bugreport = mTestDevice.getBugreport()) {
listener.testLog(String.format("bugreport_%04d", lastBugreportIteration),
LogDataType.BUGREPORT, bugreport);
- } finally {
- bugreport.cancel();
}
}
diff --git a/proto/file_metadata.proto b/proto/file_metadata.proto
index 8edf0bd..cf82d42 100644
--- a/proto/file_metadata.proto
+++ b/proto/file_metadata.proto
@@ -21,6 +21,8 @@
PROCRANK=10;
SERVICES=11;
TOP=12;
+ // skip item 13 ERRORSTATS
+ MUGSHOT=14;
}
// Represents a single log file
diff --git a/res/config/suite/apct.xml b/res/config/suite/apct.xml
new file mode 100644
index 0000000..c7ddbe9
--- /dev/null
+++ b/res/config/suite/apct.xml
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright 2017 Google Inc. All Rights Reserved. -->
+<configuration description="Android APCT test suite config.">
+ <test class="com.android.tradefed.testtype.suite.TfSuiteRunner">
+ <option name="run-suite-tag" value="apct" />
+ </test>
+</configuration>
diff --git a/src/com/android/tradefed/build/LocalDeviceBuildProvider.java b/src/com/android/tradefed/build/LocalDeviceBuildProvider.java
index 1ac6fe4..0bbfcef 100644
--- a/src/com/android/tradefed/build/LocalDeviceBuildProvider.java
+++ b/src/com/android/tradefed/build/LocalDeviceBuildProvider.java
@@ -282,7 +282,8 @@
this.mBuildDir = buildDir;
}
- File getTestDir() {
+ /** Returns the directory where the tests are located. */
+ public File getTestDir() {
return mTestDir;
}
diff --git a/src/com/android/tradefed/build/OtatoolsBuildInfo.java b/src/com/android/tradefed/build/OtatoolsBuildInfo.java
index 9f9d3f1..f9e7329 100644
--- a/src/com/android/tradefed/build/OtatoolsBuildInfo.java
+++ b/src/com/android/tradefed/build/OtatoolsBuildInfo.java
@@ -29,6 +29,13 @@
private static final String RELEASETOOLS_DIR_NAME = "otatools_releasetools";
/**
+ * Creates an {@link OtatoolsBuildInfo}
+ */
+ public OtatoolsBuildInfo(String buildId, String buildTargetName) {
+ super(buildId, buildTargetName);
+ }
+
+ /**
* Add /build/target/product/security to this file map
*/
public void setSecurityDir(File dir, String version) {
diff --git a/src/com/android/tradefed/command/CommandOptions.java b/src/com/android/tradefed/command/CommandOptions.java
index 56cc407..8249b8f 100644
--- a/src/com/android/tradefed/command/CommandOptions.java
+++ b/src/com/android/tradefed/command/CommandOptions.java
@@ -137,6 +137,15 @@
)
private boolean mUseTfSharding = false;
+ public static final String USE_SANDBOX = "use-sandbox";
+
+ @Option(
+ name = USE_SANDBOX,
+ description = "Set if the invocation should use a sandbox to run or not."
+ )
+ private boolean mUseSandbox = false;
+
+
/**
* Set the help mode for the config.
* <p/>
@@ -388,4 +397,16 @@
public boolean shouldUseTfSharding() {
return mUseTfSharding;
}
+
+ /** {@inheritDoc} */
+ @Override
+ public boolean shouldUseSandboxing() {
+ return mUseSandbox;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public void setShouldUseSandboxing(boolean use) {
+ mUseSandbox = use;
+ }
}
diff --git a/src/com/android/tradefed/command/CommandRunner.java b/src/com/android/tradefed/command/CommandRunner.java
index 6ebeace..82eb2eb 100644
--- a/src/com/android/tradefed/command/CommandRunner.java
+++ b/src/com/android/tradefed/command/CommandRunner.java
@@ -34,7 +34,7 @@
private ICommandScheduler mScheduler;
private ExitCode mErrorCode = ExitCode.NO_ERROR;
- private static final long CHECK_DEVICE_TIMEOUT = 15000;
+ private static final long CHECK_DEVICE_TIMEOUT = 60000;
public CommandRunner() {}
@@ -87,7 +87,8 @@
}
try {
mScheduler.join(getCheckDeviceTimeout());
- // After 15 seconds we check if the command was executed.
+ // FIXME: if possible make the no_device allocated check deterministic.
+ // After 1 min we check if the command was executed.
if (mScheduler.getReadyCommandCount() > 0) {
printStackTrace(new NoDeviceException("No device was allocated for the command."));
mErrorCode = ExitCode.NO_DEVICE_ALLOCATED;
diff --git a/src/com/android/tradefed/command/CommandScheduler.java b/src/com/android/tradefed/command/CommandScheduler.java
index ed4eaee..8cdb7e2 100644
--- a/src/com/android/tradefed/command/CommandScheduler.java
+++ b/src/com/android/tradefed/command/CommandScheduler.java
@@ -36,6 +36,7 @@
import com.android.tradefed.config.IDeviceConfiguration;
import com.android.tradefed.config.IGlobalConfiguration;
import com.android.tradefed.config.Option;
+import com.android.tradefed.config.SandboxConfigurationFactory;
import com.android.tradefed.device.DeviceAllocationState;
import com.android.tradefed.device.DeviceManager;
import com.android.tradefed.device.DeviceNotAvailableException;
@@ -59,6 +60,8 @@
import com.android.tradefed.log.LogUtil.CLog;
import com.android.tradefed.result.ITestInvocationListener;
import com.android.tradefed.result.ResultForwarder;
+import com.android.tradefed.sandbox.ISandbox;
+import com.android.tradefed.sandbox.TradefedSandbox;
import com.android.tradefed.util.ArrayUtil;
import com.android.tradefed.util.FileUtil;
import com.android.tradefed.util.QuotationAwareTokenizer;
@@ -1099,11 +1102,36 @@
return internalAddCommand(args, totalExecTime, null);
}
+ /** Returns true if {@link CommandOptions#USE_SANDBOX} is part of the command line. */
+ private boolean isCommandSandboxed(String[] args) {
+ for (String arg : args) {
+ if (("--" + CommandOptions.USE_SANDBOX).equals(arg)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /** Create a {@link ISandbox} that the invocation will use to run. */
+ public ISandbox createSandbox() {
+ return new TradefedSandbox();
+ }
+
+ private IConfiguration createConfiguration(String[] args) throws ConfigurationException {
+ // check if the command should be sandboxed
+ if (isCommandSandboxed(args)) {
+ // Create a sandboxed configuration based on the sandbox of the scheduler.
+ ISandbox sandbox = createSandbox();
+ return SandboxConfigurationFactory.getInstance()
+ .createConfigurationFromArgs(args, getKeyStoreClient(), sandbox, new RunUtil());
+ }
+ return getConfigFactory().createConfigurationFromArgs(args, null, getKeyStoreClient());
+ }
+
private boolean internalAddCommand(String[] args, long totalExecTime, String cmdFilePath)
throws ConfigurationException {
assertStarted();
- IConfiguration config = getConfigFactory().createConfigurationFromArgs(args, null,
- getKeyStoreClient());
+ IConfiguration config = createConfiguration(args);
if (config.getCommandOptions().isHelpMode()) {
getConfigFactory().printHelpForConfig(args, true, System.out);
} else if (config.getCommandOptions().isFullHelpMode()) {
@@ -1307,11 +1335,10 @@
return ArrayUtil.join(" ", (Object[])args);
}
- /**
- * {@inheritDoc}
- */
+ /** {@inheritDoc} */
@Override
- public void execCommand(IScheduledInvocationListener listener, String[] args)
+ public void execCommand(
+ IInvocationContext context, IScheduledInvocationListener listener, String[] args)
throws ConfigurationException, NoDeviceException {
assertStarted();
IDeviceManager manager = getDeviceManager();
@@ -1321,7 +1348,6 @@
config.validateOptions();
ExecutableCommand execCmd = createExecutableCommand(cmdTracker, config, false);
- IInvocationContext context = new InvocationContext();
context.setConfigurationDescriptor(config.getConfigurationDescription());
Map<String, ITestDevice> devices = allocateDevices(config, manager);
if (!devices.isEmpty()) {
@@ -1337,6 +1363,13 @@
}
}
+ /** {@inheritDoc} */
+ @Override
+ public void execCommand(IScheduledInvocationListener listener, String[] args)
+ throws ConfigurationException, NoDeviceException {
+ execCommand(new InvocationContext(), listener, args);
+ }
+
/**
* Allocate devices for a config.
* @param config a {@link IConfiguration} has device requirements.
diff --git a/src/com/android/tradefed/command/Console.java b/src/com/android/tradefed/command/Console.java
index 56f3579..4cd4db7 100644
--- a/src/com/android/tradefed/command/Console.java
+++ b/src/com/android/tradefed/command/Console.java
@@ -873,12 +873,16 @@
*/
@SuppressWarnings("unchecked")
void executeCmdRunnable(Runnable command, CaptureList groups) {
- if (command instanceof ArgRunnable) {
- // FIXME: verify that command implements ArgRunnable<CaptureList> instead
- // FIXME: of just ArgRunnable
- ((ArgRunnable<CaptureList>)command).run(groups);
- } else {
- command.run();
+ try {
+ if (command instanceof ArgRunnable) {
+ // FIXME: verify that command implements ArgRunnable<CaptureList> instead
+ // FIXME: of just ArgRunnable
+ ((ArgRunnable<CaptureList>) command).run(groups);
+ } else {
+ command.run();
+ }
+ } catch (RuntimeException e) {
+ e.printStackTrace();
}
}
diff --git a/src/com/android/tradefed/command/ICommandOptions.java b/src/com/android/tradefed/command/ICommandOptions.java
index d3acea6..b202398 100644
--- a/src/com/android/tradefed/command/ICommandOptions.java
+++ b/src/com/android/tradefed/command/ICommandOptions.java
@@ -154,4 +154,10 @@
/** Returns true if we should use Tf new sharding logic */
public boolean shouldUseTfSharding();
+
+ /** Returns true if we should use Tf containers to run the invocation */
+ public boolean shouldUseSandboxing();
+
+ /** Sets whether or not we should use TF containers */
+ public void setShouldUseSandboxing(boolean use);
}
diff --git a/src/com/android/tradefed/command/ICommandScheduler.java b/src/com/android/tradefed/command/ICommandScheduler.java
index b24c359..7bfc717 100644
--- a/src/com/android/tradefed/command/ICommandScheduler.java
+++ b/src/com/android/tradefed/command/ICommandScheduler.java
@@ -118,6 +118,20 @@
String[] args) throws ConfigurationException;
/**
+ * Directly allocates a device and executes a command without adding it to the command queue
+ * using an already existing {@link IInvocationContext}.
+ *
+ * @param context an existing {@link IInvocationContext}.
+ * @param listener the {@link ICommandScheduler.IScheduledInvocationListener} to be informed
+ * @param args the command arguments
+ * @throws ConfigurationException if command was invalid
+ * @throws NoDeviceException if there is no device to use
+ */
+ public void execCommand(
+ IInvocationContext context, IScheduledInvocationListener listener, String[] args)
+ throws ConfigurationException, NoDeviceException;
+
+ /**
* Remove all commands from scheduler
*/
public void removeAllCommands();
diff --git a/src/com/android/tradefed/config/ArgsOptionParser.java b/src/com/android/tradefed/config/ArgsOptionParser.java
index 8a561c5..707f687 100644
--- a/src/com/android/tradefed/config/ArgsOptionParser.java
+++ b/src/com/android/tradefed/config/ArgsOptionParser.java
@@ -137,6 +137,8 @@
static final String SHORT_NAME_PREFIX = "-";
static final String OPTION_NAME_PREFIX = "--";
+ // For a boolean pattern match: {device name}namespace:(no-)option-name
+ static final Pattern BOOL_FALSE_DEVICE_PATTERN = Pattern.compile("(\\{.*\\})(.*:)?(no-)(.+)");
/** the amount to indent an option field's description when displaying help */
private static final int OPTION_DESCRIPTION_INDENT = 25;
@@ -304,7 +306,13 @@
if (value == null) {
if (isBooleanOption(name)) {
int idx = name.indexOf(NAMESPACE_SEPARATOR);
- value = name.startsWith(BOOL_FALSE_PREFIX, idx + 1) ? "false" : "true";
+ // Detect a device tag in front of the boolean option.
+ Matcher m = BOOL_FALSE_DEVICE_PATTERN.matcher(name);
+ if (m.find()) {
+ value = "false";
+ } else {
+ value = name.startsWith(BOOL_FALSE_PREFIX, idx + 1) ? "false" : "true";
+ }
} else if (isMapOption(name)) {
// Support --option key=value and --option key value format
String tmp = grabNextValue(args, name, "for its key");
diff --git a/src/com/android/tradefed/config/Configuration.java b/src/com/android/tradefed/config/Configuration.java
index 26e2166..9e6b05f 100644
--- a/src/com/android/tradefed/config/Configuration.java
+++ b/src/com/android/tradefed/config/Configuration.java
@@ -24,6 +24,7 @@
import com.android.tradefed.device.IDeviceRecovery;
import com.android.tradefed.device.IDeviceSelection;
import com.android.tradefed.device.TestDeviceOptions;
+import com.android.tradefed.device.metric.IMetricCollector;
import com.android.tradefed.log.ILeveledLogOutput;
import com.android.tradefed.log.StdoutLogger;
import com.android.tradefed.profiler.ITestProfiler;
@@ -87,6 +88,8 @@
public static final String CONFIGURATION_DESCRIPTION_TYPE_NAME = "config_desc";
public static final String DEVICE_NAME = "device";
public static final String TEST_PROFILER_TYPE_NAME = "test_profiler";
+ public static final String DEVICE_METRICS_COLLECTOR_TYPE_NAME = "metrics_collector";
+ public static final String SANDBOX_TYPE_NAME = "sandbox";
private static Map<String, ObjTypeInfo> sObjTypeMap = null;
private static Set<String> sMultiDeviceSupportedTag = null;
@@ -160,6 +163,9 @@
CONFIGURATION_DESCRIPTION_TYPE_NAME,
new ObjTypeInfo(ConfigurationDescriptor.class, false));
sObjTypeMap.put(TEST_PROFILER_TYPE_NAME, new ObjTypeInfo(ITestProfiler.class, false));
+ sObjTypeMap.put(
+ DEVICE_METRICS_COLLECTOR_TYPE_NAME,
+ new ObjTypeInfo(IMetricCollector.class, true));
}
return sObjTypeMap;
}
@@ -210,6 +216,7 @@
setSystemStatusCheckers(new ArrayList<ISystemStatusChecker>());
setConfigurationDescriptor(new ConfigurationDescriptor());
setProfiler(new StubTestProfiler());
+ setDeviceMetricCollectors(new ArrayList<>());
}
/**
@@ -348,6 +355,13 @@
RESULT_REPORTER_TYPE_NAME);
}
+ @SuppressWarnings("unchecked")
+ @Override
+ public List<IMetricCollector> getMetricCollectors() {
+ return (List<IMetricCollector>)
+ getConfigurationObjectList(DEVICE_METRICS_COLLECTOR_TYPE_NAME);
+ }
+
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override
@@ -641,6 +655,11 @@
setConfigurationObjectListNoThrow(RESULT_REPORTER_TYPE_NAME, listeners);
}
+ @Override
+ public void setDeviceMetricCollectors(List<IMetricCollector> collectors) {
+ setConfigurationObjectListNoThrow(DEVICE_METRICS_COLLECTOR_TYPE_NAME, collectors);
+ }
+
/**
* {@inheritDoc}
*/
@@ -1165,6 +1184,12 @@
*/
@Override
public void dumpXml(PrintWriter output) throws IOException {
+ dumpXml(output, new ArrayList<String>());
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public void dumpXml(PrintWriter output, List<String> excludeFilters) throws IOException {
KXmlSerializer serializer = new KXmlSerializer();
serializer.setOutput(output);
serializer.setFeature("http://xmlpull.org/v1/doc/features.html#indent-output", true);
@@ -1172,10 +1197,12 @@
serializer.startTag(null, ConfigurationUtil.CONFIGURATION_NAME);
for (IMultiTargetPreparer multipreparer : getMultiTargetPreparers()) {
- ConfigurationUtil.dumpClassToXml(serializer, MULTI_PREPARER_TYPE_NAME, multipreparer);
+ ConfigurationUtil.dumpClassToXml(
+ serializer, MULTI_PREPARER_TYPE_NAME, multipreparer, excludeFilters);
}
for (ISystemStatusChecker checker : getSystemStatusCheckers()) {
- ConfigurationUtil.dumpClassToXml(serializer, SYSTEM_STATUS_CHECKER_TYPE_NAME, checker);
+ ConfigurationUtil.dumpClassToXml(
+ serializer, SYSTEM_STATUS_CHECKER_TYPE_NAME, checker, excludeFilters);
}
if (getDeviceConfig().size() > 1) {
@@ -1184,49 +1211,70 @@
serializer.startTag(null, Configuration.DEVICE_NAME);
serializer.attribute(null, "name", deviceConfig.getDeviceName());
ConfigurationUtil.dumpClassToXml(
- serializer, BUILD_PROVIDER_TYPE_NAME, deviceConfig.getBuildProvider());
+ serializer,
+ BUILD_PROVIDER_TYPE_NAME,
+ deviceConfig.getBuildProvider(),
+ excludeFilters);
for (ITargetPreparer preparer : deviceConfig.getTargetPreparers()) {
ConfigurationUtil.dumpClassToXml(
- serializer, TARGET_PREPARER_TYPE_NAME, preparer);
+ serializer, TARGET_PREPARER_TYPE_NAME, preparer, excludeFilters);
}
ConfigurationUtil.dumpClassToXml(
- serializer, DEVICE_RECOVERY_TYPE_NAME, deviceConfig.getDeviceRecovery());
+ serializer,
+ DEVICE_RECOVERY_TYPE_NAME,
+ deviceConfig.getDeviceRecovery(),
+ excludeFilters);
ConfigurationUtil.dumpClassToXml(
serializer,
DEVICE_REQUIREMENTS_TYPE_NAME,
- deviceConfig.getDeviceRequirements());
+ deviceConfig.getDeviceRequirements(),
+ excludeFilters);
ConfigurationUtil.dumpClassToXml(
- serializer, DEVICE_OPTIONS_TYPE_NAME, deviceConfig.getDeviceOptions());
+ serializer,
+ DEVICE_OPTIONS_TYPE_NAME,
+ deviceConfig.getDeviceOptions(),
+ excludeFilters);
serializer.endTag(null, Configuration.DEVICE_NAME);
}
} else {
// Put single device tags
ConfigurationUtil.dumpClassToXml(
- serializer, BUILD_PROVIDER_TYPE_NAME, getBuildProvider());
+ serializer, BUILD_PROVIDER_TYPE_NAME, getBuildProvider(), excludeFilters);
for (ITargetPreparer preparer : getTargetPreparers()) {
- ConfigurationUtil.dumpClassToXml(serializer, TARGET_PREPARER_TYPE_NAME, preparer);
+ ConfigurationUtil.dumpClassToXml(
+ serializer, TARGET_PREPARER_TYPE_NAME, preparer, excludeFilters);
}
ConfigurationUtil.dumpClassToXml(
- serializer, DEVICE_RECOVERY_TYPE_NAME, getDeviceRecovery());
+ serializer, DEVICE_RECOVERY_TYPE_NAME, getDeviceRecovery(), excludeFilters);
ConfigurationUtil.dumpClassToXml(
- serializer, DEVICE_REQUIREMENTS_TYPE_NAME, getDeviceRequirements());
+ serializer,
+ DEVICE_REQUIREMENTS_TYPE_NAME,
+ getDeviceRequirements(),
+ excludeFilters);
ConfigurationUtil.dumpClassToXml(
- serializer, DEVICE_OPTIONS_TYPE_NAME, getDeviceOptions());
+ serializer, DEVICE_OPTIONS_TYPE_NAME, getDeviceOptions(), excludeFilters);
}
for (IRemoteTest test : getTests()) {
- ConfigurationUtil.dumpClassToXml(serializer, TEST_TYPE_NAME, test);
+ ConfigurationUtil.dumpClassToXml(serializer, TEST_TYPE_NAME, test, excludeFilters);
}
+ ConfigurationUtil.dumpClassToXml(
+ serializer,
+ CONFIGURATION_DESCRIPTION_TYPE_NAME,
+ getConfigurationDescription(),
+ excludeFilters);
+ ConfigurationUtil.dumpClassToXml(
+ serializer, LOGGER_TYPE_NAME, getLogOutput(), excludeFilters);
+ ConfigurationUtil.dumpClassToXml(
+ serializer, LOG_SAVER_TYPE_NAME, getLogSaver(), excludeFilters);
+ for (ITestInvocationListener listener : getTestInvocationListeners()) {
+ ConfigurationUtil.dumpClassToXml(
+ serializer, RESULT_REPORTER_TYPE_NAME, listener, excludeFilters);
+ }
+ ConfigurationUtil.dumpClassToXml(
+ serializer, CMD_OPTIONS_TYPE_NAME, getCommandOptions(), excludeFilters);
ConfigurationUtil.dumpClassToXml(
- serializer, CONFIGURATION_DESCRIPTION_TYPE_NAME, getConfigurationDescription());
- ConfigurationUtil.dumpClassToXml(serializer, LOGGER_TYPE_NAME, getLogOutput());
- ConfigurationUtil.dumpClassToXml(serializer, LOG_SAVER_TYPE_NAME, getLogSaver());
- for (ITestInvocationListener listener : getTestInvocationListeners()) {
- ConfigurationUtil.dumpClassToXml(serializer, RESULT_REPORTER_TYPE_NAME, listener);
- }
- ConfigurationUtil.dumpClassToXml(serializer, CMD_OPTIONS_TYPE_NAME, getCommandOptions());
-
- ConfigurationUtil.dumpClassToXml(serializer, TEST_PROFILER_TYPE_NAME, getProfiler());
+ serializer, TEST_PROFILER_TYPE_NAME, getProfiler(), excludeFilters);
serializer.endTag(null, ConfigurationUtil.CONFIGURATION_NAME);
serializer.endDocument();
diff --git a/src/com/android/tradefed/config/ConfigurationDescriptor.java b/src/com/android/tradefed/config/ConfigurationDescriptor.java
index 1cf1345..86cdb5f 100644
--- a/src/com/android/tradefed/config/ConfigurationDescriptor.java
+++ b/src/com/android/tradefed/config/ConfigurationDescriptor.java
@@ -59,6 +59,12 @@
)
private boolean mNotStrictShardable = false;
+ @Option(
+ name = "use-sandboxing",
+ description = "Option used to notify an invocation that it is running in a sandbox."
+ )
+ private boolean mUseSandboxing = false;
+
/** Optional Abi information the configuration will be run against. */
private IAbi mAbi = null;
@@ -106,4 +112,14 @@
public IAbi getAbi() {
return mAbi;
}
+
+ /** Returns true if the invocation should run in sandboxed mode. False otherwise. */
+ public boolean shouldUseSandbox() {
+ return mUseSandboxing;
+ }
+
+ /** Sets whether or not a config will run in sandboxed mode or not. */
+ public void setSandboxed(boolean useSandboxed) {
+ mUseSandboxing = useSandboxed;
+ }
}
diff --git a/src/com/android/tradefed/config/ConfigurationFactory.java b/src/com/android/tradefed/config/ConfigurationFactory.java
index 628702b..9ce9a0e 100644
--- a/src/com/android/tradefed/config/ConfigurationFactory.java
+++ b/src/com/android/tradefed/config/ConfigurationFactory.java
@@ -220,6 +220,7 @@
for (String configFileName : possibleConfigFileNames) {
File config = FileUtil.findFile(testCasesDir, configFileName);
if (config != null) {
+ CLog.d("Using config: %s/%s", testCasesDir.getAbsoluteFile(), configFileName);
return config;
}
}
@@ -267,14 +268,12 @@
if (def == null || def.isStale()) {
def = new ConfigurationDef(configName);
- loadConfiguration(configName, def, templateMap);
+ loadConfiguration(configName, def, null, templateMap);
mConfigDefMap.put(configId, def);
} else {
if (templateMap != null) {
// Clearing the map before returning the cached config to
- // avoid seeing them as
- // unused.
- CLog.d("Using cached configuration, ensuring map is clean.");
+ // avoid seeing them as unused.
templateMap.clear();
}
}
@@ -316,15 +315,19 @@
}
}
- @Override
/**
- * Configs that are bundled inside the tradefed.jar can only include
- * other configs also bundled inside tradefed.jar. However, local
- * (external) configs can include both local (external) and bundled
- * configs.
+ * Configs that are bundled inside the tradefed.jar can only include other configs also
+ * bundled inside tradefed.jar. However, local (external) configs can include both local
+ * (external) and bundled configs.
*/
- public void loadIncludedConfiguration(ConfigurationDef def, String parentName, String name,
- Map<String, String> templateMap) throws ConfigurationException {
+ @Override
+ public void loadIncludedConfiguration(
+ ConfigurationDef def,
+ String parentName,
+ String name,
+ String deviceTagObject,
+ Map<String, String> templateMap)
+ throws ConfigurationException {
String config_name = name;
if (!isBundledConfig(name)) {
@@ -362,25 +365,31 @@
"Circular configuration include: config '%s' is already included",
config_name));
}
- loadConfiguration(config_name, def, templateMap);
+ loadConfiguration(config_name, def, deviceTagObject, templateMap);
}
/**
* Loads a configuration.
*
- * @param name the name of a built-in configuration to load or a file
- * path to configuration xml to load
+ * @param name the name of a built-in configuration to load or a file path to configuration
+ * xml to load
* @param def the loaded {@link ConfigurationDef}
- * @param templateMap map from template-include names to their
- * respective concrete configuration files
- * @throws ConfigurationException if a configuration with given
- * name/file path cannot be loaded or parsed
+ * @param deviceTagObject name of the current deviceTag if we are loading from a config
+ * inside an <include>. Null otherwise.
+ * @param templateMap map from template-include names to their respective concrete
+ * configuration files
+ * @throws ConfigurationException if a configuration with given name/file path cannot be
+ * loaded or parsed
*/
- void loadConfiguration(String name, ConfigurationDef def, Map<String, String> templateMap)
+ void loadConfiguration(
+ String name,
+ ConfigurationDef def,
+ String deviceTagObject,
+ Map<String, String> templateMap)
throws ConfigurationException {
- Log.d(LOG_TAG, String.format("Loading configuration '%s'", name));
+ //Log.d(LOG_TAG, String.format("Loading configuration '%s'", name));
BufferedInputStream bufStream = getConfigStream(name);
- ConfigurationXmlParser parser = new ConfigurationXmlParser(this);
+ ConfigurationXmlParser parser = new ConfigurationXmlParser(this, deviceTagObject);
parser.parse(def, name, bufStream, templateMap);
// Track local config source files
@@ -415,13 +424,14 @@
/**
* Retrieve the {@link ConfigurationDef} for the given name
*
- * @param name the name of a built-in configuration to load or a file path
- * to configuration xml to load
+ * @param name the name of a built-in configuration to load or a file path to configuration xml
+ * to load
* @return {@link ConfigurationDef}
* @throws ConfigurationException if an error occurred loading the config
*/
- private ConfigurationDef getConfigurationDef(String name, boolean isGlobal,
- Map<String, String> templateMap) throws ConfigurationException {
+ ConfigurationDef getConfigurationDef(
+ String name, boolean isGlobal, Map<String, String> templateMap)
+ throws ConfigurationException {
return new ConfigLoader(isGlobal).getConfigurationDef(name, templateMap);
}
@@ -597,9 +607,17 @@
*/
@Override
public List<String> getConfigList(String subPath) {
+ return getConfigList(subPath, true);
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public List<String> getConfigList(String subPath, boolean loadFromEnv) {
Set<String> configNames = getConfigSetFromClasspath(subPath);
- // list config on variable path too
- configNames.addAll(getConfigNamesFromTestCases(subPath));
+ if (loadFromEnv) {
+ // list config on variable path too
+ configNames.addAll(getConfigNamesFromTestCases(subPath));
+ }
// sort the configs by name before adding to list
SortedSet<String> configDefs = new TreeSet<String>();
configDefs.addAll(configNames);
diff --git a/src/com/android/tradefed/config/ConfigurationUtil.java b/src/com/android/tradefed/config/ConfigurationUtil.java
index b402ab1..f3eb748 100644
--- a/src/com/android/tradefed/config/ConfigurationUtil.java
+++ b/src/com/android/tradefed/config/ConfigurationUtil.java
@@ -65,9 +65,18 @@
* @param serializer a {@link KXmlSerializer} to create the XML dump
* @param classTypeName a {@link String} of the class type's name
* @param obj {@link Object} to be added to the XML dump
+ * @param excludeClassTypes list of object configuration type to be excluded from the dump. for
+ * example: {@link Configuration#TARGET_PREPARER_TYPE_NAME}.
*/
- static void dumpClassToXml(KXmlSerializer serializer, String classTypeName, Object obj)
+ static void dumpClassToXml(
+ KXmlSerializer serializer,
+ String classTypeName,
+ Object obj,
+ List<String> excludeClassTypes)
throws IOException {
+ if (excludeClassTypes.contains(classTypeName)) {
+ return;
+ }
serializer.startTag(null, classTypeName);
serializer.attribute(null, CLASS_NAME, obj.getClass().getName());
dumpOptionsToXml(serializer, obj);
diff --git a/src/com/android/tradefed/config/ConfigurationXmlParser.java b/src/com/android/tradefed/config/ConfigurationXmlParser.java
index c36f530..32e06fd 100644
--- a/src/com/android/tradefed/config/ConfigurationXmlParser.java
+++ b/src/com/android/tradefed/config/ConfigurationXmlParser.java
@@ -62,6 +62,7 @@
private final ConfigurationDef mConfigDef;
private final Map<String, String> mTemplateMap;
private final String mName;
+ private final boolean mInsideParentDeviceTag;
// State-holding members
private String mCurrentConfigObject;
@@ -72,11 +73,17 @@
private Boolean isLocalConfig = null;
- ConfigHandler(ConfigurationDef def, String name, IConfigDefLoader loader,
+ ConfigHandler(
+ ConfigurationDef def,
+ String name,
+ IConfigDefLoader loader,
+ String parentDeviceObject,
Map<String, String> templateMap) {
mName = name;
mConfigDef = def;
mConfigDefLoader = loader;
+ mCurrentDeviceObject = parentDeviceObject;
+ mInsideParentDeviceTag = (parentDeviceObject != null) ? true : false;
if (templateMap == null) {
mTemplateMap = Collections.<String, String>emptyMap();
@@ -141,7 +148,7 @@
// if it turns out we are in multi mode, we will throw an exception.
mOutsideTag.add(localName);
}
- //if we are inside a device object, some tags are not allowed.
+ // if we are inside a device object, some tags are not allowed.
if (mCurrentDeviceObject != null) {
if (!Configuration.doesBuiltInObjSupportMultiDevice(localName)) {
// Prevent some tags to be inside of a device in multi device mode.
@@ -202,13 +209,9 @@
if (includeName == null) {
throwException("Missing 'name' attribute for include");
}
- if (mCurrentDeviceObject != null) {
- // TODO: Add this use case.
- throwException("<include> inside device object currently not supported.");
- }
try {
- mConfigDefLoader.loadIncludedConfiguration(mConfigDef, mName, includeName,
- mTemplateMap);
+ mConfigDefLoader.loadIncludedConfiguration(
+ mConfigDef, mName, includeName, mCurrentDeviceObject, mTemplateMap);
} catch (ConfigurationException e) {
if (e instanceof TemplateResolutionError) {
throwException(String.format(INNER_TEMPLATE_INCLUDE_ERROR,
@@ -236,8 +239,8 @@
// Removing the used template from the map to avoid re-using it.
mTemplateMap.remove(templateName);
try {
- mConfigDefLoader.loadIncludedConfiguration(mConfigDef, mName, includeName,
- mTemplateMap);
+ mConfigDefLoader.loadIncludedConfiguration(
+ mConfigDef, mName, includeName, null, mTemplateMap);
} catch (ConfigurationException e) {
if (e instanceof TemplateResolutionError) {
throwException(String.format(INNER_TEMPLATE_INCLUDE_ERROR,
@@ -257,7 +260,8 @@
|| GlobalConfiguration.isBuiltInObjType(localName)) {
mCurrentConfigObject = null;
}
- if (DEVICE_TAG.equals(localName)) {
+ if (DEVICE_TAG.equals(localName) && !mInsideParentDeviceTag) {
+ // Only unset if it was not the parent device tag.
mCurrentDeviceObject = null;
}
}
@@ -301,9 +305,15 @@
}
private final IConfigDefLoader mConfigDefLoader;
+ /**
+ * If we are loading a config from inside a <device> tag, this will contain the name of the
+ * current device tag to properly load in context.
+ */
+ private final String mParentDeviceObject;
- ConfigurationXmlParser(IConfigDefLoader loader) {
+ ConfigurationXmlParser(IConfigDefLoader loader, String parentDeviceObject) {
mConfigDefLoader = loader;
+ mParentDeviceObject = parentDeviceObject;
}
/**
@@ -323,8 +333,9 @@
SAXParserFactory parserFactory = SAXParserFactory.newInstance();
parserFactory.setNamespaceAware(true);
SAXParser parser = parserFactory.newSAXParser();
- ConfigHandler configHandler = new ConfigHandler(configDef, name, mConfigDefLoader,
- templateMap);
+ ConfigHandler configHandler =
+ new ConfigHandler(
+ configDef, name, mConfigDefLoader, mParentDeviceObject, templateMap);
parser.parse(new InputSource(xmlInput), configHandler);
checkValidMultiConfiguration(configHandler);
} catch (ParserConfigurationException e) {
diff --git a/src/com/android/tradefed/config/GlobalConfiguration.java b/src/com/android/tradefed/config/GlobalConfiguration.java
index 52c2624..cef3da6 100644
--- a/src/com/android/tradefed/config/GlobalConfiguration.java
+++ b/src/com/android/tradefed/config/GlobalConfiguration.java
@@ -64,13 +64,13 @@
public static final String KEY_STORE_TYPE_NAME = "key_store";
public static final String SHARDING_STRATEGY_TYPE_NAME = "sharding_strategy";
+ public static final String GLOBAL_CONFIG_VARIABLE = "TF_GLOBAL_CONFIG";
+ private static final String GLOBAL_CONFIG_FILENAME = "tf_global_config.xml";
+
private static Map<String, ObjTypeInfo> sObjTypeMap = null;
private static IGlobalConfiguration sInstance = null;
private static final Object sInstanceLock = new Object();
- private static final String GLOBAL_CONFIG_VARIABLE = "TF_GLOBAL_CONFIG";
- private static final String GLOBAL_CONFIG_FILENAME = "tf_global_config.xml";
-
// Empty embedded configuration available by default
private static final String DEFAULT_EMPTY_CONFIG_NAME = "empty";
@@ -655,7 +655,8 @@
whitelistConfigs = CONFIGS_FOR_SUBPROCESS_WHITE_LIST;
}
for (String config : whitelistConfigs) {
- ConfigurationUtil.dumpClassToXml(serializer, config, getConfigurationObject(config));
+ ConfigurationUtil.dumpClassToXml(
+ serializer, config, getConfigurationObject(config), new ArrayList<>());
}
serializer.endTag(null, ConfigurationUtil.CONFIGURATION_NAME);
serializer.endDocument();
diff --git a/src/com/android/tradefed/config/IConfigDefLoader.java b/src/com/android/tradefed/config/IConfigDefLoader.java
index b235f76..d6dd9c9 100644
--- a/src/com/android/tradefed/config/IConfigDefLoader.java
+++ b/src/com/android/tradefed/config/IConfigDefLoader.java
@@ -42,8 +42,15 @@
* @param def the {@link ConfigurationDef} to load the data into
* @param parentName the name of the parent config
* @param name the name of config to include
+ * @param deviceTagObject the name of the current deviceTag or null if not inside a device tag.
+ * @param templateMap the current map of template to be loaded.
* @throws ConfigurationException if an error occurred loading the config
*/
- void loadIncludedConfiguration(ConfigurationDef def, String parentName, String name,
- Map<String, String> templateMap) throws ConfigurationException;
+ void loadIncludedConfiguration(
+ ConfigurationDef def,
+ String parentName,
+ String name,
+ String deviceTagObject,
+ Map<String, String> templateMap)
+ throws ConfigurationException;
}
diff --git a/src/com/android/tradefed/config/IConfiguration.java b/src/com/android/tradefed/config/IConfiguration.java
index 3512b66..4270b4a 100644
--- a/src/com/android/tradefed/config/IConfiguration.java
+++ b/src/com/android/tradefed/config/IConfiguration.java
@@ -22,6 +22,7 @@
import com.android.tradefed.device.IDeviceRecovery;
import com.android.tradefed.device.IDeviceSelection;
import com.android.tradefed.device.TestDeviceOptions;
+import com.android.tradefed.device.metric.IMetricCollector;
import com.android.tradefed.log.ILeveledLogOutput;
import com.android.tradefed.profiler.ITestProfiler;
import com.android.tradefed.result.ILogSaver;
@@ -129,6 +130,9 @@
*/
public ITestProfiler getProfiler();
+ /** Gets the {@link IMetricCollector}s from the configuration. */
+ public List<IMetricCollector> getMetricCollectors();
+
/**
* Gets the {@link ICommandOptions} to use from the configuration.
*
@@ -346,6 +350,9 @@
*/
public void setTestInvocationListener(ITestInvocationListener listener);
+ /** Set the list of {@link IMetricCollector}s, replacing any existing values. */
+ public void setDeviceMetricCollectors(List<IMetricCollector> collectors);
+
/**
* Set the {@link ITestProfiler}, replacing any existing values
*
@@ -504,4 +511,16 @@
* @throws IOException
*/
public void dumpXml(PrintWriter output) throws IOException;
+
+ /**
+ * Gets the expanded XML file for the config with all options shown for this {@link
+ * IConfiguration} minus the objects filters by their key name.
+ *
+ * <p>Filter example: {@link Configuration#TARGET_PREPARER_TYPE_NAME}.
+ *
+ * @param output the writer to print the xml to.
+ * @param excludeFilters the list of object type that should not be dumped.
+ * @throws IOException
+ */
+ public void dumpXml(PrintWriter output, List<String> excludeFilters) throws IOException;
}
diff --git a/src/com/android/tradefed/config/IConfigurationFactory.java b/src/com/android/tradefed/config/IConfigurationFactory.java
index b49d7de..d910002 100644
--- a/src/com/android/tradefed/config/IConfigurationFactory.java
+++ b/src/com/android/tradefed/config/IConfigurationFactory.java
@@ -142,4 +142,14 @@
* the same behavior as {@link #getConfigList()}.
*/
public List<String> getConfigList(String subPath);
+
+ /**
+ * Variation of {@link #getConfigList(String)} where can specify whether or not we also want to
+ * load the configuration from the environment.
+ *
+ * @param subPath name of the sub-directories to look in for configuration. If null, will have
+ * the same behavior as {@link #getConfigList()}.
+ * @param loadFromEnv True if we should load the configuration in the environment variable.
+ */
+ public List<String> getConfigList(String subPath, boolean loadFromEnv);
}
diff --git a/src/com/android/tradefed/config/SandboxConfigurationFactory.java b/src/com/android/tradefed/config/SandboxConfigurationFactory.java
new file mode 100644
index 0000000..490413d
--- /dev/null
+++ b/src/com/android/tradefed/config/SandboxConfigurationFactory.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.config;
+
+import com.android.tradefed.log.LogUtil.CLog;
+import com.android.tradefed.sandbox.ISandbox;
+import com.android.tradefed.sandbox.SandboxConfigDump.DumpCmd;
+import com.android.tradefed.sandbox.SandboxConfigUtil;
+import com.android.tradefed.util.FileUtil;
+import com.android.tradefed.util.IRunUtil;
+import com.android.tradefed.util.keystore.IKeyStoreClient;
+
+import java.io.File;
+import java.util.Map;
+
+/** Special Configuration factory to handle creation of configurations for Sandboxing purpose. */
+public class SandboxConfigurationFactory extends ConfigurationFactory {
+
+ private static SandboxConfigurationFactory sInstance = null;
+
+ /** Get the singleton {@link IConfigurationFactory} instance. */
+ public static SandboxConfigurationFactory getInstance() {
+ if (sInstance == null) {
+ sInstance = new SandboxConfigurationFactory();
+ }
+ return sInstance;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ ConfigurationDef getConfigurationDef(
+ String name, boolean isGlobal, Map<String, String> templateMap)
+ throws ConfigurationException {
+ // TODO: Extend ConfigurationDef to possibly create a different IConfiguration type and
+ // handle more elegantly the parent/subprocess incompatibilities.
+ ConfigurationDef def = new ConfigurationDef(name);
+ new ConfigLoader(isGlobal).loadConfiguration(name, def, null, templateMap);
+ return def;
+ }
+
+ /**
+ * Create a {@link IConfiguration} based on the command line and sandbox provided.
+ *
+ * @param args the command line for the run.
+ * @param keyStoreClient the {@link IKeyStoreClient} where to load the key from.
+ * @param sandbox the {@link ISandbox} used for the run.
+ * @param runUtil the {@link IRunUtil} to run commands.
+ * @return a {@link IConfiguration} valid for the sandbox.
+ * @throws ConfigurationException
+ */
+ public IConfiguration createConfigurationFromArgs(
+ String[] args, IKeyStoreClient keyStoreClient, ISandbox sandbox, IRunUtil runUtil)
+ throws ConfigurationException {
+ IConfiguration config = null;
+ File xmlConfig = null;
+ try {
+ runUtil.unsetEnvVariable(GlobalConfiguration.GLOBAL_CONFIG_VARIABLE);
+ File tfDir = sandbox.getTradefedEnvironment(args);
+ // TODO: dump using the keystore too
+ xmlConfig =
+ SandboxConfigUtil.dumpConfigForVersion(
+ tfDir, runUtil, args, DumpCmd.NON_VERSIONED_CONFIG);
+ // Get the non version part of the configuration in order to do proper allocation
+ // of devices and such.
+ config =
+ super.createConfigurationFromArgs(
+ new String[] {xmlConfig.getAbsolutePath()}, null, keyStoreClient);
+ // Reset the command line to the original one.
+ config.setCommandLine(args);
+ config.setConfigurationObject(Configuration.SANDBOX_TYPE_NAME, sandbox);
+ } catch (ConfigurationException e) {
+ CLog.e(e);
+ sandbox.tearDown();
+ throw e;
+ } finally {
+ FileUtil.deleteFile(xmlConfig);
+ }
+ return config;
+ }
+}
diff --git a/src/com/android/tradefed/device/DeviceManager.java b/src/com/android/tradefed/device/DeviceManager.java
index d31fee2..36c2bd7 100644
--- a/src/com/android/tradefed/device/DeviceManager.java
+++ b/src/com/android/tradefed/device/DeviceManager.java
@@ -229,7 +229,7 @@
/** Initialize adb connection and services depending on adb connection. */
private synchronized void startAdbBridgeAndDependentServices() {
// TODO: Temporarily increase default timeout as workaround for syncFiles timeouts
- DdmPreferences.setTimeOut(30 * 1000);
+ DdmPreferences.setTimeOut(120 * 1000);
mAdbBridge = createAdbBridge();
mManagedDeviceListener = new ManagedDeviceListener();
// It's important to add the listener before initializing the ADB bridge to avoid a race
@@ -1026,30 +1026,56 @@
public void deviceConnected(IDevice idevice) {
CLog.d("Detected device connect %s, id %d", idevice.getSerialNumber(),
idevice.hashCode());
- IManagedTestDevice testDevice = mManagedDeviceList.findOrCreate(idevice);
- if (testDevice == null) {
- return;
+ String threadName = String.format("Connected device %s", idevice.getSerialNumber());
+ Runnable connectedRunnable =
+ new Runnable() {
+ @Override
+ public void run() {
+ IManagedTestDevice testDevice =
+ mManagedDeviceList.findOrCreate(idevice);
+ if (testDevice == null) {
+ return;
+ }
+ // DDMS will allocate a new IDevice, so need
+ // to update the TestDevice record with the new device
+ CLog.d("Updating IDevice for device %s", idevice.getSerialNumber());
+ testDevice.setIDevice(idevice);
+ TestDeviceState newState =
+ TestDeviceState.getStateByDdms(idevice.getState());
+ testDevice.setDeviceState(newState);
+ if (newState == TestDeviceState.ONLINE) {
+ DeviceEventResponse r =
+ mManagedDeviceList.handleDeviceEvent(
+ testDevice, DeviceEvent.CONNECTED_ONLINE);
+ if (r.stateChanged
+ && r.allocationState
+ == DeviceAllocationState.Checking_Availability) {
+ checkAndAddAvailableDevice(testDevice);
+ }
+ logDeviceEvent(
+ EventType.DEVICE_CONNECTED, testDevice.getSerialNumber());
+ } else if (DeviceState.OFFLINE.equals(idevice.getState())
+ || DeviceState.UNAUTHORIZED.equals(idevice.getState())) {
+ mManagedDeviceList.handleDeviceEvent(
+ testDevice, DeviceEvent.CONNECTED_OFFLINE);
+ logDeviceEvent(
+ EventType.DEVICE_CONNECTED_OFFLINE,
+ testDevice.getSerialNumber());
+ }
+ mFirstDeviceAdded.countDown();
+ }
+ };
+
+ if (mSynchronousMode) {
+ connectedRunnable.run();
+ } else {
+ // Device creation step can take a little bit of time, so do it in a thread to
+ // avoid blocking following events of new devices
+ Thread checkThread = new Thread(connectedRunnable, threadName);
+ // Device checking threads shouldn't hold the JVM open
+ checkThread.setDaemon(true);
+ checkThread.start();
}
- // DDMS will allocate a new IDevice, so need
- // to update the TestDevice record with the new device
- CLog.d("Updating IDevice for device %s", idevice.getSerialNumber());
- testDevice.setIDevice(idevice);
- TestDeviceState newState = TestDeviceState.getStateByDdms(idevice.getState());
- testDevice.setDeviceState(newState);
- if (newState == TestDeviceState.ONLINE) {
- DeviceEventResponse r = mManagedDeviceList.handleDeviceEvent(testDevice,
- DeviceEvent.CONNECTED_ONLINE);
- if (r.stateChanged && r.allocationState ==
- DeviceAllocationState.Checking_Availability) {
- checkAndAddAvailableDevice(testDevice);
- }
- logDeviceEvent(EventType.DEVICE_CONNECTED, testDevice.getSerialNumber());
- } else if (DeviceState.OFFLINE.equals(idevice.getState()) ||
- DeviceState.UNAUTHORIZED.equals(idevice.getState())) {
- mManagedDeviceList.handleDeviceEvent(testDevice, DeviceEvent.CONNECTED_OFFLINE);
- logDeviceEvent(EventType.DEVICE_CONNECTED_OFFLINE, testDevice.getSerialNumber());
- }
- mFirstDeviceAdded.countDown();
}
/**
diff --git a/src/com/android/tradefed/device/FastbootHelper.java b/src/com/android/tradefed/device/FastbootHelper.java
index 57e954a..039de7d 100644
--- a/src/com/android/tradefed/device/FastbootHelper.java
+++ b/src/com/android/tradefed/device/FastbootHelper.java
@@ -81,8 +81,8 @@
* @return a set of device serials.
*/
public Set<String> getDevices() {
- CommandResult fastbootResult = mRunUtil.runTimedCmd(FASTBOOT_CMD_TIMEOUT,
- mFastbootPath, "devices");
+ CommandResult fastbootResult =
+ mRunUtil.runTimedCmdSilently(FASTBOOT_CMD_TIMEOUT, mFastbootPath, "devices");
if (fastbootResult.getStatus().equals(CommandStatus.SUCCESS)) {
CLog.v("fastboot devices returned\n %s",
fastbootResult.getStdout());
diff --git a/src/com/android/tradefed/device/INativeDevice.java b/src/com/android/tradefed/device/INativeDevice.java
index 866f807..388b52b 100644
--- a/src/com/android/tradefed/device/INativeDevice.java
+++ b/src/com/android/tradefed/device/INativeDevice.java
@@ -225,6 +225,33 @@
throws DeviceNotAvailableException;
/**
+     * Executes an adb shell command, with more parameters to control command behavior.
+ *
+ * @see #executeShellCommand(String, IShellOutputReceiver)
+ * @param command the adb shell command to run
+ * @param receiver the {@link IShellOutputReceiver} to direct shell output to.
+ * @param maxTimeoutForCommand the maximum timeout for the command to complete; unit as
+ * specified in <code>timeUnit</code>
+ * @param maxTimeToOutputShellResponse the maximum amount of time during which the command is
+ * allowed to not output any response; unit as specified in <code>timeUnit</code>
+ * @param timeUnit unit for <code>maxTimeToOutputShellResponse</code>
+     * @param retryAttempts the maximum number of times to retry command if it fails due to an
+     *     exception. DeviceNotResponsiveException will be thrown if <var>retryAttempts</var> are
+ * performed without success.
+ * @throws DeviceNotAvailableException if connection with device is lost and cannot be
+ * recovered.
+ * @see TimeUtil
+ */
+ public void executeShellCommand(
+ String command,
+ IShellOutputReceiver receiver,
+ long maxTimeoutForCommand,
+ long maxTimeToOutputShellResponse,
+ TimeUnit timeUnit,
+ int retryAttempts)
+ throws DeviceNotAvailableException;
+
+ /**
* Helper method which executes a adb shell command and returns output as a {@link String}.
*
* @param command the adb shell command to run
@@ -1058,4 +1085,7 @@
* @return ProcessInfo of given processName
*/
public ProcessInfo getProcessByName(String processName) throws DeviceNotAvailableException;
+
+ /** Returns the pid of the service or null if something went wrong. */
+ public String getProcessPid(String process) throws DeviceNotAvailableException;
}
diff --git a/src/com/android/tradefed/device/ITestDevice.java b/src/com/android/tradefed/device/ITestDevice.java
index cbc8c7c..746b2c6 100644
--- a/src/com/android/tradefed/device/ITestDevice.java
+++ b/src/com/android/tradefed/device/ITestDevice.java
@@ -621,4 +621,16 @@
* on non-secure ones only)
*/
public void disableKeyguard() throws DeviceNotAvailableException;
+
+ /**
+     * Attempt to dump the heap from the system_server. It is the caller's responsibility to clean up
+ * the dumped file.
+ *
+ * @param process the name of the device process to dumpheap on.
+ * @param devicePath the path on the device where to put the dump. This must be a location where
+ * permissions allow it.
+ * @return the {@link File} containing the report. Null if something failed.
+ * @throws DeviceNotAvailableException
+ */
+ public File dumpHeap(String process, String devicePath) throws DeviceNotAvailableException;
}
diff --git a/src/com/android/tradefed/device/NativeDevice.java b/src/com/android/tradefed/device/NativeDevice.java
index e6c9023..0c9221c 100644
--- a/src/com/android/tradefed/device/NativeDevice.java
+++ b/src/com/android/tradefed/device/NativeDevice.java
@@ -156,7 +156,6 @@
static final String MAC_ADDRESS_PATTERN = "([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}";
static final String MAC_ADDRESS_COMMAND = "su root cat /sys/class/net/wlan0/address";
-
/** The network monitoring interval in ms. */
private static final int NETWORK_MONITOR_INTERVAL = 10 * 1000;
@@ -368,6 +367,9 @@
*/
@Override
public String getProperty(final String name) throws DeviceNotAvailableException {
+ if (getIDevice() instanceof StubDevice) {
+ return null;
+ }
if (!DeviceState.ONLINE.equals(getIDevice().getState())) {
CLog.d("Device %s is not online cannot get property %s.", getSerialNumber(), name);
return null;
@@ -579,6 +581,35 @@
performDeviceAction(String.format("shell %s", command), action, retryAttempts);
}
+ /** {@inheritDoc} */
+ @Override
+ public void executeShellCommand(
+ final String command,
+ final IShellOutputReceiver receiver,
+ final long maxTimeoutForCommand,
+ final long maxTimeToOutputShellResponse,
+ final TimeUnit timeUnit,
+ final int retryAttempts)
+ throws DeviceNotAvailableException {
+ DeviceAction action =
+ new DeviceAction() {
+ @Override
+ public boolean run()
+ throws TimeoutException, IOException, AdbCommandRejectedException,
+ ShellCommandUnresponsiveException {
+ getIDevice()
+ .executeShellCommand(
+ command,
+ receiver,
+ maxTimeoutForCommand,
+ maxTimeToOutputShellResponse,
+ timeUnit);
+ return true;
+ }
+ };
+ performDeviceAction(String.format("shell %s", command), action, retryAttempts);
+ }
+
/**
* {@inheritDoc}
*/
@@ -3799,4 +3830,33 @@
return null;
}
}
+
+ @Override
+ public File dumpHeap(String process, String devicePath) throws DeviceNotAvailableException {
+ throw new UnsupportedOperationException("dumpHeap is not supported.");
+ }
+
+ @Override
+ public String getProcessPid(String process) throws DeviceNotAvailableException {
+ String output = executeShellCommand(String.format("pidof %s", process)).trim();
+ if (checkValidPid(output)) {
+ return output;
+ }
+ CLog.e("Failed to find a valid pid for process.");
+ return null;
+ }
+
+ /** Validate that pid is an integer and not empty. */
+ private boolean checkValidPid(String output) {
+ if (output.isEmpty()) {
+ return false;
+ }
+ try {
+ Integer.parseInt(output);
+ } catch (NumberFormatException e) {
+ CLog.e(e);
+ return false;
+ }
+ return true;
+ }
}
diff --git a/src/com/android/tradefed/device/TestDevice.java b/src/com/android/tradefed/device/TestDevice.java
index 66285f4..e5ab1d4 100644
--- a/src/com/android/tradefed/device/TestDevice.java
+++ b/src/com/android/tradefed/device/TestDevice.java
@@ -30,6 +30,7 @@
import com.android.tradefed.util.StreamUtil;
import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Strings;
import java.awt.Image;
import java.awt.image.BufferedImage;
@@ -89,6 +90,11 @@
/** Timeout to wait for a screenshot before giving up to avoid hanging forever */
private static final long MAX_SCREENSHOT_TIMEOUT = 5 * 60 * 1000; // 5 min
+ /** adb shell am dumpheap <service pid> <dump file path> */
+ private static final String DUMPHEAP_CMD = "am dumpheap %s %s";
+ /** Time given to a file to be dumped on device side */
+ private static final long DUMPHEAP_TIME = 5000l;
+
/**
* @param device
* @param stateMonitor
@@ -1215,4 +1221,33 @@
+ "Must be API %d.", feature, getSerialNumber(), strictMinLevel));
}
}
+
+ @Override
+ public File dumpHeap(String process, String devicePath) throws DeviceNotAvailableException {
+ if (Strings.isNullOrEmpty(devicePath) || Strings.isNullOrEmpty(process)) {
+ throw new IllegalArgumentException("devicePath or process cannot be null or empty.");
+ }
+ String pid = getProcessPid(process);
+ if (pid == null) {
+ return null;
+ }
+ File dump = dumpAndPullHeap(pid, devicePath);
+ // Clean the device.
+ executeShellCommand(String.format("rm %s", devicePath));
+ return dump;
+ }
+
+ /** Dump the heap file and pull it from the device. */
+ private File dumpAndPullHeap(String pid, String devicePath) throws DeviceNotAvailableException {
+ executeShellCommand(String.format(DUMPHEAP_CMD, pid, devicePath));
+ // Allow a little bit of time for the file to populate on device side.
+ int attempt = 0;
+ // TODO: add an API to check device file size
+ while (!doesFileExist(devicePath) && attempt < 3) {
+ getRunUtil().sleep(DUMPHEAP_TIME);
+ attempt++;
+ }
+ File dumpFile = pullFile(devicePath);
+ return dumpFile;
+ }
}
diff --git a/src/com/android/tradefed/device/metric/BaseDeviceMetricCollector.java b/src/com/android/tradefed/device/metric/BaseDeviceMetricCollector.java
new file mode 100644
index 0000000..9f6fbc0
--- /dev/null
+++ b/src/com/android/tradefed/device/metric/BaseDeviceMetricCollector.java
@@ -0,0 +1,155 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.device.metric;
+
+import com.android.ddmlib.testrunner.TestIdentifier;
+import com.android.tradefed.build.IBuildInfo;
+import com.android.tradefed.device.ITestDevice;
+import com.android.tradefed.invoker.IInvocationContext;
+import com.android.tradefed.result.ITestInvocationListener;
+import com.android.tradefed.result.InputStreamSource;
+import com.android.tradefed.result.LogDataType;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Base implementation of {@link IMetricCollector} that allows starting and stopping collection on
+ * {@link #onTestRunStart(DeviceMetricData)} and {@link #onTestRunEnd(DeviceMetricData)}.
+ */
+public class BaseDeviceMetricCollector implements IMetricCollector {
+
+ private IInvocationContext mContext;
+ private ITestInvocationListener mForwarder;
+ private DeviceMetricData mRunData;
+
+ @Override
+ public ITestInvocationListener init(
+ IInvocationContext context, ITestInvocationListener listener) {
+ mContext = context;
+ mForwarder = listener;
+ return this;
+ }
+
+ @Override
+ public List<ITestDevice> getDevices() {
+ return mContext.getDevices();
+ }
+
+ @Override
+ public List<IBuildInfo> getBuildInfos() {
+ return mContext.getBuildInfos();
+ }
+
+ @Override
+ public ITestInvocationListener getInvocationListener() {
+ return mForwarder;
+ }
+
+ @Override
+ public void onTestRunStart(DeviceMetricData runData) {
+ // Does nothing
+ }
+
+ @Override
+ public void onTestRunEnd(DeviceMetricData runData) {
+ // Does nothing
+ }
+
+ /** =================================== */
+ /** Invocation Listeners for forwarding */
+ @Override
+ public final void invocationStarted(IInvocationContext context) {
+ mForwarder.invocationStarted(context);
+ }
+
+ @Override
+ public final void invocationFailed(Throwable cause) {
+ mForwarder.invocationFailed(cause);
+ }
+
+ @Override
+ public final void invocationEnded(long elapsedTime) {
+ mForwarder.invocationEnded(elapsedTime);
+ }
+
+ @Override
+ public final void testLog(String dataName, LogDataType dataType, InputStreamSource dataStream) {
+ mForwarder.testLog(dataName, dataType, dataStream);
+ }
+
+ /** Test run callbacks */
+ @Override
+ public final void testRunStarted(String runName, int testCount) {
+ mRunData = new DeviceMetricData();
+ onTestRunStart(mRunData);
+ mForwarder.testRunStarted(runName, testCount);
+ }
+
+ @Override
+ public final void testRunFailed(String errorMessage) {
+ mForwarder.testRunFailed(errorMessage);
+ }
+
+ @Override
+ public final void testRunStopped(long elapsedTime) {
+ mForwarder.testRunStopped(elapsedTime);
+ }
+
+ @Override
+ public final void testRunEnded(long elapsedTime, Map<String, String> runMetrics) {
+ onTestRunEnd(mRunData);
+ mRunData.addToMetrics(runMetrics);
+ mForwarder.testRunEnded(elapsedTime, runMetrics);
+ }
+
+ /** Test cases callbacks */
+ @Override
+ public final void testStarted(TestIdentifier test) {
+ testStarted(test, System.currentTimeMillis());
+ }
+
+ @Override
+ public final void testStarted(TestIdentifier test, long startTime) {
+ mForwarder.testStarted(test, startTime);
+ }
+
+ @Override
+ public final void testFailed(TestIdentifier test, String trace) {
+ mForwarder.testFailed(test, trace);
+ }
+
+ @Override
+ public final void testEnded(TestIdentifier test, Map<String, String> testMetrics) {
+ testEnded(test, System.currentTimeMillis(), testMetrics);
+ }
+
+ @Override
+ public final void testEnded(
+ TestIdentifier test, long endTime, Map<String, String> testMetrics) {
+ mForwarder.testEnded(test, endTime, testMetrics);
+ }
+
+ @Override
+ public final void testAssumptionFailure(TestIdentifier test, String trace) {
+ mForwarder.testAssumptionFailure(test, trace);
+ }
+
+ @Override
+ public final void testIgnored(TestIdentifier test) {
+ mForwarder.testIgnored(test);
+ }
+}
diff --git a/src/com/android/tradefed/device/metric/DeviceMetricData.java b/src/com/android/tradefed/device/metric/DeviceMetricData.java
new file mode 100644
index 0000000..d31324d
--- /dev/null
+++ b/src/com/android/tradefed/device/metric/DeviceMetricData.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.device.metric;
+
+import java.io.Serializable;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * Object to hold all the data collected by metric collectors. TODO: Add methods for holding and
+ * receiving data.
+ */
+public class DeviceMetricData implements Serializable {
+ private static final long serialVersionUID = 1;
+
+    // TODO: expand type support to more complex types: Object, File, etc.
+ private LinkedHashMap<String, String> mCurrentStringMetrics = new LinkedHashMap<>();
+
+ public void addStringMetric(String key, String value) {
+ mCurrentStringMetrics.put(key, value);
+ }
+
+ /**
+ * Push all the data received so far to the map of metrics that will be reported. This should
+ * also clean up the resources after pushing them.
+ *
+ * @param metrics The metrics currently available.
+ */
+ public void addToMetrics(Map<String, String> metrics) {
+ // TODO: dump all the metrics collected to the map of metrics to be reported.
+ metrics.putAll(mCurrentStringMetrics);
+ }
+}
diff --git a/src/com/android/tradefed/device/metric/IMetricCollector.java b/src/com/android/tradefed/device/metric/IMetricCollector.java
new file mode 100644
index 0000000..b6d0ef7
--- /dev/null
+++ b/src/com/android/tradefed/device/metric/IMetricCollector.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.device.metric;
+
+import com.android.tradefed.build.IBuildInfo;
+import com.android.tradefed.device.ITestDevice;
+import com.android.tradefed.invoker.IInvocationContext;
+import com.android.tradefed.result.ITestInvocationListener;
+
+import java.util.List;
+
+/**
+ * This interface will be added as a decorator when reporting tests results in order to collect
+ * matching metrics.
+ */
+public interface IMetricCollector extends ITestInvocationListener {
+
+ /**
+ * Initialization of the collector with the current context and where to forward results.
+ *
+ * @param context the {@link IInvocationContext} for the invocation in progress.
+ * @param listener the {@link ITestInvocationListener} where to put results.
+ * @return the new listener wrapping the original one.
+ */
+ public ITestInvocationListener init(
+ IInvocationContext context, ITestInvocationListener listener);
+
+ /** Returns the list of devices available in the invocation. */
+ public List<ITestDevice> getDevices();
+
+ /** Returns the list of build information available in the invocation. */
+ public List<IBuildInfo> getBuildInfos();
+
+ /** Returns the original {@link ITestInvocationListener} where we are forwarding the results. */
+ public ITestInvocationListener getInvocationListener();
+
+ /**
+ * Callback when a test run is started.
+ *
+ * @param runData the {@link DeviceMetricData} holding the data for the run.
+ */
+ public void onTestRunStart(DeviceMetricData runData);
+
+ /**
+ * Callback when a test run is ended. This should be the time for clean up.
+ *
+ * @param runData the {@link DeviceMetricData} holding the data for the run. Will be the same
+ * object as during {@link #onTestRunStart(DeviceMetricData)}.
+ */
+ public void onTestRunEnd(DeviceMetricData runData);
+}
diff --git a/src/com/android/tradefed/device/metric/ScheduledDeviceMetricCollector.java b/src/com/android/tradefed/device/metric/ScheduledDeviceMetricCollector.java
new file mode 100644
index 0000000..7c1f9a7
--- /dev/null
+++ b/src/com/android/tradefed/device/metric/ScheduledDeviceMetricCollector.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.device.metric;
+
+import com.android.tradefed.config.Option;
+import com.android.tradefed.log.LogUtil.CLog;
+
+import java.util.Timer;
+import java.util.TimerTask;
+
+/**
+ * An {@link IMetricCollector} that allows running a collection task periodically at a set interval.
+ */
+public abstract class ScheduledDeviceMetricCollector extends BaseDeviceMetricCollector {
+
+ @Option(
+ name = "fixed-schedule-rate",
+ description = "Schedule the timetask as a fixed schedule rate"
+ )
+ private boolean mFixedScheduleRate = false;
+
+ @Option(
+ name = "interval",
+ description = "the interval between two tasks being scheduled",
+ isTimeVal = true
+ )
+ private long mIntervalMs = 60 * 1000l;
+
+ private Timer timer;
+
+ @Override
+ public final void onTestRunStart(DeviceMetricData runData) {
+ CLog.d("starting");
+ onStart(runData);
+ timer = new Timer();
+ TimerTask timerTask =
+ new TimerTask() {
+ @Override
+ public void run() {
+ try {
+ collect(runData);
+ } catch (InterruptedException e) {
+ timer.cancel();
+ Thread.currentThread().interrupt();
+ CLog.e("Interrupted exception thrown from task:");
+ CLog.e(e);
+ }
+ }
+ };
+
+ if (mFixedScheduleRate) {
+ timer.scheduleAtFixedRate(timerTask, 0, mIntervalMs);
+ } else {
+ timer.schedule(timerTask, 0, mIntervalMs);
+ }
+ }
+
+ @Override
+ public final void onTestRunEnd(DeviceMetricData runData) {
+ if (timer != null) {
+ timer.cancel();
+ timer.purge();
+ }
+ onEnd(runData);
+ CLog.d("finished");
+ }
+
+ /**
+     * Task run periodically and asynchronously while the test is running.
+ *
+ * @param runData the {@link DeviceMetricData} where to put metrics.
+ * @throws InterruptedException
+ */
+ abstract void collect(DeviceMetricData runData) throws InterruptedException;
+
+ /**
+ * Executed when entering this collector.
+ *
+ * @param runData the {@link DeviceMetricData} where to put metrics.
+ */
+ void onStart(DeviceMetricData runData) {
+ // Does nothing.
+ }
+
+ /**
+ * Executed when finishing this collector.
+ *
+ * @param runData the {@link DeviceMetricData} where to put metrics.
+ */
+ void onEnd(DeviceMetricData runData) {
+ // Does nothing.
+ }
+}
diff --git a/src/com/android/tradefed/invoker/ITestInvocation.java b/src/com/android/tradefed/invoker/ITestInvocation.java
index 0214071..58ec3de 100644
--- a/src/com/android/tradefed/invoker/ITestInvocation.java
+++ b/src/com/android/tradefed/invoker/ITestInvocation.java
@@ -18,7 +18,10 @@
import com.android.tradefed.config.IConfiguration;
import com.android.tradefed.device.DeviceNotAvailableException;
+import com.android.tradefed.invoker.shard.IShardHelper;
import com.android.tradefed.result.ITestInvocationListener;
+import com.android.tradefed.targetprep.BuildError;
+import com.android.tradefed.targetprep.TargetSetupError;
/**
* Handles one TradeFederation test invocation.
@@ -41,6 +44,93 @@
IRescheduler rescheduler, ITestInvocationListener... extraListeners)
throws DeviceNotAvailableException, Throwable;
+ /**
+ * Execute the build_provider step of the invocation.
+ *
+ * @param context the {@link IInvocationContext} of the invocation.
+ * @param config the {@link IConfiguration} of this test run.
+ * @param rescheduler the {@link IRescheduler}, for rescheduling portions of the invocation for
+ * execution on another resource(s)
+ * @param listener the {@link ITestInvocation} to report build download failures.
+ * @return True if we successfully downloaded the build, false otherwise.
+ * @throws DeviceNotAvailableException
+ */
+ public default boolean fetchBuild(
+ IInvocationContext context,
+ IConfiguration config,
+ IRescheduler rescheduler,
+ ITestInvocationListener listener)
+ throws DeviceNotAvailableException {
+ return false;
+ }
+
+ /**
+ * Execute the build_provider clean up step. Associated with the build fetching.
+ *
+ * @param context the {@link IInvocationContext} of the invocation.
+ * @param config the {@link IConfiguration} of this test run.
+ */
+ public default void cleanUpBuilds(IInvocationContext context, IConfiguration config) {}
+
+ /**
+ * Execute the target_preparer and multi_target_preparer setUp step. Does all the devices setup
+ * required for the test to run.
+ *
+ * @param context the {@link IInvocationContext} of the invocation.
+ * @param config the {@link IConfiguration} of this test run.
+ * @param listener the {@link ITestInvocation} to report setup failures.
+ * @throws TargetSetupError
+ * @throws BuildError
+ * @throws DeviceNotAvailableException
+ */
+ public default void doSetup(
+ IInvocationContext context,
+ IConfiguration config,
+ final ITestInvocationListener listener)
+ throws TargetSetupError, BuildError, DeviceNotAvailableException {}
+
+ /**
+ * Execute the target_preparer and multi_target_preparer teardown step. Does the devices tear
+ * down associated with the setup.
+ *
+ * @param context the {@link IInvocationContext} of the invocation.
+ * @param config the {@link IConfiguration} of this test run.
+ * @param exception the original exception thrown by the test running.
+ * @throws Throwable
+ */
+ public default void doTeardown(
+ IInvocationContext context, IConfiguration config, Throwable exception)
+ throws Throwable {}
+
+ /**
+ * Execute the target_preparer and multi_target_preparer cleanUp step. Does the devices clean
+ * up.
+ *
+ * @param context the {@link IInvocationContext} of the invocation.
+ * @param config the {@link IConfiguration} of this test run.
+ * @param exception the original exception thrown by the test running.
+ */
+ public default void doCleanUp(
+ IInvocationContext context, IConfiguration config, Throwable exception) {}
+
+ /**
+ * Attempt to shard the configuration into sub-configurations, to be re-scheduled to run on
+ * multiple resources in parallel.
+ *
+ * <p>If a shard count is greater than 1, it will simply create configs for each shard by
+     * setting shard indices and reschedule them. If a shard count is not set, it will fall back to
+ * {@link IShardHelper#shardConfig}.
+ *
+ * @param config the current {@link IConfiguration}.
+ * @param context the {@link IInvocationContext} holding the info of the tests.
+ * @param rescheduler the {@link IRescheduler}
+ * @return true if test was sharded. Otherwise return <code>false</code>
+ */
+ public default boolean shardConfig(
+ IConfiguration config, IInvocationContext context, IRescheduler rescheduler) {
+ return false;
+ }
+
/** Notify the {@link TestInvocation} that TradeFed has been requested to stop. */
public default void notifyInvocationStopped() {}
}
diff --git a/src/com/android/tradefed/invoker/InvocationContext.java b/src/com/android/tradefed/invoker/InvocationContext.java
index f8a4be4..0ebe25c 100644
--- a/src/com/android/tradefed/invoker/InvocationContext.java
+++ b/src/com/android/tradefed/invoker/InvocationContext.java
@@ -29,7 +29,6 @@
import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.ArrayList;
-import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -61,7 +60,7 @@
* Creates a {@link BuildInfo} using default attribute values.
*/
public InvocationContext() {
- mAllocatedDeviceAndBuildMap = new HashMap<ITestDevice, IBuildInfo>();
+ mAllocatedDeviceAndBuildMap = new LinkedHashMap<ITestDevice, IBuildInfo>();
// Use LinkedHashMap to ensure key ordering by insertion order
mNameAndDeviceMap = new LinkedHashMap<String, ITestDevice>();
mNameAndBuildinfoMap = new LinkedHashMap<String, IBuildInfo>();
@@ -298,7 +297,7 @@
// our "pseudo-constructor"
in.defaultReadObject();
// now we are a "live" object again, so let's init the transient field
- mAllocatedDeviceAndBuildMap = new HashMap<ITestDevice, IBuildInfo>();
+ mAllocatedDeviceAndBuildMap = new LinkedHashMap<ITestDevice, IBuildInfo>();
mNameAndDeviceMap = new LinkedHashMap<String, ITestDevice>();
}
}
diff --git a/src/com/android/tradefed/invoker/ShardListener.java b/src/com/android/tradefed/invoker/ShardListener.java
index ea0f1fb..8aa5625 100644
--- a/src/com/android/tradefed/invoker/ShardListener.java
+++ b/src/com/android/tradefed/invoker/ShardListener.java
@@ -113,7 +113,19 @@
super.invocationEnded(elapsedTime);
synchronized (mMasterListener) {
logShardContent(getRunResults());
+ IInvocationContext moduleContext = null;
for (TestRunResult runResult : getRunResults()) {
+ // Stop or start the module
+ if (moduleContext != null
+ && !getModuleContextForRunResult(runResult).equals(moduleContext)) {
+ mMasterListener.testModuleEnded();
+ moduleContext = null;
+ }
+ if (moduleContext == null && getModuleContextForRunResult(runResult) != null) {
+ moduleContext = getModuleContextForRunResult(runResult);
+ mMasterListener.testModuleStarted(moduleContext);
+ }
+
mMasterListener.testRunStarted(runResult.getName(), runResult.getNumTests());
forwardTestResults(runResult.getTestResults());
if (runResult.isRunFailure()) {
@@ -121,6 +133,11 @@
}
mMasterListener.testRunEnded(runResult.getElapsedTime(), runResult.getRunMetrics());
}
+ // Close the last module
+ if (moduleContext != null) {
+ mMasterListener.testModuleEnded();
+ moduleContext = null;
+ }
mMasterListener.invocationEnded(elapsedTime);
}
}
diff --git a/src/com/android/tradefed/invoker/TestInvocation.java b/src/com/android/tradefed/invoker/TestInvocation.java
index 99d1ca7..13cdae0 100644
--- a/src/com/android/tradefed/invoker/TestInvocation.java
+++ b/src/com/android/tradefed/invoker/TestInvocation.java
@@ -32,6 +32,7 @@
import com.android.tradefed.device.ITestDevice.RecoveryMode;
import com.android.tradefed.device.StubDevice;
import com.android.tradefed.device.TestDeviceState;
+import com.android.tradefed.device.metric.IMetricCollector;
import com.android.tradefed.invoker.shard.IShardHelper;
import com.android.tradefed.invoker.shard.ShardBuildCloner;
import com.android.tradefed.log.ILeveledLogOutput;
@@ -45,6 +46,7 @@
import com.android.tradefed.result.LogDataType;
import com.android.tradefed.result.LogSaverResultForwarder;
import com.android.tradefed.result.ResultForwarder;
+import com.android.tradefed.sandbox.SandboxInvocationRunner;
import com.android.tradefed.suite.checker.ISystemStatusCheckerReceiver;
import com.android.tradefed.targetprep.BuildError;
import com.android.tradefed.targetprep.DeviceFailedToBootError;
@@ -95,6 +97,8 @@
* invocation. (Setup, test, tear down).
*/
private static final String BATTERY_ATTRIBUTE_FORMAT_KEY = "%s-battery-%s";
+ /** Key of the command line args attributes */
+ private static final String COMMAND_ARGS_KEY = "command_line_args";
static final String TRADEFED_LOG_NAME = "host_log";
static final String DEVICE_LOG_NAME_PREFIX = "device_logcat_";
@@ -155,20 +159,8 @@
}
}
- /**
- * Attempt to shard the configuration into sub-configurations, to be re-scheduled to run on
- * multiple resources in parallel.
- *
- * <p>If a shard count is greater than 1, it will simply create configs for each shard by
- * setting shard indices and reschedule them. If a shard count is not set,it would fallback to
- * {@link IShardHelper#shardConfig}.
- *
- * @param config the current {@link IConfiguration}.
- * @param context the {@link IInvocationContext} holding the info of the tests.
- * @param rescheduler the {@link IRescheduler}
- * @return true if test was sharded. Otherwise return <code>false</code>
- */
- private boolean shardConfig(
+ @Override
+ public boolean shardConfig(
IConfiguration config, IInvocationContext context, IRescheduler rescheduler) {
mStatus = "sharding";
return createShardHelper().shardConfig(config, context, rescheduler);
@@ -189,7 +181,7 @@
private void updateBuild(IBuildInfo info, IConfiguration config) {
if (config.getCommandLine() != null) {
// TODO: obfuscate the password if any.
- info.addBuildAttribute("command_line_args", config.getCommandLine());
+ info.addBuildAttribute(COMMAND_ARGS_KEY, config.getCommandLine());
}
if (config.getCommandOptions().getShardCount() != null) {
info.addBuildAttribute("shard_count",
@@ -247,7 +239,7 @@
// TODO: Once reporting on context is done, only set context attributes
if (config.getCommandLine() != null) {
// TODO: obfuscate the password if any.
- context.addInvocationAttribute("command_line_args", config.getCommandLine());
+ context.addInvocationAttribute(COMMAND_ARGS_KEY, config.getCommandLine());
}
if (config.getCommandOptions().getShardCount() != null) {
context.addInvocationAttribute("shard_count",
@@ -414,7 +406,7 @@
}
mStatus = "tearing down";
try {
- doTeardown(config, context, exception);
+ doTeardown(context, config, exception);
} catch (Throwable e) {
tearDownException = e;
CLog.e("Exception when tearing down invocation: %s", tearDownException.toString());
@@ -427,7 +419,7 @@
mStatus = "done running tests";
try {
// Clean up host.
- doCleanUp(config, context, exception);
+ doCleanUp(context, config, exception);
if (config.getProfiler() != null) {
config.getProfiler().reportAllMetrics(listener);
}
@@ -451,10 +443,7 @@
listener.invocationEnded(elapsedTime);
}
} finally {
- for (String deviceName : context.getDeviceConfigNames()) {
- config.getDeviceConfigByName(deviceName).getBuildProvider()
- .cleanUp(context.getBuildInfo(deviceName));
- }
+ cleanUpBuilds(context, config);
}
}
if (tearDownException != null) {
@@ -469,18 +458,24 @@
private void prepareAndRun(
IConfiguration config, IInvocationContext context, ITestInvocationListener listener)
throws Throwable {
+ if (config.getCommandOptions().shouldUseSandboxing()) {
+ // TODO: extract in new TestInvocation type.
+ // If the invocation is sandboxed run as a sandbox instead.
+ SandboxInvocationRunner.prepareAndRun(config, context, listener);
+ return;
+ }
getRunUtil().allowInterrupt(true);
logDeviceBatteryLevel(context, "initial -> setup");
- doSetup(config, context, listener);
+ doSetup(context, config, listener);
logDeviceBatteryLevel(context, "setup -> test");
runTests(context, config, listener);
logDeviceBatteryLevel(context, "after test");
}
- @VisibleForTesting
- void doSetup(
- IConfiguration config,
+ @Override
+ public void doSetup(
IInvocationContext context,
+ IConfiguration config,
final ITestInvocationListener listener)
throws TargetSetupError, BuildError, DeviceNotAvailableException {
// TODO: evaluate doing device setup in parallel
@@ -527,8 +522,9 @@
}
}
- private void doTeardown(IConfiguration config, IInvocationContext context,
- Throwable exception) throws Throwable {
+ @Override
+ public void doTeardown(IInvocationContext context, IConfiguration config, Throwable exception)
+ throws Throwable {
Throwable throwable = null;
List<IMultiTargetPreparer> multiPreparers = config.getMultiTargetPreparers();
@@ -579,8 +575,8 @@
}
}
- private void doCleanUp(IConfiguration config, IInvocationContext context,
- Throwable exception) {
+ @Override
+ public void doCleanUp(IInvocationContext context, IConfiguration config, Throwable exception) {
for (String deviceName : context.getDeviceConfigNames()) {
List<ITargetPreparer> preparers =
config.getDeviceConfigByName(deviceName).getTargetPreparers();
@@ -767,8 +763,15 @@
* @param listener the {@link ITestInvocationListener} of test results
* @throws DeviceNotAvailableException
*/
- private void runTests(IInvocationContext context, IConfiguration config,
- ITestInvocationListener listener) throws DeviceNotAvailableException {
+ @VisibleForTesting
+ void runTests(
+ IInvocationContext context, IConfiguration config, ITestInvocationListener listener)
+ throws DeviceNotAvailableException {
+ // Wrap collectors in each other and collection will be sequential
+ for (IMetricCollector collector : config.getMetricCollectors()) {
+ listener = collector.init(context, listener);
+ }
+
for (IRemoteTest test : config.getTests()) {
// For compatibility of those receivers, they are assumed to be single device alloc.
if (test instanceof IDeviceTest) {
@@ -834,44 +837,20 @@
}
}
- /**
- * {@inheritDoc}
- */
@Override
- public void invoke(
- IInvocationContext context, IConfiguration config, IRescheduler rescheduler,
- ITestInvocationListener... extraListeners)
- throws DeviceNotAvailableException, Throwable {
- List<ITestInvocationListener> allListeners =
- new ArrayList<>(config.getTestInvocationListeners().size() + extraListeners.length);
- allListeners.addAll(config.getTestInvocationListeners());
- allListeners.addAll(Arrays.asList(extraListeners));
- if (config.getProfiler() != null) {
- allListeners.add(new AggregatingProfilerListener(config.getProfiler()));
+ public boolean fetchBuild(
+ IInvocationContext context,
+ IConfiguration config,
+ IRescheduler rescheduler,
+ ITestInvocationListener listener)
+ throws DeviceNotAvailableException {
+ // If the invocation is currently sandboxed, builds have already been downloaded.
+ // TODO: refactor to be part of new TestInvocation type.
+ if (config.getConfigurationDescription().shouldUseSandbox()) {
+ return true;
}
- ITestInvocationListener listener = new LogSaverResultForwarder(config.getLogSaver(),
- allListeners);
String currentDeviceName = null;
try {
- mStatus = "fetching build";
- config.getLogOutput().init();
- getLogRegistry().registerLogger(config.getLogOutput());
- for (String deviceName : context.getDeviceConfigNames()) {
- context.getDevice(deviceName).clearLastConnectedWifiNetwork();
- context.getDevice(deviceName).setOptions(
- config.getDeviceConfigByName(deviceName).getDeviceOptions());
- if (config.getDeviceConfigByName(deviceName).getDeviceOptions()
- .isLogcatCaptureEnabled()) {
- if (!(context.getDevice(deviceName).getIDevice() instanceof StubDevice)) {
- context.getDevice(deviceName).startLogcat();
- }
- }
- }
-
- String cmdLineArgs = config.getCommandLine();
- if (cmdLineArgs != null) {
- CLog.i("Invocation was started with cmd: %s", cmdLineArgs);
- }
updateInvocationContext(context, config);
// TODO: evaluate fetching build in parallel
for (String deviceName : context.getDeviceConfigNames()) {
@@ -882,11 +861,11 @@
IBuildProvider provider = deviceConfig.getBuildProvider();
// Set the provider test tag
if (provider instanceof IInvocationContextReceiver) {
- ((IInvocationContextReceiver)provider).setInvocationContext(context);
+ ((IInvocationContextReceiver) provider).setInvocationContext(context);
}
// Get the build
if (provider instanceof IDeviceBuildProvider) {
- info = ((IDeviceBuildProvider)provider).getBuild(device);
+ info = ((IDeviceBuildProvider) provider).getBuild(device);
} else {
info = provider.getBuild();
}
@@ -902,24 +881,13 @@
device.getSerialNumber());
rescheduleTest(config, rescheduler);
// Set the exit code to error
- setExitCode(ExitCode.NO_BUILD,
- new BuildRetrievalError("No build found to test."));
- return;
+ setExitCode(
+ ExitCode.NO_BUILD, new BuildRetrievalError("No build found to test."));
+ return false;
}
// TODO: remove build update when reporting is done on context
updateBuild(info, config);
}
- if (shardConfig(config, context, rescheduler)) {
- CLog.i("Invocation for %s has been sharded, rescheduling",
- context.getSerials().toString());
- } else {
- if (config.getTests() == null || config.getTests().isEmpty()) {
- CLog.e("No tests to run");
- } else {
- performInvocation(config, context, rescheduler, listener);
- setExitCode(ExitCode.NO_ERROR, null);
- }
- }
} catch (BuildRetrievalError e) {
CLog.e(e);
if (currentDeviceName != null) {
@@ -935,25 +903,92 @@
}
reportHostLog(listener, config.getLogOutput());
listener.invocationEnded(0);
- return;
+ return false;
+ }
+ return true;
+ }
+
+ @Override
+ public void cleanUpBuilds(IInvocationContext context, IConfiguration config) {
+ // Ensure build infos are always cleaned up at the end of invocation.
+ for (String cleanUpDevice : context.getDeviceConfigNames()) {
+ if (context.getBuildInfo(cleanUpDevice) != null) {
+ try {
+ config.getDeviceConfigByName(cleanUpDevice)
+ .getBuildProvider()
+ .cleanUp(context.getBuildInfo(cleanUpDevice));
+ } catch (RuntimeException e) {
+ // We catch and simply log exceptions in cleanUp to avoid missing any final
+ // step of the invocation.
+ CLog.e(e);
+ }
+ }
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public void invoke(
+ IInvocationContext context,
+ IConfiguration config,
+ IRescheduler rescheduler,
+ ITestInvocationListener... extraListeners)
+ throws DeviceNotAvailableException, Throwable {
+ List<ITestInvocationListener> allListeners =
+ new ArrayList<>(config.getTestInvocationListeners().size() + extraListeners.length);
+ allListeners.addAll(config.getTestInvocationListeners());
+ allListeners.addAll(Arrays.asList(extraListeners));
+ if (config.getProfiler() != null) {
+ allListeners.add(new AggregatingProfilerListener(config.getProfiler()));
+ }
+ ITestInvocationListener listener =
+ new LogSaverResultForwarder(config.getLogSaver(), allListeners);
+ try {
+ mStatus = "fetching build";
+ config.getLogOutput().init();
+ getLogRegistry().registerLogger(config.getLogOutput());
+ for (String deviceName : context.getDeviceConfigNames()) {
+ context.getDevice(deviceName).clearLastConnectedWifiNetwork();
+ context.getDevice(deviceName)
+ .setOptions(config.getDeviceConfigByName(deviceName).getDeviceOptions());
+ if (config.getDeviceConfigByName(deviceName)
+ .getDeviceOptions()
+ .isLogcatCaptureEnabled()) {
+ if (!(context.getDevice(deviceName).getIDevice() instanceof StubDevice)) {
+ context.getDevice(deviceName).startLogcat();
+ }
+ }
+ }
+
+ String cmdLineArgs = config.getCommandLine();
+ if (cmdLineArgs != null) {
+ CLog.i("Invocation was started with cmd: %s", cmdLineArgs);
+ }
+
+ boolean providerSuccess = fetchBuild(context, config, rescheduler, listener);
+ if (!providerSuccess) {
+ return;
+ }
+
+ boolean sharding = shardConfig(config, context, rescheduler);
+ if (sharding) {
+ CLog.i("Invocation for %s has been sharded, rescheduling", context.getSerials());
+ return;
+ }
+
+ if (config.getTests() == null || config.getTests().isEmpty()) {
+ CLog.e("No tests to run");
+ return;
+ }
+
+ performInvocation(config, context, rescheduler, listener);
+ setExitCode(ExitCode.NO_ERROR, null);
} catch (IOException e) {
CLog.e(e);
} finally {
// Ensure build infos are always cleaned up at the end of invocation.
- for (String cleanUpDevice : context.getDeviceConfigNames()) {
- if (context.getBuildInfo(cleanUpDevice) != null) {
- try {
- config.getDeviceConfigByName(cleanUpDevice)
- .getBuildProvider()
- .cleanUp(context.getBuildInfo(cleanUpDevice));
- } catch (RuntimeException e) {
- // We catch an simply log exception in cleanUp to avoid missing any final
- // step of the invocation.
- CLog.e(e);
- }
- }
- }
+ cleanUpBuilds(context, config);
// ensure we always deregister the logger
for (String deviceName : context.getDeviceConfigNames()) {
diff --git a/src/com/android/tradefed/profiler/AggregatingProfiler.java b/src/com/android/tradefed/profiler/AggregatingProfiler.java
index b24f87e..52865bc 100644
--- a/src/com/android/tradefed/profiler/AggregatingProfiler.java
+++ b/src/com/android/tradefed/profiler/AggregatingProfiler.java
@@ -114,7 +114,8 @@
@Override
public void reportAllMetrics(ITestInvocationListener listener) {
mOutputUtil.addMetrics("aggregate", mContext.getTestTag(), mAggregateMetrics);
- listener.testLog(getDescription(), LogDataType.TEXT, mOutputUtil.getFormattedMetrics());
+ listener.testLog(getDescription(), LogDataType.MUGSHOT_LOG,
+ mOutputUtil.getFormattedMetrics());
}
/**
diff --git a/src/com/android/tradefed/profiler/recorder/MetricType.java b/src/com/android/tradefed/profiler/recorder/MetricType.java
index 36d325e..65b85bc 100644
--- a/src/com/android/tradefed/profiler/recorder/MetricType.java
+++ b/src/com/android/tradefed/profiler/recorder/MetricType.java
@@ -16,13 +16,13 @@
package com.android.tradefed.profiler.recorder;
-/**
- * An enum describing different ways that {@link TraceMetric}s can be aggregated.
- */
+/** An enum describing different ways that {@link TraceMetric}s can be aggregated. */
public enum MetricType {
- COUNT,
- SUM,
- COUNTPOS,
AVG,
- AVGTIME
+ AVGTIME,
+ COUNT,
+ COUNTPOS,
+ MAX,
+ MIN,
+ SUM,
}
diff --git a/src/com/android/tradefed/profiler/recorder/NumericAggregateFunction.java b/src/com/android/tradefed/profiler/recorder/NumericAggregateFunction.java
new file mode 100644
index 0000000..6de96ab
--- /dev/null
+++ b/src/com/android/tradefed/profiler/recorder/NumericAggregateFunction.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.tradefed.profiler.recorder;
+
+import java.util.function.BiFunction;
+
+/** A wrapper of BiFunction that aggregates numeric values. */
+public class NumericAggregateFunction {
+
+ private double mCount = 0;
+ private BiFunction<Double, Double, Double> f;
+
+ /** Creates an aggregate function for the given {@link MetricType}. */
+ public NumericAggregateFunction(MetricType metricType) {
+ switch (metricType) {
+ case AVG:
+ case AVGTIME:
+ f = (avg, value) -> avg + ((value - avg) / ++mCount);
+ return;
+ case COUNT:
+ f = (count, value) -> count + 1;
+ return;
+ case COUNTPOS:
+ f = (count, value) -> (value > 0 ? count + 1 : count);
+ return;
+ case MAX:
+ f = (max, value) -> Math.max(max, value);
+ return;
+ case MIN:
+ f = (min, value) -> Math.min(min, value);
+ return;
+ case SUM:
+ f = (sum, value) -> sum + value;
+ return;
+ default:
+ throw new IllegalArgumentException("Unknown metric type " + metricType.toString());
+ }
+ }
+
+ /** Returns the stored aggregate function. */
+ public BiFunction<Double, Double, Double> getFunction() {
+ return f;
+ }
+}
diff --git a/src/com/android/tradefed/profiler/recorder/NumericMetricsRecorder.java b/src/com/android/tradefed/profiler/recorder/NumericMetricsRecorder.java
deleted file mode 100644
index fcc48d1..0000000
--- a/src/com/android/tradefed/profiler/recorder/NumericMetricsRecorder.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.tradefed.profiler.recorder;
-
-import java.util.function.BiFunction;
-
-/**
- * A {@link IMetricsRecorder} that aggregates metrics using some basic numeric functions. This class
- * doesn't implement any methods of the {@link IMetricsRecorder} interface, it just provides
- * (possibly stateful) numeric functions to its subclasses.
- */
-public abstract class NumericMetricsRecorder implements IMetricsRecorder {
-
- private double mRunningCount = 0;
-
- /**
- * Provides an aggregator function which sums values.
- *
- * @return a sum function
- */
- protected BiFunction<Double, Double, Double> sum() {
- return (oldVal, newVal) -> oldVal + newVal;
- }
-
- /**
- * Provides an aggregator function which counts values.
- *
- * @return a count function
- */
- protected BiFunction<Double, Double, Double> count() {
- return (oldVal, newVal) -> oldVal + 1;
- }
-
- /**
- * Provides an aggregator function which counts positive values.
- *
- * @return a countpos function
- */
- protected BiFunction<Double, Double, Double> countpos() {
- return (oldVal, newVal) -> (newVal == 0 ? oldVal : oldVal + 1);
- }
-
- /**
- * Provides an aggregator function which average values.
- *
- * @return an average function
- */
- protected BiFunction<Double, Double, Double> avg() {
- return (prevAvg, newVal) -> prevAvg + ((newVal - prevAvg) / ++mRunningCount);
- }
-}
diff --git a/src/com/android/tradefed/profiler/recorder/TraceMetricsRecorder.java b/src/com/android/tradefed/profiler/recorder/TraceMetricsRecorder.java
index 57960eb..453ddd0 100644
--- a/src/com/android/tradefed/profiler/recorder/TraceMetricsRecorder.java
+++ b/src/com/android/tradefed/profiler/recorder/TraceMetricsRecorder.java
@@ -37,7 +37,7 @@
* Metrics to be recorded need to be provided as TraceMetrics. The default descriptor
* has the format prefix:funcname:param[=expectedval]:metrictype.
*/
-public class TraceMetricsRecorder extends NumericMetricsRecorder {
+public class TraceMetricsRecorder implements IMetricsRecorder {
private static final String TRACE_DIR = "/d/tracing";
private static final String EVENT_DIR = TRACE_DIR + "/events/";
@@ -56,7 +56,8 @@
TraceMetric metric = TraceMetric.parse(descriptor);
enableSingleEventTrace(device, metric.getPrefix() + "/" + metric.getFuncName());
mTraceMetrics.put(metric.getFuncName(), metric);
- mMergeFunctions.put(metric, getMergeFunctionByMetricType(metric.getMetricType()));
+ mMergeFunctions.put(
+ metric, new NumericAggregateFunction(metric.getMetricType()).getFunction());
}
}
@@ -147,17 +148,6 @@
device.executeShellCommand("echo 1 > " + fullLocation);
}
- private BiFunction<Double, Double, Double> getMergeFunctionByMetricType(MetricType t) {
- switch(t) {
- case COUNT: return count();
- case COUNTPOS: return countpos();
- case SUM: return sum();
- case AVG:
- case AVGTIME: return avg();
- default: throw new IllegalArgumentException("unknown metric type " + t);
- }
- }
-
protected BufferedReader getReaderFromFile(File trace) throws FileNotFoundException {
return new BufferedReader(new FileReader(trace));
}
diff --git a/src/com/android/tradefed/result/CollectingTestListener.java b/src/com/android/tradefed/result/CollectingTestListener.java
index 80d5987..edecf72 100644
--- a/src/com/android/tradefed/result/CollectingTestListener.java
+++ b/src/com/android/tradefed/result/CollectingTestListener.java
@@ -42,7 +42,10 @@
// Uses a LinkedHashmap to have predictable iteration order
private Map<String, TestRunResult> mRunResultsMap =
Collections.synchronizedMap(new LinkedHashMap<String, TestRunResult>());
+ private Map<TestRunResult, IInvocationContext> mModuleContextMap =
+ Collections.synchronizedMap(new LinkedHashMap<TestRunResult, IInvocationContext>());
private TestRunResult mCurrentResults = new TestRunResult();
+ private IInvocationContext mCurrentModuleContext = null;
/** represents sums of tests in each TestStatus state for all runs.
* Indexed by TestStatus.ordinal() */
@@ -116,6 +119,16 @@
mBuildInfo = buildInfo;
}
+ @Override
+ public void testModuleStarted(IInvocationContext moduleContext) {
+ mCurrentModuleContext = moduleContext;
+ }
+
+ @Override
+ public void testModuleEnded() {
+ mCurrentModuleContext = null;
+ }
+
/**
* {@inheritDoc}
*/
@@ -130,6 +143,10 @@
mCurrentResults.setAggregateMetrics(mIsAggregateMetrics);
mRunResultsMap.put(name, mCurrentResults);
+ // track the module context associated with the results.
+ if (mCurrentModuleContext != null) {
+ mModuleContextMap.put(mCurrentResults, mCurrentModuleContext);
+ }
}
mCurrentResults.testRunStarted(name, numTests);
mIsCountDirty = true;
@@ -232,6 +249,14 @@
return mRunResultsMap.values();
}
+ /**
+ * Returns the {@link IInvocationContext} of the module associated with the results or null if
+ * it was not associated with any module.
+ */
+ public IInvocationContext getModuleContextForRunResult(TestRunResult res) {
+ return mModuleContextMap.get(res);
+ }
+
/** Returns True if the result map already has an entry for the run name. */
public boolean hasResultFor(String runName) {
return mRunResultsMap.containsKey(runName);
diff --git a/src/com/android/tradefed/result/FileMetadataCollector.java b/src/com/android/tradefed/result/FileMetadataCollector.java
index 28f8fff..15905cf 100644
--- a/src/com/android/tradefed/result/FileMetadataCollector.java
+++ b/src/com/android/tradefed/result/FileMetadataCollector.java
@@ -110,6 +110,8 @@
return LogType.COMPACT_MEMINFO;
case SERVICES:
return LogType.SERVICES;
+ case MUGSHOT_LOG:
+ return LogType.MUGSHOT;
default: // All others
return LogType.UNKNOWN;
}
diff --git a/src/com/android/tradefed/result/ITestInvocationListener.java b/src/com/android/tradefed/result/ITestInvocationListener.java
index c8fbd7c..5e951d8 100644
--- a/src/com/android/tradefed/result/ITestInvocationListener.java
+++ b/src/com/android/tradefed/result/ITestInvocationListener.java
@@ -20,6 +20,7 @@
import com.android.tradefed.command.ICommandScheduler;
import com.android.tradefed.invoker.IInvocationContext;
import com.android.tradefed.log.ITestLogger;
+import com.android.tradefed.testtype.suite.ITestSuite;
import java.util.Map;
@@ -95,6 +96,18 @@
}
/**
+ * Reports the beginning of a module running. This callback is associated with {@link
+ * #testModuleEnded()} and is optional in the sequence. It is only used during a run that uses
+ * modules: {@link ITestSuite} based runners.
+ *
+ * @param moduleContext the {@link IInvocationContext} of the module.
+ */
+ public default void testModuleStarted(IInvocationContext moduleContext) {}
+
+ /** Reports the end of a module run. */
+ public default void testModuleEnded() {}
+
+ /**
* {@inheritDoc}
*/
@Override
diff --git a/src/com/android/tradefed/result/JUnit4ResultForwarder.java b/src/com/android/tradefed/result/JUnit4ResultForwarder.java
index 07086e4..a83a4dd 100644
--- a/src/com/android/tradefed/result/JUnit4ResultForwarder.java
+++ b/src/com/android/tradefed/result/JUnit4ResultForwarder.java
@@ -44,8 +44,14 @@
@Override
public void testFailure(Failure failure) throws Exception {
Description description = failure.getDescription();
+ if (description.getMethodName() == null) {
+ // In case of exception in @BeforeClass, the method name will be null
+ mListener.testRunFailed(String.format("Failed with trace: %s", failure.getTrace()));
+ return;
+ }
TestIdentifier testid = new TestIdentifier(description.getClassName(),
description.getMethodName());
+
mListener.testFailed(testid, failure.getTrace());
}
diff --git a/src/com/android/tradefed/result/LogDataType.java b/src/com/android/tradefed/result/LogDataType.java
index ee19af4..58d79f0 100644
--- a/src/com/android/tradefed/result/LogDataType.java
+++ b/src/com/android/tradefed/result/LogDataType.java
@@ -37,6 +37,7 @@
LOGCAT("txt", "text/plain", false, true),
KERNEL_LOG("txt", "text/plain", false, true),
MONKEY_LOG("txt", "text/plain", false, true),
+ MUGSHOT_LOG("txt", "text/plain", false, true),
PROCRANK("txt", "text/plain", false, true),
MEM_INFO("txt", "text/plain", false, true),
TOP("txt", "text/plain", false, true),
diff --git a/src/com/android/tradefed/result/ResultForwarder.java b/src/com/android/tradefed/result/ResultForwarder.java
index 494fc7a..41a3e88 100644
--- a/src/com/android/tradefed/result/ResultForwarder.java
+++ b/src/com/android/tradefed/result/ResultForwarder.java
@@ -297,4 +297,18 @@
}
}
}
+
+ @Override
+ public void testModuleStarted(IInvocationContext moduleContext) {
+ for (ITestInvocationListener listener : mListeners) {
+ listener.testModuleStarted(moduleContext);
+ }
+ }
+
+ @Override
+ public void testModuleEnded() {
+ for (ITestInvocationListener listener : mListeners) {
+ listener.testModuleEnded();
+ }
+ }
}
diff --git a/src/com/android/tradefed/sandbox/ISandbox.java b/src/com/android/tradefed/sandbox/ISandbox.java
new file mode 100644
index 0000000..3f10206
--- /dev/null
+++ b/src/com/android/tradefed/sandbox/ISandbox.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.sandbox;
+
+import com.android.tradefed.config.ConfigurationException;
+import com.android.tradefed.config.IConfiguration;
+import com.android.tradefed.invoker.IInvocationContext;
+import com.android.tradefed.result.ITestInvocationListener;
+import com.android.tradefed.util.CommandResult;
+
+import java.io.File;
+
+/** Interface defining a sandbox that can be used to run an invocation. */
+public interface ISandbox {
+
+ /**
+ * Prepare the environment for the sandbox to run properly.
+ *
+ * @param context the current invocation {@link IInvocationContext}.
+ * @param configuration the {@link IConfiguration} for the command to run.
+ * @param listener the current invocation {@link ITestInvocationListener} where final results
+ * should be piped.
+ * @return an {@link Exception} containing the failure, or null if successful.
+ */
+ public Exception prepareEnvironment(
+ IInvocationContext context,
+ IConfiguration configuration,
+ ITestInvocationListener listener);
+
+ /**
+ * Run the sandbox with the environment that was set.
+ *
+ * @param configuration the {@link IConfiguration} for the command to run.
+ * @return a {@link CommandResult} with the status of the sandbox run and logs.
+ */
+ public CommandResult run(IConfiguration configuration);
+
+ /** Clean up any states, files or environment that may have been changed. */
+ public void tearDown();
+
+ /**
+ * Returns the TF environment to be used based on the command line arguments.
+ *
+ * @param args the command line arguments.
+ * @return a {@link File} directory containing the TF environment jars.
+ */
+ public File getTradefedEnvironment(String[] args) throws ConfigurationException;
+}
diff --git a/src/com/android/tradefed/sandbox/SandboxConfigDump.java b/src/com/android/tradefed/sandbox/SandboxConfigDump.java
new file mode 100644
index 0000000..898f2d1
--- /dev/null
+++ b/src/com/android/tradefed/sandbox/SandboxConfigDump.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.sandbox;
+
+import com.android.tradefed.config.Configuration;
+import com.android.tradefed.config.ConfigurationException;
+import com.android.tradefed.config.ConfigurationFactory;
+import com.android.tradefed.config.GlobalConfiguration;
+import com.android.tradefed.config.IConfiguration;
+import com.android.tradefed.config.IConfigurationFactory;
+import com.android.tradefed.result.SubprocessResultsReporter;
+import com.android.tradefed.util.StreamUtil;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Runner class that creates a {@link IConfiguration} based on a command line and dumps it to a file.
+ * args: <DumpCmd> <output File> <remaining command line>
+ */
+public class SandboxConfigDump {
+
+ public enum DumpCmd {
+ /** The full xml based on the command line will be outputted */
+ FULL_XML,
+ /** Only non-versioned element of the xml will be outputted */
+ NON_VERSIONED_CONFIG,
+ /** A run-ready config will be outputted */
+ RUN_CONFIG
+ }
+
+ /**
+ * We do not output the versioned elements to avoid causing the parent process to have issues
+ * with them when trying to resolve them
+ */
+ private static final List<String> VERSIONED_ELEMENTS = new ArrayList<>();
+
+ static {
+ VERSIONED_ELEMENTS.add(Configuration.MULTI_PREPARER_TYPE_NAME);
+ VERSIONED_ELEMENTS.add(Configuration.TARGET_PREPARER_TYPE_NAME);
+ VERSIONED_ELEMENTS.add(Configuration.TEST_TYPE_NAME);
+ }
+
+ /**
+ * Parse the args and creates a {@link IConfiguration} from it then dumps it to the result file.
+ */
+ public int parse(String[] args) {
+ // TODO: add some more checking
+ List<String> argList = new ArrayList<>(Arrays.asList(args));
+ DumpCmd cmd = DumpCmd.valueOf(argList.remove(0));
+ File resFile = new File(argList.remove(0));
+ IConfigurationFactory factory = ConfigurationFactory.getInstance();
+ PrintWriter pw = null;
+ try {
+ // TODO: Handle keystore
+ IConfiguration config =
+ factory.createConfigurationFromArgs(argList.toArray(new String[0]));
+ if (DumpCmd.RUN_CONFIG.equals(cmd)) {
+ config.getCommandOptions().setShouldUseSandboxing(false);
+ config.getConfigurationDescription().setSandboxed(true);
+ config.setTestInvocationListener(new SubprocessResultsReporter());
+ }
+ pw = new PrintWriter(resFile);
+ if (DumpCmd.NON_VERSIONED_CONFIG.equals(cmd)) {
+ // Remove elements that are versioned.
+ config.dumpXml(pw, VERSIONED_ELEMENTS);
+ } else {
+ // FULL_XML in that case.
+ config.dumpXml(pw);
+ }
+ } catch (ConfigurationException | IOException e) {
+ e.printStackTrace();
+ return 1;
+ } finally {
+ StreamUtil.close(pw);
+ }
+ return 0;
+ }
+
+ public static void main(final String[] mainArgs) {
+ try {
+ GlobalConfiguration.createGlobalConfiguration(new String[] {});
+ } catch (ConfigurationException e) {
+ e.printStackTrace();
+ System.exit(1);
+ }
+ SandboxConfigDump configDump = new SandboxConfigDump();
+ int code = configDump.parse(mainArgs);
+ System.exit(code);
+ }
+}
diff --git a/src/com/android/tradefed/sandbox/SandboxConfigUtil.java b/src/com/android/tradefed/sandbox/SandboxConfigUtil.java
new file mode 100644
index 0000000..5aaa4e7
--- /dev/null
+++ b/src/com/android/tradefed/sandbox/SandboxConfigUtil.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.sandbox;
+
+import com.android.tradefed.config.ConfigurationException;
+import com.android.tradefed.config.IConfiguration;
+import com.android.tradefed.sandbox.SandboxConfigDump.DumpCmd;
+import com.android.tradefed.util.CommandResult;
+import com.android.tradefed.util.CommandStatus;
+import com.android.tradefed.util.FileUtil;
+import com.android.tradefed.util.IRunUtil;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+/** A utility class for managing {@link IConfiguration} when doing sandboxing. */
+public class SandboxConfigUtil {
+
+ private static final long DUMP_TIMEOUT = 2 * 60 * 1000; // 2min
+
+ /**
+ * Create a subprocess based on the Tf jars from any version, and dump the xml {@link
+ * IConfiguration} based on the command line args.
+ *
+ * @param rootDir the directory containing all the jars from TF.
+ * @param runUtil the {@link IRunUtil} to use to run the command.
+ * @param args the command line args.
+ * @param dump the {@link DumpCmd} driving some of the outputs.
+ * @return A {@link File} containing the xml dump from the command line.
+ * @throws ConfigurationException if the dump is not successful.
+ */
+ public static File dumpConfigForVersion(
+ File rootDir, IRunUtil runUtil, String[] args, DumpCmd dump)
+ throws ConfigurationException {
+ File destination;
+ try {
+ destination = FileUtil.createTempFile("config-container", ".xml");
+ } catch (IOException e) {
+ throw new ConfigurationException(e.getMessage());
+ }
+ List<String> mCmdArgs = new ArrayList<>();
+ mCmdArgs.add("java");
+ mCmdArgs.add("-cp");
+ mCmdArgs.add(new File(rootDir, "*").getAbsolutePath());
+ mCmdArgs.add(SandboxConfigDump.class.getCanonicalName());
+ mCmdArgs.add(dump.toString());
+ mCmdArgs.add(destination.getAbsolutePath());
+ for (String arg : args) {
+ mCmdArgs.add(arg);
+ }
+ CommandResult result = runUtil.runTimedCmd(DUMP_TIMEOUT, mCmdArgs.toArray(new String[0]));
+ if (CommandStatus.SUCCESS.equals(result.getStatus())) {
+ return destination;
+ }
+ FileUtil.deleteFile(destination);
+ throw new ConfigurationException(result.getStderr());
+ }
+}
diff --git a/src/com/android/tradefed/sandbox/SandboxInvocationRunner.java b/src/com/android/tradefed/sandbox/SandboxInvocationRunner.java
new file mode 100644
index 0000000..0ac8ae9
--- /dev/null
+++ b/src/com/android/tradefed/sandbox/SandboxInvocationRunner.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.sandbox;
+
+import com.android.tradefed.config.Configuration;
+import com.android.tradefed.config.IConfiguration;
+import com.android.tradefed.invoker.IInvocationContext;
+import com.android.tradefed.log.LogUtil.CLog;
+import com.android.tradefed.result.ITestInvocationListener;
+import com.android.tradefed.util.CommandResult;
+import com.android.tradefed.util.CommandStatus;
+
+/** Run the tests associated with the invocation in the sandbox. */
+public class SandboxInvocationRunner {
+
+ /** Do setup and run the tests */
+ public static void prepareAndRun(
+ IConfiguration config, IInvocationContext context, ITestInvocationListener listener)
+ throws Throwable {
+ // TODO: refactor TestInvocation to be more modular in the sandbox handling
+ ISandbox sandbox =
+ (ISandbox) config.getConfigurationObject(Configuration.SANDBOX_TYPE_NAME);
+ Exception res = sandbox.prepareEnvironment(context, config, listener);
+ if (res != null) {
+ CLog.w("Sandbox prepareEnvironment threw an Exception.");
+ throw res;
+ }
+ try {
+ CommandResult result = sandbox.run(config);
+ if (!CommandStatus.SUCCESS.equals(result.getStatus())) {
+ throw new RuntimeException(result.getStderr());
+ }
+ } finally {
+ sandbox.tearDown();
+ }
+ }
+}
diff --git a/src/com/android/tradefed/sandbox/TradefedSanboxRunner.java b/src/com/android/tradefed/sandbox/TradefedSanboxRunner.java
new file mode 100644
index 0000000..d770c43
--- /dev/null
+++ b/src/com/android/tradefed/sandbox/TradefedSanboxRunner.java
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.sandbox;
+
+import com.android.annotations.VisibleForTesting;
+import com.android.tradefed.command.CommandRunner.ExitCode;
+import com.android.tradefed.command.ICommandScheduler;
+import com.android.tradefed.command.ICommandScheduler.IScheduledInvocationListener;
+import com.android.tradefed.config.ConfigurationException;
+import com.android.tradefed.config.GlobalConfiguration;
+import com.android.tradefed.device.FreeDeviceState;
+import com.android.tradefed.device.ITestDevice;
+import com.android.tradefed.device.NoDeviceException;
+import com.android.tradefed.invoker.IInvocationContext;
+import com.android.tradefed.util.SerializationUtil;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+/** Runner associated with a {@link TradefedSandbox} that will allow executing the sandbox. */
+public class TradefedSanboxRunner {
+ private ICommandScheduler mScheduler;
+ private ExitCode mErrorCode = ExitCode.NO_ERROR;
+
+ public TradefedSanboxRunner() {}
+
+ public ExitCode getErrorCode() {
+ return mErrorCode;
+ }
+
+ /** Initialize the required global configuration. */
+ @VisibleForTesting
+ void initGlobalConfig(String[] args) throws ConfigurationException {
+ GlobalConfiguration.createGlobalConfiguration(args);
+ }
+
+ /** Get the {@link ICommandScheduler} instance from the global configuration. */
+ @VisibleForTesting
+ ICommandScheduler getCommandScheduler() {
+ return GlobalConfiguration.getInstance().getCommandScheduler();
+ }
+
+ /** Prints the exception stack to stderr. */
+ @VisibleForTesting
+ void printStackTrace(Throwable e) {
+ e.printStackTrace();
+ }
+
+ /**
+ * The main method to run the command.
+ *
+ * @param args the config name to run and its options
+ */
+ public void run(String[] args) {
+ List<String> argList = new ArrayList<>(Arrays.asList(args));
+ IInvocationContext context = null;
+
+ if (argList.size() < 2) {
+ mErrorCode = ExitCode.THROWABLE_EXCEPTION;
+ printStackTrace(
+ new RuntimeException("TradefedSanboxRunner expects at least 2 args."));
+ return;
+ }
+
+ try {
+ context =
+ (IInvocationContext)
+ SerializationUtil.deserialize(new File(argList.remove(0)), false);
+ } catch (IOException e) {
+ printStackTrace(e);
+ mErrorCode = ExitCode.THROWABLE_EXCEPTION;
+ return;
+ }
+
+ try {
+ initGlobalConfig(new String[] {});
+ mScheduler = getCommandScheduler();
+ mScheduler.start();
+ mScheduler.execCommand(
+ context, new StubScheduledInvocationListener(), argList.toArray(new String[0]));
+ } catch (NoDeviceException e) {
+ printStackTrace(e);
+ mErrorCode = ExitCode.NO_DEVICE_ALLOCATED;
+ } catch (ConfigurationException e) {
+ printStackTrace(e);
+ mErrorCode = ExitCode.CONFIG_EXCEPTION;
+ } finally {
+ mScheduler.shutdownOnEmpty();
+ }
+ try {
+ mScheduler.join();
+ // If no error code has been raised yet, we check the invocation error code.
+ if (ExitCode.NO_ERROR.equals(mErrorCode)) {
+ mErrorCode = mScheduler.getLastInvocationExitCode();
+ }
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ mErrorCode = ExitCode.THROWABLE_EXCEPTION;
+ }
+ if (!ExitCode.NO_ERROR.equals(mErrorCode)
+ && mScheduler.getLastInvocationThrowable() != null) {
+ // Print error to the stderr so that it can be recovered.
+ printStackTrace(mScheduler.getLastInvocationThrowable());
+ }
+ }
+
+ public static void main(final String[] mainArgs) {
+ TradefedSanboxRunner console = new TradefedSanboxRunner();
+ console.run(mainArgs);
+ System.exit(console.getErrorCode().getCodeValue());
+ }
+
+ /** A stub {@link IScheduledInvocationListener} that does nothing. */
+ public static class StubScheduledInvocationListener implements IScheduledInvocationListener {
+ @Override
+ public void invocationComplete(
+ IInvocationContext metadata, Map<ITestDevice, FreeDeviceState> devicesStates) {
+ // do nothing
+ }
+ }
+}
diff --git a/src/com/android/tradefed/sandbox/TradefedSandbox.java b/src/com/android/tradefed/sandbox/TradefedSandbox.java
new file mode 100644
index 0000000..7f1f8c4
--- /dev/null
+++ b/src/com/android/tradefed/sandbox/TradefedSandbox.java
@@ -0,0 +1,208 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.sandbox;
+
+import com.android.annotations.VisibleForTesting;
+import com.android.tradefed.config.ConfigurationException;
+import com.android.tradefed.config.IConfiguration;
+import com.android.tradefed.invoker.IInvocationContext;
+import com.android.tradefed.result.ITestInvocationListener;
+import com.android.tradefed.sandbox.SandboxConfigDump.DumpCmd;
+import com.android.tradefed.util.CommandResult;
+import com.android.tradefed.util.CommandStatus;
+import com.android.tradefed.util.FileUtil;
+import com.android.tradefed.util.IRunUtil;
+import com.android.tradefed.util.QuotationAwareTokenizer;
+import com.android.tradefed.util.RunUtil;
+import com.android.tradefed.util.SerializationUtil;
+import com.android.tradefed.util.StreamUtil;
+import com.android.tradefed.util.SubprocessTestResultsParser;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Sandbox container that can run a Trade Federation invocation. TODO: Allow Options to be passed to
+ * the sandbox.
+ */
+public class TradefedSandbox implements ISandbox {
+
+ /** The variable holding TF specific environment */
+ public static final String TF_GLOBAL_CONFIG = "TF_GLOBAL_CONFIG";
+ /** Timeout to wait for the events received from subprocess to finish being processed. */
+ private static final long EVENT_THREAD_JOIN_TIMEOUT_MS = 30 * 1000;
+
+ private File mStdoutFile = null;
+ private File mStderrFile = null;
+ private FileOutputStream mStdout = null;
+ private FileOutputStream mStderr = null;
+
+ private File mSandboxTmpFolder = null;
+ private File mRootFolder = null;
+ private File mSerializedContext = null;
+ private File mSerializedConfiguration = null;
+
+ private SubprocessTestResultsParser mEventParser = null;
+
+ private IRunUtil mRunUtil;
+
+ @Override
+ public CommandResult run(IConfiguration config) {
+ List<String> mCmdArgs = new ArrayList<>();
+ mCmdArgs.add("java");
+ mCmdArgs.add(String.format("-Djava.io.tmpdir=%s", mSandboxTmpFolder.getAbsolutePath()));
+ mCmdArgs.add("-cp");
+ mCmdArgs.add(new File(mRootFolder, "*").getAbsolutePath());
+ mCmdArgs.add(TradefedSanboxRunner.class.getCanonicalName());
+ mCmdArgs.add(mSerializedContext.getAbsolutePath());
+ mCmdArgs.add(mSerializedConfiguration.getAbsolutePath());
+ mCmdArgs.add("--subprocess-report-port");
+ mCmdArgs.add(Integer.toString(mEventParser.getSocketServerPort()));
+
+ long timeout = config.getCommandOptions().getInvocationTimeout();
+ CommandResult result =
+ mRunUtil.runTimedCmd(timeout, mStdout, mStderr, mCmdArgs.toArray(new String[0]));
+
+ boolean failedStatus = false;
+ if (!CommandStatus.SUCCESS.equals(result.getStatus())) {
+ failedStatus = true;
+ }
+
+ if (!mEventParser.joinReceiver(EVENT_THREAD_JOIN_TIMEOUT_MS)) {
+ if (!failedStatus) {
+ result.setStatus(CommandStatus.EXCEPTION);
+ }
+ String stderrText;
+ try {
+ stderrText = FileUtil.readStringFromFile(mStderrFile);
+ } catch (IOException e) {
+ stderrText = "Could not read the stderr output from process.";
+ }
+ result.setStderr(
+ String.format("Event receiver thread did not complete.:\n%s", stderrText));
+ }
+
+ return result;
+ }
+
+ @Override
+ public Exception prepareEnvironment(
+ IInvocationContext context, IConfiguration config, ITestInvocationListener listener) {
+ // Create our temp directories.
+ try {
+ mStdoutFile = FileUtil.createTempFile("stdout_subprocess_", ".log");
+ mStderrFile = FileUtil.createTempFile("stderr_subprocess_", ".log");
+ mStdout = new FileOutputStream(mStdoutFile);
+ mStderr = new FileOutputStream(mStderrFile);
+
+ mSandboxTmpFolder = FileUtil.createTempDir("tradefed-container");
+ } catch (IOException e) {
+ return e;
+ }
+ // Unset the current global environment
+ mRunUtil = createRunUtil();
+ mRunUtil.unsetEnvVariable(TF_GLOBAL_CONFIG);
+ // TODO: add handling of setting and creating the subprocess global configuration
+
+ try {
+ mRootFolder =
+ getTradefedEnvironment(
+ QuotationAwareTokenizer.tokenizeLine(config.getCommandLine()));
+ } catch (ConfigurationException e) {
+ return e;
+ }
+
+ // Prepare the configuration
+ Exception res = prepareConfiguration(context, config, listener);
+ if (res != null) {
+ return res;
+ }
+
+ // Prepare the context
+ try {
+ mSerializedContext = prepareContext(context);
+ } catch (IOException e) {
+ return e;
+ }
+
+ return null;
+ }
+
+ @Override
+ public void tearDown() {
+ StreamUtil.close(mEventParser);
+ StreamUtil.close(mStdout);
+ StreamUtil.close(mStderr);
+ FileUtil.deleteFile(mStdoutFile);
+ FileUtil.deleteFile(mStderrFile);
+ FileUtil.recursiveDelete(mSandboxTmpFolder);
+ FileUtil.deleteFile(mSerializedContext);
+ FileUtil.deleteFile(mSerializedConfiguration);
+ }
+
+ @Override
+ public File getTradefedEnvironment(String[] args) throws ConfigurationException {
+ String tfDir = System.getProperty("TF_JAR_DIR");
+ if (tfDir == null || tfDir.isEmpty()) {
+ throw new ConfigurationException(
+ "Could not read TF_JAR_DIR to get current Tradefed instance.");
+ }
+ return new File(tfDir);
+ }
+
+ /**
+ * Prepare the {@link IConfiguration} that will be passed to the subprocess and will drive the
+ * container execution.
+ *
+ * @param context The current {@link IInvocationContext}.
+ * @param config the {@link IConfiguration} to be prepared.
+ * @param listener The current invocation {@link ITestInvocationListener}.
+ * @return an Exception if anything went wrong, null otherwise.
+ */
+ protected Exception prepareConfiguration(
+ IInvocationContext context, IConfiguration config, ITestInvocationListener listener) {
+ try {
+ // TODO: add option to disable the streaming back of results.
+ mEventParser = new SubprocessTestResultsParser(listener, true, context);
+ String[] args = QuotationAwareTokenizer.tokenizeLine(config.getCommandLine());
+ mSerializedConfiguration =
+ SandboxConfigUtil.dumpConfigForVersion(
+ mRootFolder, mRunUtil, args, DumpCmd.RUN_CONFIG);
+ } catch (ConfigurationException | IOException e) {
+ return e;
+ }
+ return null;
+ }
+
+ @VisibleForTesting
+ IRunUtil createRunUtil() {
+ return new RunUtil();
+ }
+
+ /**
+ * Prepare and serialize the {@link IInvocationContext}.
+ *
+ * @param context the {@link IInvocationContext} to be prepared.
+ * @return the serialized {@link IInvocationContext}.
+ * @throws IOException
+ */
+ protected File prepareContext(IInvocationContext context) throws IOException {
+ return SerializationUtil.serialize(context);
+ }
+}
diff --git a/src/com/android/tradefed/suite/checker/ActivityStatusChecker.java b/src/com/android/tradefed/suite/checker/ActivityStatusChecker.java
new file mode 100644
index 0000000..4a66304
--- /dev/null
+++ b/src/com/android/tradefed/suite/checker/ActivityStatusChecker.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.suite.checker;
+
+import com.android.tradefed.device.DeviceNotAvailableException;
+import com.android.tradefed.device.ITestDevice;
+import com.android.tradefed.log.ITestLogger;
+import com.android.tradefed.log.LogUtil.CLog;
+import com.android.tradefed.result.ITestLoggerReceiver;
+import com.android.tradefed.result.InputStreamSource;
+import com.android.tradefed.result.LogDataType;
+import com.android.tradefed.util.StreamUtil;
+
+/** Status checker for left over activities running at the end of a module. */
+public class ActivityStatusChecker implements ISystemStatusChecker, ITestLoggerReceiver {
+
+ private ITestLogger mLogger;
+
+ @Override
+ public boolean postExecutionCheck(ITestDevice device) throws DeviceNotAvailableException {
+ return isFrontActivityLauncher(device);
+ }
+
+ private boolean isFrontActivityLauncher(ITestDevice device) throws DeviceNotAvailableException {
+ String output =
+ device.executeShellCommand(
+ "dumpsys window windows | grep -E 'mCurrentFocus|mFocusedApp'");
+ CLog.d("dumpsys window windows: %s", output);
+ if (output.contains("Launcher")) {
+ return true;
+ } else {
+ InputStreamSource screen = device.getScreenshot("JPEG");
+ try {
+ mLogger.testLog("status_checker_front_activity", LogDataType.JPEG, screen);
+ } finally {
+ StreamUtil.cancel(screen);
+ }
+ // TODO: Add a step to return to home page, or refresh the device (reboot?)
+ return false;
+ }
+ }
+
+ @Override
+ public void setTestLogger(ITestLogger testLogger) {
+ mLogger = testLogger;
+ }
+}
diff --git a/src/com/android/tradefed/targetprep/DeviceSetup.java b/src/com/android/tradefed/targetprep/DeviceSetup.java
index 04668c4..f7ab89a 100644
--- a/src/com/android/tradefed/targetprep/DeviceSetup.java
+++ b/src/com/android/tradefed/targetprep/DeviceSetup.java
@@ -642,6 +642,7 @@
return;
}
+ CLog.d("Pushing the following properties to /data/local.prop:\n%s", sb.toString());
boolean result = device.pushString(sb.toString(), "/data/local.prop");
if (!result) {
throw new TargetSetupError(String.format("Failed to push /data/local.prop to %s",
diff --git a/src/com/android/tradefed/targetprep/StubTargetPreparer.java b/src/com/android/tradefed/targetprep/StubTargetPreparer.java
index abb93cf..bd0e778 100644
--- a/src/com/android/tradefed/targetprep/StubTargetPreparer.java
+++ b/src/com/android/tradefed/targetprep/StubTargetPreparer.java
@@ -19,11 +19,20 @@
import com.android.tradefed.build.IBuildInfo;
import com.android.tradefed.config.IConfiguration;
import com.android.tradefed.config.IConfigurationReceiver;
+import com.android.tradefed.config.Option;
+import com.android.tradefed.config.OptionClass;
import com.android.tradefed.device.ITestDevice;
/** Placeholder empty implementation of a {@link ITargetPreparer}. */
+@OptionClass(alias = "stub-preparer")
public class StubTargetPreparer implements ITargetPreparer, IConfigurationReceiver {
+ @Option(name = "test-boolean-option", description = "test option, keep default to true.")
+ private boolean mTestBooleanOption = true;
+
+ @Option(name = "test-boolean-option-false", description = "test option, keep default to false.")
+ private boolean mTestBooleanOptionFalse = false;
+
private IConfiguration mConfig;
/**
@@ -44,4 +53,12 @@
public IConfiguration getConfiguration() {
return mConfig;
}
+
+ public boolean getTestBooleanOption() {
+ return mTestBooleanOption;
+ }
+
+ public boolean getTestBooleanOptionFalse() {
+ return mTestBooleanOptionFalse;
+ }
}
diff --git a/src/com/android/tradefed/targetprep/TestAppInstallSetup.java b/src/com/android/tradefed/targetprep/TestAppInstallSetup.java
index ce94055..448fc7e 100644
--- a/src/com/android/tradefed/targetprep/TestAppInstallSetup.java
+++ b/src/com/android/tradefed/targetprep/TestAppInstallSetup.java
@@ -231,6 +231,13 @@
mAltDirs.add(altDir);
}
+ /**
+ * Sets an alternate directory behavior.
+ */
+ public void setAltDirBehavior(AltDirBehavior altDirBehavior) {
+ mAltDirBehavior = altDirBehavior;
+ }
+
/** Attempt to install a package on the device. */
private String installPackage(ITestDevice device, File testAppFile)
throws DeviceNotAvailableException {
diff --git a/src/com/android/tradefed/testtype/CodeCoverageTestBase.java b/src/com/android/tradefed/testtype/CodeCoverageTestBase.java
index 8a4ae4e..932c193 100644
--- a/src/com/android/tradefed/testtype/CodeCoverageTestBase.java
+++ b/src/com/android/tradefed/testtype/CodeCoverageTestBase.java
@@ -15,6 +15,8 @@
*/
package com.android.tradefed.testtype;
+import static com.google.common.base.Preconditions.checkState;
+
import com.android.ddmlib.testrunner.IRemoteAndroidTestRunner;
import com.android.ddmlib.testrunner.RemoteAndroidTestRunner;
import com.android.ddmlib.testrunner.TestIdentifier;
@@ -35,6 +37,8 @@
import com.android.tradefed.util.ICompressionStrategy;
import com.android.tradefed.util.ListInstrumentationParser;
import com.android.tradefed.util.ListInstrumentationParser.InstrumentationTarget;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableMap;
import java.io.File;
import java.io.IOException;
@@ -65,9 +69,11 @@
description = "Only run instrumentation targets with the given test runner")
private List<String> mRunnerFilter = new ArrayList<>();
- @Option(name = "instrumentation-arg",
- description = "Additional instrumentation arguments to provide to the runner")
- private Map<String, String> mInstrArgMap = new HashMap<String, String>();
+ @Option(
+ name = "instrumentation-arg",
+ description = "Additional instrumentation arguments to provide to the runner"
+ )
+ private Map<String, String> mInstrumentationArgs = new HashMap<String, String>();
@Option(name = "max-tests-per-chunk",
description = "Maximum number of tests to execute in a single call to 'am instrument'. "
@@ -113,14 +119,32 @@
return mPackageFilter;
}
+ /** Sets the package-filter option for testing. */
+ @VisibleForTesting
+ void setPackageFilter(List<String> packageFilter) {
+ mPackageFilter = packageFilter;
+ }
+
/** Returns the runner filter as set by the --runner option(s). */
List<String> getRunnerFilter() {
return mRunnerFilter;
}
+ /** Sets the runner-filter option for testing. */
+ @VisibleForTesting
+ void setRunnerFilter(List<String> runnerFilter) {
+ mRunnerFilter = runnerFilter;
+ }
+
/** Returns the instrumentation arguments as set by the --instrumentation-arg option(s). */
Map<String, String> getInstrumentationArgs() {
- return mInstrArgMap;
+ return mInstrumentationArgs;
+ }
+
+ /** Sets the instrumentation-arg options for testing. */
+ @VisibleForTesting
+ void setInstrumentationArgs(Map<String, String> instrumentationArgs) {
+ mInstrumentationArgs = ImmutableMap.copyOf(instrumentationArgs);
}
/** Returns the maximum number of tests to run at once as set by --max-tests-per-chunk. */
@@ -128,6 +152,12 @@
return mMaxTestsPerChunk;
}
+ /** Sets the max-tests-per-chunk option for testing. */
+ @VisibleForTesting
+ void setMaxTestsPerChunk(int maxTestsPerChunk) {
+ mMaxTestsPerChunk = maxTestsPerChunk;
+ }
+
/** Returns the compression strategy that should be used to archive the coverage report. */
ICompressionStrategy getCompressionStrategy() {
try {
@@ -159,7 +189,7 @@
File reportArchive = null;
// Initialize a listener to collect logged coverage files
try (CoverageCollectingListener coverageListener =
- new CoverageCollectingListener(listener)) {
+ new CoverageCollectingListener(getDevice(), listener)) {
// Make sure there are some installed instrumentation targets
Collection<InstrumentationTarget> instrumentationTargets = getInstrumentationTargets();
@@ -195,8 +225,9 @@
}
// Generate the coverage report(s) and log it
+ List<File> measurements = coverageListener.getCoverageFiles();
for (T format : getReportFormat()) {
- File report = generateCoverageReport(coverageListener.getCoverageFiles(), format);
+ File report = generateCoverageReport(measurements, format);
try {
doLogReport("coverage", format.getLogDataType(), report, listener);
} finally {
@@ -359,7 +390,7 @@
*/
TestRunResult runTest(InstrumentationTarget target, int shardIndex, int numShards,
ITestInvocationListener listener) throws DeviceNotAvailableException {
- return runTest(createCoverageTest(target, shardIndex, numShards), listener);
+ return runTest(createTest(target, shardIndex, numShards), listener);
}
/**
@@ -372,11 +403,11 @@
*/
TestRunResult runTest(InstrumentationTarget target, TestIdentifier identifier,
ITestInvocationListener listener) throws DeviceNotAvailableException {
- return runTest(createCoverageTest(target, identifier), listener);
+ return runTest(createTest(target, identifier), listener);
}
- /** Runs the given {@link CodeCoverageTest} and returns the {@link TestRunResult}. */
- TestRunResult runTest(CodeCoverageTest test, ITestInvocationListener listener)
+ /** Runs the given {@link InstrumentationTest} and returns the {@link TestRunResult}. */
+ TestRunResult runTest(InstrumentationTest test, ITestInvocationListener listener)
throws DeviceNotAvailableException {
// Run the test, and return the run results
CollectingTestListener results = new CollectingTestListener();
@@ -384,31 +415,35 @@
return results.getCurrentRunResults();
}
- /** Returns a new {@link CodeCoverageTest}. Exposed for unit testing. */
- CodeCoverageTest internalCreateCoverageTest() {
- return new CodeCoverageTest();
+ /** Returns a new {@link InstrumentationTest}. Exposed for unit testing. */
+ InstrumentationTest internalCreateTest() {
+ return new InstrumentationTest();
}
- /** Returns a new {@link CodeCoverageTest} for the given target. */
- CodeCoverageTest createCoverageTest(InstrumentationTarget target) {
- // Get a new CodeCoverageTest instance
- CodeCoverageTest ret = internalCreateCoverageTest();
+ /** Returns a new {@link InstrumentationTest} for the given target. */
+ InstrumentationTest createTest(InstrumentationTarget target) {
+ // Get a new InstrumentationTest instance
+ InstrumentationTest ret = internalCreateTest();
ret.setDevice(getDevice());
ret.setPackageName(target.packageName);
ret.setRunnerName(target.runnerName);
+ // Disable rerun mode, we want to stop the tests as soon as we fail.
+ ret.setRerunMode(false);
+
// Add instrumentation arguments
for (Map.Entry<String, String> argEntry : getInstrumentationArgs().entrySet()) {
ret.addInstrumentationArg(argEntry.getKey(), argEntry.getValue());
}
+ ret.addInstrumentationArg("coverage", "true");
return ret;
}
- /** Returns a new {@link CodeCoverageTest} for the identified test on the given target. */
- CodeCoverageTest createCoverageTest(InstrumentationTarget target, TestIdentifier identifier) {
- // Get a new CodeCoverageTest instance
- CodeCoverageTest ret = createCoverageTest(target);
+ /** Returns a new {@link InstrumentationTest} for the identified test on the given target. */
+ InstrumentationTest createTest(InstrumentationTarget target, TestIdentifier identifier) {
+ // Get a new InstrumentationTest instance
+ InstrumentationTest ret = createTest(target);
// Set the specific test method to run
ret.setClassName(identifier.getClassName());
@@ -417,11 +452,10 @@
return ret;
}
- /** Returns a new {@link CodeCoverageTest} for a particular shard on the given target. */
- CodeCoverageTest createCoverageTest(InstrumentationTarget target, int shardIndex,
- int numShards) {
- // Get a new CodeCoverageTest instance
- CodeCoverageTest ret = createCoverageTest(target);
+ /** Returns a new {@link InstrumentationTest} for a particular shard on the given target. */
+ InstrumentationTest createTest(InstrumentationTarget target, int shardIndex, int numShards) {
+ // Get a new InstrumentationTest instance
+ InstrumentationTest ret = createTest(target);
// Add shard options if necessary
if (numShards > 1) {
@@ -436,22 +470,24 @@
public static class CoverageCollectingListener extends ResultForwarder
implements AutoCloseable {
+ private ITestDevice mDevice;
private List<File> mCoverageFiles = new ArrayList<>();
private File mCoverageDir;
+ private String mCurrentRunName;
- public CoverageCollectingListener(ITestInvocationListener... listeners) throws IOException {
+ public CoverageCollectingListener(ITestDevice device, ITestInvocationListener... listeners)
+ throws IOException {
super(listeners);
+ mDevice = device;
+
// Initialize a directory to store the coverage files
mCoverageDir = FileUtil.createTempDir("execution_data");
}
/** Returns the list of collected coverage files. */
public List<File> getCoverageFiles() {
- // It is an error to use this object after it has been closed
- if (mCoverageDir == null) {
- throw new IllegalStateException("This object is closed");
- }
+ checkState(mCoverageDir != null, "This object is closed");
return mCoverageFiles;
}
@@ -460,32 +496,66 @@
*/
@Override
public void testLog(String dataName, LogDataType dataType, InputStreamSource dataStream) {
- // It is an error to use this object after it has been closed
- if (mCoverageDir == null) {
- throw new IllegalStateException("This object is closed");
- }
+ super.testLog(dataName, dataType, dataStream);
+ checkState(mCoverageDir != null, "This object is closed");
// We only care about coverage files
- if (!LogDataType.COVERAGE.equals(dataType)) {
- super.testLog(dataName, dataType, dataStream);
- return;
- }
-
- // Save coverage data to a temporary location, and don't inform the listeners yet
- try {
- File coverageFile = FileUtil.createTempFile(dataName + "_", ".exec", mCoverageDir);
- FileUtil.writeToFile(dataStream.createInputStream(), coverageFile);
- mCoverageFiles.add(coverageFile);
- CLog.d("Got coverage file: %s", coverageFile.getAbsolutePath());
- } catch (IOException e) {
- CLog.e("Failed to save coverage file");
- CLog.e(e);
+ if (LogDataType.COVERAGE.equals(dataType)) {
+ // Save coverage data to a temporary location, and don't inform the listeners yet
+ try {
+ File coverageFile =
+ FileUtil.createTempFile(dataName + "_", ".exec", mCoverageDir);
+ FileUtil.writeToFile(dataStream.createInputStream(), coverageFile);
+ mCoverageFiles.add(coverageFile);
+ CLog.d("Got coverage file: %s", coverageFile.getAbsolutePath());
+ } catch (IOException e) {
+ CLog.e("Failed to save coverage file");
+ CLog.e(e);
+ }
}
}
- /**
- * {@inheritDoc}
- */
+ /** {@inheritDoc} */
+ @Override
+ public void testRunStarted(String runName, int testCount) {
+ super.testRunStarted(runName, testCount);
+ mCurrentRunName = runName;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public void testRunEnded(long elapsedTime, Map<String, String> runMetrics) {
+ // Look for the coverage file path from the run metrics
+ String coverageFilePath = runMetrics.get(CodeCoverageTest.COVERAGE_REMOTE_FILE_LABEL);
+ if (coverageFilePath != null) {
+ CLog.d("Coverage file at %s", coverageFilePath);
+
+ // Try to pull the coverage measurements off of the device
+ File coverageFile = null;
+ try {
+ coverageFile = mDevice.pullFile(coverageFilePath);
+ if (coverageFile != null) {
+ FileInputStreamSource source = new FileInputStreamSource(coverageFile);
+ testLog(
+ mCurrentRunName + "_runtime_coverage",
+ LogDataType.COVERAGE,
+ source);
+ source.cancel();
+ } else {
+ CLog.w("Failed to pull coverage file from device: %s", coverageFilePath);
+ }
+ } catch (DeviceNotAvailableException e) {
+ // Nothing we can do, so just log the error.
+ CLog.w(e);
+ } finally {
+ FileUtil.deleteFile(coverageFile);
+ }
+ }
+
+ super.testRunEnded(elapsedTime, runMetrics);
+ }
+
+ /** {@inheritDoc} */
@Override
public void close() {
FileUtil.recursiveDelete(mCoverageDir);
diff --git a/src/com/android/tradefed/testtype/DeviceSuite.java b/src/com/android/tradefed/testtype/DeviceSuite.java
index b39c5ae..dffc769 100644
--- a/src/com/android/tradefed/testtype/DeviceSuite.java
+++ b/src/com/android/tradefed/testtype/DeviceSuite.java
@@ -16,22 +16,34 @@
package com.android.tradefed.testtype;
import com.android.tradefed.build.IBuildInfo;
+import com.android.tradefed.config.ConfigurationException;
+import com.android.tradefed.config.Option;
+import com.android.tradefed.config.OptionSetter;
import com.android.tradefed.device.ITestDevice;
+import com.android.tradefed.log.LogUtil.CLog;
import org.junit.runner.Runner;
+import org.junit.runner.notification.RunNotifier;
import org.junit.runners.Suite;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.RunnerBuilder;
+import java.util.HashSet;
+import java.util.Set;
+
/**
- * Extends the JUnit4 container {@link Suite} in order to provide a {@link ITestDevice} to the
- * tests that requires it.
+ * Extends the JUnit4 container {@link Suite} in order to provide a {@link ITestDevice} to the tests
+ * that requires it.
*/
-public class DeviceSuite extends Suite implements IDeviceTest, IBuildReceiver, IAbiReceiver {
+public class DeviceSuite extends Suite
+ implements IDeviceTest, IBuildReceiver, IAbiReceiver, ISetOptionReceiver {
private ITestDevice mDevice;
private IBuildInfo mBuildInfo;
private IAbi mAbi;
+ @Option(name = HostTest.SET_OPTION_NAME, description = HostTest.SET_OPTION_DESC)
+ private Set<String> mKeyValueOptions = new HashSet<>();
+
public DeviceSuite(Class<?> klass, RunnerBuilder builder) throws InitializationError {
super(klass, builder);
}
@@ -84,4 +96,17 @@
}
}
}
+
+ @Override
+ protected void runChild(Runner runner, RunNotifier notifier) {
+ try {
+ OptionSetter setter = new OptionSetter(runner);
+ for (String kv : mKeyValueOptions) {
+ setter.setOptionValue(HostTest.SET_OPTION_NAME, kv);
+ }
+ } catch (ConfigurationException e) {
+ CLog.d("Could not set option set-option on '%s', reason: '%s'", runner, e.getMessage());
+ }
+ super.runChild(runner, notifier);
+ }
}
diff --git a/src/com/android/tradefed/testtype/GTest.java b/src/com/android/tradefed/testtype/GTest.java
index dcff95b..23af7bf 100644
--- a/src/com/android/tradefed/testtype/GTest.java
+++ b/src/com/android/tradefed/testtype/GTest.java
@@ -116,6 +116,9 @@
description = "adb shell command(s) to run after GTest.")
private List<String> mAfterTestCmd = new ArrayList<>();
+ @Option(name = "run-test-as", description = "User to execute test binary as.")
+ private String mRunTestAs = null;
+
@Option(name = "ld-library-path",
description = "LD_LIBRARY_PATH value to include in the GTest execution command.")
private String mLdLibraryPath = null;
@@ -183,10 +186,6 @@
return mDevice;
}
- public void setEnableXmlOutput(boolean b) {
- mEnableXmlOutput = b;
- }
-
/**
* Set the Android native test module to run.
*
@@ -649,6 +648,12 @@
gTestCmdLine.append(String.format("GTEST_SHARD_INDEX=%s ", mShardIndex));
gTestCmdLine.append(String.format("GTEST_TOTAL_SHARDS=%s ", mShardCount));
}
+
+ // su to requested user
+ if (mRunTestAs != null) {
+ gTestCmdLine.append(String.format("su %s ", mRunTestAs));
+ }
+
gTestCmdLine.append(String.format("%s %s", fullPath, flags));
return gTestCmdLine.toString();
}
@@ -751,7 +756,4 @@
mCollectTestsOnly = shouldCollectTest;
}
- protected void setLoadFilterFromFile(String loadFilterFromFile) {
- mTestFilterKey = loadFilterFromFile;
- }
}
diff --git a/src/com/android/tradefed/testtype/PythonUnitTestResultParser.java b/src/com/android/tradefed/testtype/PythonUnitTestResultParser.java
index 7a893e5..1cafb3d 100644
--- a/src/com/android/tradefed/testtype/PythonUnitTestResultParser.java
+++ b/src/com/android/tradefed/testtype/PythonUnitTestResultParser.java
@@ -18,13 +18,14 @@
import com.android.ddmlib.MultiLineReceiver;
import com.android.ddmlib.testrunner.ITestRunListener;
import com.android.ddmlib.testrunner.TestIdentifier;
-import com.android.tradefed.log.LogUtil.CLog;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
/**
* Interprets the output of tests run with Python's unittest framework and translates it into
@@ -39,8 +40,7 @@
* Status ::= “OK” | “FAILED (errors=” int “)”.
* Traceback ::= string+.
*
- * Example output:
- * (passing)
+ * Example output (passing):
* test_size (test_rangelib.RangeSetTest) ... ok
* test_str (test_rangelib.RangeSetTest) ... ok
* test_subtract (test_rangelib.RangeSetTest) ... ok
@@ -51,7 +51,8 @@
* Ran 5 tests in 0.002s
*
* OK
- * (failed)
+ *
+ * Example output (failed):
* test_size (test_rangelib.RangeSetTest) ... ERROR
*
* ======================================================================
@@ -62,8 +63,50 @@
* raise ValueError()
* ValueError
* ----------------------------------------------------------------------
- * Ran 1 tests in 0.001s
+ * Ran 1 test in 0.001s
* FAILED (errors=1)
+ *
+ * Example output with several edge cases (failed):
+ * testError (foo.testFoo) ... ERROR
+ * testExpectedFailure (foo.testFoo) ... expected failure
+ * testFail (foo.testFoo) ... FAIL
+ * testFailWithDocString (foo.testFoo)
+ * foo bar ... FAIL
+ * testOk (foo.testFoo) ... ok
+ * testOkWithDocString (foo.testFoo)
+ * foo bar ... ok
+ * testSkipped (foo.testFoo) ... skipped 'reason foo'
+ * testUnexpectedSuccess (foo.testFoo) ... unexpected success
+ *
+ * ======================================================================
+ * ERROR: testError (foo.testFoo)
+ * ----------------------------------------------------------------------
+ * Traceback (most recent call last):
+ * File "foo.py", line 11, in testError
+ * self.assertEqual(2+2, 5/0)
+ * ZeroDivisionError: integer division or modulo by zero
+ *
+ * ======================================================================
+ * FAIL: testFail (foo.testFoo)
+ * ----------------------------------------------------------------------
+ * Traceback (most recent call last):
+ * File "foo.py", line 8, in testFail
+ * self.assertEqual(2+2, 5)
+ * AssertionError: 4 != 5
+ *
+ * ======================================================================
+ * FAIL: testFailWithDocString (foo.testFoo)
+ * foo bar
+ * ----------------------------------------------------------------------
+ * Traceback (most recent call last):
+ * File "foo.py", line 31, in testFailWithDocString
+ * self.assertEqual(2+2, 5)
+ * AssertionError: 4 != 5
+ *
+ * ----------------------------------------------------------------------
+ * Ran 8 tests in 0.001s
+ *
+ * FAILED (failures=2, errors=1, skipped=1, expected failures=1, unexpected successes=1)
*/
public class PythonUnitTestResultParser extends MultiLineReceiver {
@@ -80,22 +123,38 @@
int mTotalTestCount;
// General state
- private Map<TestIdentifier, String> mTestResultCache;
private int mFailedTestCount;
private final Collection<ITestRunListener> mListeners;
private final String mRunName;
+ private Map<TestIdentifier, String> mTestResultCache;
+ // Use a special entry to mark skipped test in mTestResultCache
+ static final String SKIPPED_ENTRY = "Skipped";
// Constant tokens that appear in the result grammar.
static final String EQLINE =
"======================================================================";
static final String LINE =
"----------------------------------------------------------------------";
- static final String TRACEBACK_LINE = "Traceback (most recent call last):";
- static final String CASE_OK = "ok";
- static final String CASE_EXPECTED_FAILURE_1 = "expected";
- static final String CASE_EXPECTED_FAILURE_2 = "failure";
- static final String RUN_OK = "OK";
- static final String RUN_FAILED = "FAILED";
+ static final String TRACEBACK_LINE =
+ "Traceback (most recent call last):";
+
+ static final Pattern PATTERN_TEST_SUCCESS = Pattern.compile("ok|expected failure");
+ static final Pattern PATTERN_TEST_FAILURE = Pattern.compile("FAIL|ERROR");
+ static final Pattern PATTERN_TEST_SKIPPED = Pattern.compile("skipped '.*");
+ static final Pattern PATTERN_TEST_UNEXPECTED_SUCCESS = Pattern.compile("unexpected success");
+
+ static final Pattern PATTERN_ONE_LINE_RESULT = Pattern.compile(
+ "(\\S*) \\((\\S*)\\) ... (ok|expected failure|FAIL|ERROR|skipped '.*'|unexpected success)");
+ static final Pattern PATTERN_TWO_LINE_RESULT_FIRST = Pattern.compile(
+ "(\\S*) \\((\\S*)\\)");
+ static final Pattern PATTERN_TWO_LINE_RESULT_SECOND = Pattern.compile(
+ "(.*) ... (ok|expected failure|FAIL|ERROR|skipped '.*'|unexpected success)");
+ static final Pattern PATTERN_FAIL_MESSAGE = Pattern.compile(
+ "(FAIL|ERROR): (\\S*) \\((\\S*)\\)");
+ static final Pattern PATTERN_RUN_SUMMARY = Pattern.compile(
+ "Ran (\\d+) tests? in (\\d+(.\\d*)?)s");
+
+ static final Pattern PATTERN_RUN_RESULT = Pattern.compile("(OK|FAILED).*");
/**
* Keeps track of the state the parser is currently in.
@@ -107,20 +166,21 @@
* State progression:
*
* v------,
- * TEST_CASE-'->[failed?]-(n)->TEST_SUMMARY-->TEST_STATUS-->COMPLETE
+ * TEST_CASE-'->[failed?]-(n)-->RUN_SUMMARY-->RUN_RESULT-->COMPLETE
* | ^
* (y) '------(n)--,
- * | ,-TEST_TRACEBACK->[more?]
+ * | ,---TRACEBACK---->[more?]
* v v ^ |
* FAIL_MESSAGE ---' (y)
* ^-------------------'
*/
static enum ParserState {
TEST_CASE,
- TEST_TRACEBACK,
- TEST_SUMMARY,
- TEST_STATUS,
+ TRACEBACK,
+ RUN_SUMMARY,
+ RUN_RESULT,
FAIL_MESSAGE,
+ FAIL_MESSAGE_OPTIONAL_DOCSTRING,
COMPLETE
}
@@ -166,92 +226,115 @@
void parse() throws PythonUnitTestParseException {
switch (mCurrentParseState) {
case TEST_CASE:
- testResult();
+ testCase();
break;
- case TEST_TRACEBACK:
+ case TRACEBACK:
traceback();
break;
- case TEST_SUMMARY:
- summary();
+ case RUN_SUMMARY:
+ runSummary();
break;
- case TEST_STATUS:
- completeTestRun();
+ case RUN_RESULT:
+ runResult();
break;
case FAIL_MESSAGE:
failMessage();
break;
+ case FAIL_MESSAGE_OPTIONAL_DOCSTRING:
+ failMessageOptionalDocstring();
+ break;
case COMPLETE:
break;
}
}
- void testResult() throws PythonUnitTestParseException {
- // we're at the end of the TEST_CASE section
+ void testCase() throws PythonUnitTestParseException {
+ // separate line before traceback message
if (eqline()) {
mCurrentParseState = ParserState.FAIL_MESSAGE;
return;
}
+ // separate line before test summary
if (line()) {
- mCurrentParseState = ParserState.TEST_SUMMARY;
+ mCurrentParseState = ParserState.RUN_SUMMARY;
+ return;
+ }
+ // empty line preceding the separate line
+ if (emptyLine()) {
+ // skip
return;
}
// actually process the test case
mCurrentParseState = ParserState.TEST_CASE;
- String[] toks = mCurrentLine.split(" ");
- try {
- String testName = toks[0];
- // strip surrounding parens from class name
- String testClass = toks[1].substring(1, toks[1].length() - 1);
- mCurrentTestId = new TestIdentifier(testClass, testName);
- // 3rd token is just "..."
- if (toks.length == 4) {
- // one-word status ("ok" | "ERROR")
- String status = toks[3];
- if (CASE_OK.equals(status)) {
- markTestSuccess();
- }
- // if there's an error just do nothing, we can't get the trace
- // immediately anyway
- } else if (toks.length == 5) {
- // two-word status ("expected failure")
- String status1 = toks[3];
- String status2 = toks[4];
- if (CASE_EXPECTED_FAILURE_1.equals(status1)
- && CASE_EXPECTED_FAILURE_2.equals(status2)) {
- markTestSuccess();
- }
- } else {
- parseError("TestResult");
+ String testName = null, testClass = null, status = null;
+ Matcher m = PATTERN_ONE_LINE_RESULT.matcher(mCurrentLine);
+ if (m.matches()) {
+ // one line test result
+ testName = m.group(1);
+ testClass = m.group(2);
+ status = m.group(3);
+ } else {
+ // two line test result
+ Matcher m1 = PATTERN_TWO_LINE_RESULT_FIRST.matcher(mCurrentLine);
+ if (!m1.matches()) {
+ parseError("Test case and result");
}
- } catch (ArrayIndexOutOfBoundsException e) {
- CLog.d("Underlying error in testResult: " + e);
- throw new PythonUnitTestParseException("FailMessage");
+ testName = m1.group(1);
+ testClass = m1.group(2);
+ if (!advance()) {
+ parseError("Second line of test result");
+ }
+ Matcher m2 = PATTERN_TWO_LINE_RESULT_SECOND.matcher(mCurrentLine);
+ if (!m2.matches()) {
+ parseError("Second line of test result");
+ }
+ status = m2.group(2);
+ }
+ mCurrentTestId = new TestIdentifier(testClass, testName);
+ if (PATTERN_TEST_SUCCESS.matcher(status).matches()) {
+ markTestSuccess();
+ } else if (PATTERN_TEST_SKIPPED.matcher(status).matches()) {
+ markTestSkipped();
+ } else if (PATTERN_TEST_UNEXPECTED_SUCCESS.matcher(status).matches()) {
+ markTestUnexpectedSuccess();
+ } else if (PATTERN_TEST_FAILURE.matcher(status).matches()) {
+ // Do nothing because we can't get the trace immediately
+ } else {
+ throw new PythonUnitTestParseException("Unrecognized test status");
}
}
void failMessage() throws PythonUnitTestParseException {
- // traceback is starting
- if (line()) {
- mCurrentParseState = ParserState.TEST_TRACEBACK;
- mCurrentTraceback = new StringBuilder();
- return;
+ Matcher m = PATTERN_FAIL_MESSAGE.matcher(mCurrentLine);
+ if (!m.matches()) {
+ throw new PythonUnitTestParseException("Failed to parse test failure message");
}
- String[] toks = mCurrentLine.split(" ");
- // 1st token is "ERROR:"
- try {
- String testName = toks[1];
- String testClass = toks[2].substring(1, toks[2].length() - 1);
- mCurrentTestId = new TestIdentifier(testClass, testName);
- } catch (ArrayIndexOutOfBoundsException e) {
- CLog.d("Underlying error in failMessage: " + e);
- throw new PythonUnitTestParseException("FailMessage");
+ String testName = m.group(2);
+ String testClass = m.group(3);
+ mCurrentTestId = new TestIdentifier(testClass, testName);
+ mCurrentParseState = ParserState.FAIL_MESSAGE_OPTIONAL_DOCSTRING;
+ }
+
+ void failMessageOptionalDocstring() throws PythonUnitTestParseException {
+ // skip the optional docstring line if there is one; do nothing otherwise
+ if (!line()) {
+ advance();
}
+ preTraceback();
+ }
+
+ void preTraceback() throws PythonUnitTestParseException {
+ if (!line()) {
+ throw new PythonUnitTestParseException("Failed to parse test failure message");
+ }
+ mCurrentParseState = ParserState.TRACEBACK;
+ mCurrentTraceback = new StringBuilder();
}
void traceback() throws PythonUnitTestParseException {
// traceback is always terminated with LINE or EQLINE
- while (!mCurrentLine.startsWith(LINE) && !mCurrentLine.startsWith(EQLINE)) {
+ while (!line() && !eqline()) {
mCurrentTraceback.append(mCurrentLine);
if (!advance()) return;
}
@@ -260,7 +343,7 @@
markTestFailure();
// move on to the next section
if (line()) {
- mCurrentParseState = ParserState.TEST_SUMMARY;
+ mCurrentParseState = ParserState.RUN_SUMMARY;
}
else if (eqline()) {
mCurrentParseState = ParserState.FAIL_MESSAGE;
@@ -270,24 +353,27 @@
}
}
- void summary() throws PythonUnitTestParseException {
- String[] toks = mCurrentLine.split(" ");
+ void runSummary() throws PythonUnitTestParseException {
+ Matcher m = PATTERN_RUN_SUMMARY.matcher(mCurrentLine);
+ if (!m.matches()) {
+ throw new PythonUnitTestParseException("Failed to parse test summary");
+ }
double time = 0;
try {
- mTotalTestCount = Integer.parseInt(toks[1]);
+ mTotalTestCount = Integer.parseInt(m.group(1));
} catch (NumberFormatException e) {
parseError("integer");
}
try {
- time = Double.parseDouble(toks[4].substring(0, toks[4].length() - 1));
+ time = Double.parseDouble(m.group(2));
} catch (NumberFormatException e) {
parseError("double");
}
mTotalElapsedTime = (long) time * 1000;
- mCurrentParseState = ParserState.TEST_STATUS;
+ mCurrentParseState = ParserState.RUN_RESULT;
}
- boolean completeTestRun() throws PythonUnitTestParseException {
+ void runResult() throws PythonUnitTestParseException {
String failReason = String.format("Failed %d tests", mFailedTestCount);
for (ITestRunListener listener: mListeners) {
// do testRunStarted
@@ -296,22 +382,25 @@
// mark each test passed or failed
for (Entry<TestIdentifier, String> test : mTestResultCache.entrySet()) {
listener.testStarted(test.getKey());
- if (test.getValue() != null) {
+ if (SKIPPED_ENTRY.equals(test.getValue())) {
+ listener.testIgnored(test.getKey());
+ } else if (test.getValue() != null) {
listener.testFailed(test.getKey(), test.getValue());
}
listener.testEnded(test.getKey(), Collections.<String, String>emptyMap());
}
// mark the whole run as passed or failed
- if (mCurrentLine.startsWith(RUN_FAILED)) {
+ // do not rely on the final result message, because Python considers "unexpected success"
+ // passed while we consider it failed
+ if (!PATTERN_RUN_RESULT.matcher(mCurrentLine).matches()) {
+ parseError("Status");
+ }
+ if (mFailedTestCount > 0) {
listener.testRunFailed(failReason);
}
listener.testRunEnded(mTotalElapsedTime, Collections.<String, String>emptyMap());
- if (!mCurrentLine.startsWith(RUN_FAILED) && !mCurrentLine.startsWith(RUN_OK)) {
- parseError("Status");
- }
}
- return true;
}
boolean eqline() {
@@ -326,6 +415,10 @@
return mCurrentLine.startsWith(TRACEBACK_LINE);
}
+ boolean emptyLine() {
+ return mCurrentLine.isEmpty();
+ }
+
/**
* Advance to the next non-empty line.
* @return true if a non-empty line was found, false otherwise.
@@ -356,6 +449,18 @@
mFailedTestCount++;
}
+ private void markTestSkipped() {
+ mTestResultCache.put(mCurrentTestId, SKIPPED_ENTRY);
+ }
+
+ private void markTestUnexpectedSuccess() {
+ // In Python unittest, "unexpected success" (tests that are marked with
+ // @unittest.expectedFailure but passed) will not fail the entire test run.
+ // This behaviour is usually not desired, and such tests should be treated as failed.
+ mTestResultCache.put(mCurrentTestId, "Test unexpected succeeded");
+ mFailedTestCount++;
+ }
+
@Override
public boolean isCancelled() {
return false;
diff --git a/src/com/android/tradefed/testtype/StubTest.java b/src/com/android/tradefed/testtype/StubTest.java
index 0809e41..d0d7ff8 100644
--- a/src/com/android/tradefed/testtype/StubTest.java
+++ b/src/com/android/tradefed/testtype/StubTest.java
@@ -27,6 +27,7 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
+import java.util.LinkedHashMap;
import java.util.List;
/**
@@ -91,7 +92,7 @@
TestIdentifier testId = new TestIdentifier("StubTest", "StubMethod");
listener.testStarted(testId);
listener.testEnded(testId, Collections.emptyMap());
- listener.testRunEnded(500, Collections.emptyMap());
+ listener.testRunEnded(500, new LinkedHashMap<>());
}
}
diff --git a/src/com/android/tradefed/testtype/suite/ITestSuite.java b/src/com/android/tradefed/testtype/suite/ITestSuite.java
index 7949afb..90ed34c 100644
--- a/src/com/android/tradefed/testtype/suite/ITestSuite.java
+++ b/src/com/android/tradefed/testtype/suite/ITestSuite.java
@@ -26,6 +26,7 @@
import com.android.tradefed.invoker.IInvocationContext;
import com.android.tradefed.log.LogUtil.CLog;
import com.android.tradefed.result.ITestInvocationListener;
+import com.android.tradefed.result.ITestLoggerReceiver;
import com.android.tradefed.result.InputStreamSource;
import com.android.tradefed.result.LogDataType;
import com.android.tradefed.suite.checker.ISystemStatusChecker;
@@ -193,11 +194,18 @@
return;
}
+ // Allow checkers to log files for easier debugging.
+ for (ISystemStatusChecker checker : mSystemStatusCheckers) {
+ if (checker instanceof ITestLoggerReceiver) {
+ ((ITestLoggerReceiver) checker).setTestLogger(listener);
+ }
+ }
+
/** Setup a special listener to take actions on test failures. */
TestFailureListener failureListener =
new TestFailureListener(
listener,
- getDevice(),
+ mContext.getDevices(),
mBugReportOnFailure,
mLogcatOnFailure,
mScreenshotOnFailure,
@@ -225,7 +233,7 @@
}
try {
- mContext.setModuleInvocationContext(module.getModuleInvocationContext());
+ listener.testModuleStarted(module.getModuleInvocationContext());
// Populate the module context with devices and builds
for (String deviceName : mContext.getDeviceConfigNames()) {
module.getModuleInvocationContext()
@@ -237,7 +245,7 @@
} finally {
// clear out module invocation context since we are now done with module
// execution
- mContext.setModuleInvocationContext(null);
+ listener.testModuleEnded();
}
}
} catch (DeviceNotAvailableException e) {
@@ -500,7 +508,7 @@
@Override
public long getRuntimeHint() {
if (mDirectModule != null) {
- CLog.e(
+ CLog.d(
" %s: %s",
mDirectModule.getId(),
TimeUtil.formatElapsedTime(mDirectModule.getRuntimeHint()));
diff --git a/src/com/android/tradefed/testtype/suite/ModuleDefinition.java b/src/com/android/tradefed/testtype/suite/ModuleDefinition.java
index 3150396..174cdc9 100644
--- a/src/com/android/tradefed/testtype/suite/ModuleDefinition.java
+++ b/src/com/android/tradefed/testtype/suite/ModuleDefinition.java
@@ -257,7 +257,10 @@
if (preparationException != null) {
// For reporting purpose we create a failure placeholder with the error stack
// similar to InitializationError of JUnit.
- TestIdentifier testid = new TestIdentifier(getId(), "PreparationError");
+ TestIdentifier testid =
+ new TestIdentifier(
+ preparationException.getClass().getCanonicalName(),
+ "preparationError");
listener.testRunStarted(getId(), 1);
listener.testStarted(testid);
StringWriter sw = new StringWriter();
@@ -363,6 +366,9 @@
getId(), StreamUtil.getStackTrace(tearDownException));
throw tearDownException;
} finally {
+ if (failureListener != null) {
+ failureListener.join();
+ }
mElapsedTearDown = getCurrentTime() - cleanStartTime;
// finalize results
if (preparationException == null) {
diff --git a/src/com/android/tradefed/testtype/suite/TestFailureListener.java b/src/com/android/tradefed/testtype/suite/TestFailureListener.java
index a92e671..84035d3 100644
--- a/src/com/android/tradefed/testtype/suite/TestFailureListener.java
+++ b/src/com/android/tradefed/testtype/suite/TestFailureListener.java
@@ -27,7 +27,9 @@
import com.google.common.annotations.VisibleForTesting;
+import java.util.ArrayList;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
/**
@@ -40,8 +42,9 @@
/* Arbitrary upper limit for mMaxLogcatBytes to avoid un-reasonably high limit */
private static final int LOGCAT_BYTE_LIMIT = 20 * 1024 * 1024; // 20 MB
private static final String LOGCAT_ON_FAILURE_SIZE_OPTION = "logcat-on-failure-size";
+ private static final long LOGCAT_CAPTURE_TIMEOUT = 2 * 60 * 1000;
- private ITestDevice mDevice;
+ private List<ITestDevice> mListDevice;
private ITestInvocationListener mListener;
private boolean mBugReportOnFailure;
private boolean mLogcatOnFailure;
@@ -49,12 +52,18 @@
private boolean mRebootOnFailure;
private int mMaxLogcatBytes;
private Map<TestIdentifier, Long> mTrackStartTime = new HashMap<>();
+ private List<Thread> mLogcatThreads = new ArrayList<>();
- public TestFailureListener(ITestInvocationListener listener, ITestDevice device,
- boolean bugReportOnFailure, boolean logcatOnFailure, boolean screenshotOnFailure,
- boolean rebootOnFailure, int maxLogcatBytes) {
+ public TestFailureListener(
+ ITestInvocationListener listener,
+ List<ITestDevice> devices,
+ boolean bugReportOnFailure,
+ boolean logcatOnFailure,
+ boolean screenshotOnFailure,
+ boolean rebootOnFailure,
+ int maxLogcatBytes) {
mListener = listener;
- mDevice = device;
+ mListDevice = devices;
mBugReportOnFailure = bugReportOnFailure;
mLogcatOnFailure = logcatOnFailure;
mScreenshotOnFailure = screenshotOnFailure;
@@ -80,7 +89,7 @@
public void testStarted(TestIdentifier test) {
if (mLogcatOnFailure) {
try {
- mTrackStartTime.put(test, mDevice.getDeviceDate());
+ mTrackStartTime.put(test, mListDevice.get(0).getDeviceDate());
} catch (DeviceNotAvailableException e) {
CLog.e(e);
// we fall back to logcat dump on null.
@@ -106,59 +115,102 @@
public void testFailed(TestIdentifier test, String trace) {
CLog.i("FailureListener.testFailed %s %b %b %b", test.toString(), mBugReportOnFailure,
mLogcatOnFailure, mScreenshotOnFailure);
+ for (ITestDevice device : mListDevice) {
+ captureFailure(device, test);
+ }
+ }
+
+ /** Capture the appropriate logs for one device for one test failure. */
+ private void captureFailure(ITestDevice device, TestIdentifier test) {
+ String serial = device.getSerialNumber();
if (mScreenshotOnFailure) {
try {
- try (InputStreamSource screenSource = mDevice.getScreenshot()) {
+ try (InputStreamSource screenSource = device.getScreenshot()) {
testLog(
- String.format("%s-screenshot", test.toString()),
+ String.format("%s-%s-screenshot", test.toString(), serial),
LogDataType.PNG,
screenSource);
}
} catch (DeviceNotAvailableException e) {
CLog.e(e);
- CLog.e("Device %s became unavailable while capturing screenshot",
- mDevice.getSerialNumber());
+ CLog.e("Device %s became unavailable while capturing screenshot", serial);
}
}
if (mBugReportOnFailure) {
- try (InputStreamSource bugSource = mDevice.getBugreport()) {
+ try (InputStreamSource bugSource = device.getBugreportz()) {
testLog(
- String.format("%s-bugreport", test.toString()),
- LogDataType.BUGREPORT,
+ String.format("%s-%s-bugreport", test.toString(), serial),
+ LogDataType.BUGREPORTZ,
bugSource);
}
}
if (mLogcatOnFailure) {
- InputStreamSource logSource = null;
- Long startTime = mTrackStartTime.remove(test);
- if (startTime != null) {
- logSource = mDevice.getLogcatSince(startTime);
+ Runnable captureLogcat =
+ new Runnable() {
+ @Override
+ public void run() {
+ InputStreamSource logSource = null;
+ Long startTime = mTrackStartTime.remove(test);
+ if (startTime != null) {
+ logSource = device.getLogcatSince(startTime);
+ } else {
+ // sleep 2s to ensure test failure stack trace makes it into the
+ // logcat capture
+ getRunUtil().sleep(2 * 1000);
+ logSource = device.getLogcat(mMaxLogcatBytes);
+ }
+ testLog(
+ String.format("%s-%s-logcat", test.toString(), serial),
+ LogDataType.LOGCAT,
+ logSource);
+ logSource.close();
+ }
+ };
+ if (mRebootOnFailure) {
+ captureLogcat.run();
} else {
- // sleep 2s to ensure test failure stack trace makes it into logcat capture
- getRunUtil().sleep(2 * 1000);
- logSource = mDevice.getLogcat(mMaxLogcatBytes);
+ // If no reboot will be done afterward capture asynchronously the logcat.
+ Thread captureThread =
+ new Thread(captureLogcat, String.format("Capture failure logcat %s", test));
+ captureThread.setDaemon(true);
+ mLogcatThreads.add(captureThread);
+ captureThread.start();
}
- testLog(String.format("%s-logcat", test.toString()), LogDataType.LOGCAT, logSource);
- logSource.close();
}
if (mRebootOnFailure) {
try {
// Rebooting on all failures can hide legitimate issues and platform instabilities,
// therefore only allowed on "user-debug" and "eng" builds.
- if ("user".equals(mDevice.getProperty("ro.build.type"))) {
+ if ("user".equals(device.getProperty("ro.build.type"))) {
CLog.e("Reboot-on-failure should only be used during development," +
" this is a\" user\" build device");
} else {
- mDevice.reboot();
+ device.reboot();
}
} catch (DeviceNotAvailableException e) {
CLog.e(e);
- CLog.e("Device %s became unavailable while rebooting",
- mDevice.getSerialNumber());
+ CLog.e("Device %s became unavailable while rebooting", serial);
}
}
}
+ /** Join on all the logcat capturing threads to ensure they terminate. */
+ public void join() {
+ synchronized (mLogcatThreads) {
+ for (Thread t : mLogcatThreads) {
+ if (!t.isAlive()) {
+ continue;
+ }
+ try {
+ t.join(LOGCAT_CAPTURE_TIMEOUT);
+ } catch (InterruptedException e) {
+ CLog.e(e);
+ }
+ }
+ mLogcatThreads.clear();
+ }
+ }
+
@Override
public void testLog(String dataName, LogDataType dataType, InputStreamSource dataStream) {
mListener.testLog(dataName, dataType, dataStream);
diff --git a/src/com/android/tradefed/testtype/suite/TestSuiteInfo.java b/src/com/android/tradefed/testtype/suite/TestSuiteInfo.java
new file mode 100644
index 0000000..8a6c36e
--- /dev/null
+++ b/src/com/android/tradefed/testtype/suite/TestSuiteInfo.java
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.testtype.suite;
+
+import com.android.tradefed.log.LogUtil.CLog;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Properties;
+
+/**
+ * A class that resolves loading of build related metadata for test suite
+ * <p>
+ * To properly expose related info, a test suite must include a
+ * <code>test-suite-info.properties</code> file in its jar resources
+ */
+public class TestSuiteInfo {
+
+    /** expected property filename in jar resource */
+    private static final String SUITE_INFO_PROPERTY = "/test-suite-info.properties";
+    /** suite info keys */
+    private static final String BUILD_NUMBER = "build_number";
+    private static final String TARGET_ARCH = "target_arch";
+    private static final String NAME = "name";
+    private static final String FULLNAME = "fullname";
+    private static final String VERSION = "version";
+
+    /** Singleton instance, lazily created by {@link #getInstance()}. */
+    private static TestSuiteInfo sInstance;
+    /** Loaded suite info; stubbed out when the jar resource is missing. */
+    private Properties mTestSuiteInfo;
+
+    private TestSuiteInfo() {
+        try (InputStream is = TestSuiteInfo.class.getResourceAsStream(SUITE_INFO_PROPERTY)) {
+            if (is != null) {
+                mTestSuiteInfo = loadSuiteInfo(is);
+            } else {
+                // Missing resource is tolerated (e.g. local development builds): fall back
+                // to recognizable stub values instead of failing.
+                CLog.w("Unable to load suite info from jar resource %s, using stub info instead",
+                        SUITE_INFO_PROPERTY);
+                mTestSuiteInfo = new Properties();
+                mTestSuiteInfo.setProperty(BUILD_NUMBER, "[stub build number]");
+                mTestSuiteInfo.setProperty(TARGET_ARCH, "[stub target arch]");
+                mTestSuiteInfo.setProperty(NAME, "[stub name]");
+                mTestSuiteInfo.setProperty(FULLNAME, "[stub fullname]");
+                mTestSuiteInfo.setProperty(VERSION, "[stub version]");
+            }
+        } catch (IOException ioe) {
+            // rethrow as runtime exception, preserving the original cause for debugging
+            throw new RuntimeException(String.format(
+                    "error loading jar resource file \"%s\" for test suite info",
+                    SUITE_INFO_PROPERTY), ioe);
+        }
+    }
+
+    /** Performs the actual loading of properties; overridable for testing. */
+    protected Properties loadSuiteInfo(InputStream is) throws IOException {
+        Properties p = new Properties();
+        p.load(is);
+        return p;
+    }
+
+    /**
+     * Retrieves the singleton instance, which also triggers loading of the related test suite info
+     * from embedded resource files.
+     *
+     * @return the singleton {@link TestSuiteInfo} instance
+     */
+    public static synchronized TestSuiteInfo getInstance() {
+        // synchronized to make lazy initialization safe when called from multiple threads
+        if (sInstance == null) {
+            sInstance = new TestSuiteInfo();
+        }
+        return sInstance;
+    }
+
+    /** Gets the build number of the test suite */
+    public String getBuildNumber() {
+        return mTestSuiteInfo.getProperty(BUILD_NUMBER);
+    }
+
+    /** Gets the target archs supported by the test suite */
+    public String getTargetArch() {
+        return mTestSuiteInfo.getProperty(TARGET_ARCH);
+    }
+
+    /** Gets the short name of the test suite */
+    public String getName() {
+        return mTestSuiteInfo.getProperty(NAME);
+    }
+
+    /** Gets the full name of the test suite */
+    public String getFullName() {
+        return mTestSuiteInfo.getProperty(FULLNAME);
+    }
+
+    /** Gets the version name of the test suite */
+    public String getVersion() {
+        return mTestSuiteInfo.getProperty(VERSION);
+    }
+
+    /**
+     * Retrieves test information keyed with the provided name.
+     *
+     * @param name the property key to look up
+     * @return the associated value, or null if the key is not present
+     */
+    public String get(String name) {
+        return mTestSuiteInfo.getProperty(name);
+    }
+}
diff --git a/src/com/android/tradefed/testtype/suite/TfSuiteRunner.java b/src/com/android/tradefed/testtype/suite/TfSuiteRunner.java
index 4afac1c..eb556b9 100644
--- a/src/com/android/tradefed/testtype/suite/TfSuiteRunner.java
+++ b/src/com/android/tradefed/testtype/suite/TfSuiteRunner.java
@@ -34,6 +34,7 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
@@ -43,6 +44,8 @@
*/
public class TfSuiteRunner extends ITestSuite {
+ private static final String CONFIG_EXT = ".config";
+
@Option(name = "run-suite-tag", description = "The tag that must be run.",
mandatory = true)
private String mSuiteTag = null;
@@ -82,7 +85,9 @@
new LinkedHashMap<String, IConfiguration>();
IConfigurationFactory configFactory = ConfigurationFactory.getInstance();
// TODO: Do a better job searching for configs.
- List<String> configs = configFactory.getConfigList(mSuitePrefix);
+ // We do not load config from environment, they should be inside the testsDir of the build
+ // info.
+ List<String> configs = configFactory.getConfigList(mSuitePrefix, false);
if (getBuildInfo() instanceof IDeviceBuildInfo) {
IDeviceBuildInfo deviceBuildInfo = (IDeviceBuildInfo) getBuildInfo();
@@ -117,7 +122,8 @@
ConfigurationUtil.getConfigNamesFromDirs(mSuitePrefix, extraTestCasesDirs));
}
}
-
+ // Sort configs to ensure they are always evaluated and added in the same order.
+ Collections.sort(configs);
for (String configName : configs) {
try {
IConfiguration testConfig =
@@ -175,7 +181,7 @@
}
// If we have any IRemoteTests remaining in the base configuration, it will run.
if (!config.getTests().isEmpty()) {
- configMap.put(configName, config);
+ configMap.put(sanitizeModuleName(configName), config);
}
return configMap;
@@ -184,4 +190,24 @@
private String getSuiteTag() {
return mSuiteTag;
}
+
+    /**
+     * Some module names are currently the absolute path name of a config file. Sanitize them so
+     * they look more like a short included config name.
+     *
+     * @param originalName the raw config name, possibly an absolute path ending in ".config"
+     * @return the sanitized module name
+     */
+    private String sanitizeModuleName(String originalName) {
+        // Strip the ".config" extension if present.
+        if (originalName.endsWith(CONFIG_EXT)) {
+            originalName = originalName.substring(0, originalName.length() - CONFIG_EXT.length());
+        }
+        // Names that are not absolute paths are already in the short form.
+        if (!originalName.startsWith("/")) {
+            return originalName;
+        }
+        // if it's an absolute path
+        String[] segments = originalName.split("/");
+        // Too few path segments to meaningfully shorten; keep the name as-is.
+        if (segments.length < 3) {
+            return originalName;
+        }
+        // return last two segments only
+        return String.join("/", segments[segments.length - 2], segments[segments.length - 1]);
+    }
}
diff --git a/src/com/android/tradefed/testtype/suite/ValidateSuiteConfigHelper.java b/src/com/android/tradefed/testtype/suite/ValidateSuiteConfigHelper.java
index 914037f..11701a0 100644
--- a/src/com/android/tradefed/testtype/suite/ValidateSuiteConfigHelper.java
+++ b/src/com/android/tradefed/testtype/suite/ValidateSuiteConfigHelper.java
@@ -26,6 +26,8 @@
*/
public class ValidateSuiteConfigHelper {
+ private ValidateSuiteConfigHelper() {}
+
/**
* Check that a configuration is properly built to run in a suite.
*
diff --git a/src/com/android/tradefed/util/FileUtil.java b/src/com/android/tradefed/util/FileUtil.java
index 2f87111..e394653 100644
--- a/src/com/android/tradefed/util/FileUtil.java
+++ b/src/com/android/tradefed/util/FileUtil.java
@@ -415,7 +415,10 @@
public static void simlinkFile(File origFile, File destFile) throws IOException {
CommandResult res = linkFile(origFile, destFile, true);
if (!CommandStatus.SUCCESS.equals(res.getStatus())) {
- throw new IOException("Error trying to simlink: " + res.getStderr());
+ throw new IOException(
+ String.format(
+ "Error trying to simlink: %s\nstdout:%s\nstderr:%s",
+ res.getStatus(), res.getStdout(), res.getStderr()));
}
}
@@ -435,7 +438,7 @@
cmd.add(origFile.getAbsolutePath());
cmd.add(destFile.getAbsolutePath());
CommandResult result =
- RunUtil.getDefault().runTimedCmd(10 * 1000, cmd.toArray(new String[0]));
+ RunUtil.getDefault().runTimedCmdSilently(10 * 1000, cmd.toArray(new String[0]));
return result;
}
diff --git a/src/com/android/tradefed/util/RunUtil.java b/src/com/android/tradefed/util/RunUtil.java
index 9eb762a0..e8ec464 100644
--- a/src/com/android/tradefed/util/RunUtil.java
+++ b/src/com/android/tradefed/util/RunUtil.java
@@ -319,10 +319,12 @@
boolean logErrors) {
checkInterrupted();
RunnableNotifier runThread = new RunnableNotifier(runnable, logErrors);
- if (timeout > 0l) {
- CLog.d("Running command with timeout: %dms", timeout);
- } else {
- CLog.d("Running command without timeout.");
+ if (logErrors) {
+ if (timeout > 0l) {
+ CLog.d("Running command with timeout: %dms", timeout);
+ } else {
+ CLog.d("Running command without timeout.");
+ }
}
runThread.start();
long startTime = System.currentTimeMillis();
@@ -722,6 +724,13 @@
}
}
}
+
+ @Override
+ public String toString() {
+ return "RunnableResult [command="
+ + ((mProcessBuilder != null) ? mProcessBuilder.command() : null)
+ + "]";
+ }
}
/**
diff --git a/src/com/android/tradefed/util/SystemUtil.java b/src/com/android/tradefed/util/SystemUtil.java
index b1e9284..727ad91 100644
--- a/src/com/android/tradefed/util/SystemUtil.java
+++ b/src/com/android/tradefed/util/SystemUtil.java
@@ -25,9 +25,8 @@
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
-import java.util.HashSet;
import java.util.List;
-import java.util.Set;
+
/** Utility class for making system calls. */
public class SystemUtil {
@@ -63,14 +62,17 @@
/** Get a list of {@link File} pointing to tests directories external to Tradefed. */
public static List<File> getExternalTestCasesDirs() {
List<File> testCasesDirs = new ArrayList<File>();
- // TODO(b/36782030): Add ENV_ANDROID_HOST_OUT_TESTCASES back to the list.
- Set<String> testCasesDirNames =
- new HashSet<String>(
- Arrays.asList(singleton.getEnv(ENV_ANDROID_TARGET_OUT_TESTCASES)));
+ // TODO(b/36782030): Support running both HOST and TARGET tests.
+ List<String> testCasesDirNames =
+ // List order matters. ConfigurationFactory caller uses first dir with test config.
+ Arrays.asList(
+ singleton.getEnv(ENV_ANDROID_TARGET_OUT_TESTCASES),
+ singleton.getEnv(ENV_ANDROID_HOST_OUT_TESTCASES));
for (String testCasesDirName : testCasesDirNames) {
if (testCasesDirName != null) {
File dir = new File(testCasesDirName);
if (dir.exists() && dir.isDirectory()) {
+ CLog.d("Found test case dir: %s", testCasesDirName);
testCasesDirs.add(dir);
} else {
CLog.w(
diff --git a/src/com/android/tradefed/util/ZipUtil2.java b/src/com/android/tradefed/util/ZipUtil2.java
index 17046e9..aa9256d 100644
--- a/src/com/android/tradefed/util/ZipUtil2.java
+++ b/src/com/android/tradefed/util/ZipUtil2.java
@@ -72,6 +72,20 @@
}
/**
+ * Utility method to extract a zip file into a given directory. The zip file being presented as
+ * a {@link File}.
+ *
+ * @param zipFile a {@link File} pointing to a zip file.
+ * @param destDir the local dir to extract file to
+ * @throws IOException if failed to extract file
+ */
+ public static void extractZip(File zipFile, File destDir) throws IOException {
+ try (ZipFile zip = new ZipFile(zipFile)) {
+ extractZip(zip, destDir);
+ }
+ }
+
+ /**
* Utility method to extract one specific file from zip file into a tmp file
*
* @param zipFile the {@link ZipFile} to extract
diff --git a/tests/Android.mk b/tests/Android.mk
index a392019..35dd15e 100644
--- a/tests/Android.mk
+++ b/tests/Android.mk
@@ -26,7 +26,7 @@
LOCAL_MODULE := tradefed-tests
LOCAL_MODULE_TAGS := optional
-LOCAL_STATIC_JAVA_LIBRARIES := easymock objenesis-host mockito-host commons-compress-prebuilt
+LOCAL_STATIC_JAVA_LIBRARIES := easymock objenesis-host mockito-host commons-compress-prebuilt truth-host-prebuilt
LOCAL_JAVA_LIBRARIES := tradefed host-libprotobuf-java-full
LOCAL_JAR_MANIFEST := MANIFEST.mf
diff --git a/tests/res/testconfigs/multi-device-incorrect-include.xml b/tests/res/testconfigs/multi-device-incorrect-include.xml
new file mode 100644
index 0000000..ac02418
--- /dev/null
+++ b/tests/res/testconfigs/multi-device-incorrect-include.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2017 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration description="Multi device parsing, where an include that also contains a device is inside a device">
+ <device name="device1">
+ <!-- This is incorrect because it includes a device inside a device -->
+ <include name="multi-device-empty" />
+ </device>
+</configuration>
diff --git a/tests/res/testconfigs/test-config-multi-include.xml b/tests/res/testconfigs/test-config-multi-include.xml
new file mode 100644
index 0000000..84f6c09
--- /dev/null
+++ b/tests/res/testconfigs/test-config-multi-include.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2017 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration
+ description="test for multiple device receiving options, with an include">
+
+ <device name="device1" >
+ <build_provider class="com.android.tradefed.build.StubBuildProvider" />
+ <device_recovery class="com.android.tradefed.device.WaitDeviceRecovery" />
+ <target_preparer class="com.android.tradefed.targetprep.StubTargetPreparer" />
+ </device>
+ <device name="device2" >
+ <include name="mandatory-config" />
+ <target_preparer class="com.android.tradefed.targetprep.StubTargetPreparer" />
+ </device>
+</configuration>
diff --git a/tests/res/testconfigs/test-config-multi.xml b/tests/res/testconfigs/test-config-multi.xml
new file mode 100644
index 0000000..3cf3de2
--- /dev/null
+++ b/tests/res/testconfigs/test-config-multi.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2017 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration
+ description="test for multiple device receiving options">
+
+ <device name="device1" >
+ <build_provider class="com.android.tradefed.build.StubBuildProvider" />
+ <device_recovery class="com.android.tradefed.device.WaitDeviceRecovery" />
+ <target_preparer class="com.android.tradefed.targetprep.StubTargetPreparer" />
+ </device>
+ <device name="device2" >
+ <target_preparer class="com.android.tradefed.targetprep.StubTargetPreparer" />
+ </device>
+
+ <test class="com.android.tradefed.config.StubOptionTest" >
+ <option name="option" value="valueFromTestConfig" />
+ </test>
+ <logger class="com.android.tradefed.log.FileLogger" />
+ <result_reporter class="com.android.tradefed.result.XmlResultReporter" />
+
+</configuration>
diff --git a/tests/src/com/android/tradefed/UnitTests.java b/tests/src/com/android/tradefed/UnitTests.java
index c7a39ac..2d35fb7 100644
--- a/tests/src/com/android/tradefed/UnitTests.java
+++ b/tests/src/com/android/tradefed/UnitTests.java
@@ -45,6 +45,7 @@
import com.android.tradefed.config.OptionCopierTest;
import com.android.tradefed.config.OptionSetterTest;
import com.android.tradefed.config.OptionUpdateRuleTest;
+import com.android.tradefed.config.SandboxConfigurationFactoryTest;
import com.android.tradefed.device.BackgroundDeviceActionTest;
import com.android.tradefed.device.CpuStatsCollectorTest;
import com.android.tradefed.device.DeviceManagerTest;
@@ -62,7 +63,10 @@
import com.android.tradefed.device.TopHelperTest;
import com.android.tradefed.device.WaitDeviceRecoveryTest;
import com.android.tradefed.device.WifiHelperTest;
+import com.android.tradefed.device.metric.ScheduledDeviceMetricCollectorTest;
+import com.android.tradefed.device.metric.BaseDeviceMetricCollectorTest;
import com.android.tradefed.invoker.InvocationContextTest;
+import com.android.tradefed.invoker.ShardListenerTest;
import com.android.tradefed.invoker.TestInvocationMultiTest;
import com.android.tradefed.invoker.TestInvocationTest;
import com.android.tradefed.invoker.shard.ShardHelperTest;
@@ -74,6 +78,7 @@
import com.android.tradefed.log.TerribleFailureEmailHandlerTest;
import com.android.tradefed.profiler.AggregatingProfilerTest;
import com.android.tradefed.profiler.MetricOutputDataTest;
+import com.android.tradefed.profiler.recorder.NumericAggregateFunctionTest;
import com.android.tradefed.profiler.recorder.TraceMetricTest;
import com.android.tradefed.profiler.recorder.TraceMetricsRecorderTest;
import com.android.tradefed.profiler.recorder.TraceParserTest;
@@ -96,6 +101,10 @@
import com.android.tradefed.result.TestFailureEmailResultReporterTest;
import com.android.tradefed.result.TestSummaryTest;
import com.android.tradefed.result.XmlResultReporterTest;
+import com.android.tradefed.sandbox.SandboxConfigDumpTest;
+import com.android.tradefed.sandbox.SandboxConfigUtilTest;
+import com.android.tradefed.sandbox.TradefedSandboxTest;
+import com.android.tradefed.suite.checker.ActivityStatusCheckerTest;
import com.android.tradefed.suite.checker.KeyguardStatusCheckerTest;
import com.android.tradefed.suite.checker.SystemServerFileDescriptorCheckerTest;
import com.android.tradefed.suite.checker.SystemServerStatusCheckerTest;
@@ -269,6 +278,7 @@
OptionCopierTest.class,
OptionSetterTest.class,
OptionUpdateRuleTest.class,
+ SandboxConfigurationFactoryTest.class,
// device
BackgroundDeviceActionTest.class,
@@ -290,8 +300,13 @@
WaitDeviceRecoveryTest.class,
WifiHelperTest.class,
+ // device.metric
+ ScheduledDeviceMetricCollectorTest.class,
+ BaseDeviceMetricCollectorTest.class,
+
// invoker
InvocationContextTest.class,
+ ShardListenerTest.class,
TestInvocationMultiTest.class,
TestInvocationTest.class,
@@ -309,6 +324,7 @@
// profiler subdirs
AggregatingProfilerTest.class,
MetricOutputDataTest.class,
+ NumericAggregateFunctionTest.class,
TraceMetricsRecorderTest.class,
TraceMetricTest.class,
TraceParserTest.class,
@@ -362,7 +378,13 @@
// targetprep.suite
SuiteApkInstallerTest.class,
+ // sandbox
+ SandboxConfigDumpTest.class,
+ SandboxConfigUtilTest.class,
+ TradefedSandboxTest.class,
+
// suite/checker
+ ActivityStatusCheckerTest.class,
KeyguardStatusCheckerTest.class,
SystemServerFileDescriptorCheckerTest.class,
SystemServerStatusCheckerTest.class,
diff --git a/tests/src/com/android/tradefed/config/ConfigurationFactoryTest.java b/tests/src/com/android/tradefed/config/ConfigurationFactoryTest.java
index 3a94edd..15b643f 100644
--- a/tests/src/com/android/tradefed/config/ConfigurationFactoryTest.java
+++ b/tests/src/com/android/tradefed/config/ConfigurationFactoryTest.java
@@ -16,6 +16,7 @@
package com.android.tradefed.config;
import com.android.ddmlib.Log.LogLevel;
+import com.android.tradefed.build.LocalDeviceBuildProvider;
import com.android.tradefed.config.ConfigurationFactory.ConfigId;
import com.android.tradefed.log.ILeveledLogOutput;
import com.android.tradefed.log.LogUtil.CLog;
@@ -1210,6 +1211,68 @@
.getDeviceRequirements().getSerials());
}
+ /**
+ * Test that when parsing command line options, boolean options with Device tag and namespace
+ * are correctly assigned.
+ */
+ public void testCreateConfiguration_injectDeviceBooleanOption() throws Exception {
+ IConfiguration config =
+ mFactory.createConfigurationFromArgs(
+ new String[] {
+ "test-config-multi",
+ "--{device1}no-test-boolean-option",
+ "--{device1}test-boolean-option-false",
+ // testing with namespace too
+ "--{device2}stub-preparer:no-test-boolean-option",
+ "--{device2}stub-preparer:test-boolean-option-false"
+ });
+ assertEquals(2, config.getDeviceConfig().size());
+ IDeviceConfiguration device1 = config.getDeviceConfigByName("device1");
+ StubTargetPreparer deviceSetup1 = (StubTargetPreparer) device1.getTargetPreparers().get(0);
+ // default value of test-boolean-option is true, we set it to false
+ assertFalse(deviceSetup1.getTestBooleanOption());
+ // default value of test-boolean-option-false is false, we set it to true.
+ assertTrue(deviceSetup1.getTestBooleanOptionFalse());
+
+ IDeviceConfiguration device2 = config.getDeviceConfigByName("device2");
+ StubTargetPreparer deviceSetup2 = (StubTargetPreparer) device2.getTargetPreparers().get(0);
+ assertFalse(deviceSetup2.getTestBooleanOption());
+ assertTrue(deviceSetup2.getTestBooleanOptionFalse());
+ }
+
+ /** Test that when an <include> tag is used inside a <device> tag we correctly resolve it. */
+ public void testCreateConfiguration_includeInDevice() throws Exception {
+ IConfiguration config =
+ mFactory.createConfigurationFromArgs(
+ new String[] {"test-config-multi-include", "--test-dir", "faketestdir"});
+ assertEquals(2, config.getDeviceConfig().size());
+ IDeviceConfiguration device1 = config.getDeviceConfigByName("device1");
+ assertTrue(device1.getTargetPreparers().get(0) instanceof StubTargetPreparer);
+ // The included config in device2 loads a different build_provider
+ IDeviceConfiguration device2 = config.getDeviceConfigByName("device2");
+ assertTrue(device2.getBuildProvider() instanceof LocalDeviceBuildProvider);
+ LocalDeviceBuildProvider provider = (LocalDeviceBuildProvider) device2.getBuildProvider();
+ // command line options are properly propagated to the included object in device tag.
+ assertEquals("faketestdir", provider.getTestDir().getName());
+ }
+
+ /**
+ * Test when an <include> tag tries to load a <device> tag inside another <device> tag. This
+ * should throw an exception.
+ */
+ public void testCreateConfiguration_includeInDevice_inDevice() throws Exception {
+ try {
+ mFactory.createConfigurationFromArgs(
+ new String[] {
+ "multi-device-incorrect-include",
+ });
+ fail("Should have thrown an exception.");
+ } catch (ConfigurationException expected) {
+ assertEquals(
+ "<device> tag cannot be included inside another device", expected.getMessage());
+ }
+ }
+
/** Test that {@link ConfigurationFactory#reorderArgs(String[])} is properly reordering args. */
public void testReorderArgs_check_ordering() throws Throwable {
String[] args =
diff --git a/tests/src/com/android/tradefed/config/ConfigurationTest.java b/tests/src/com/android/tradefed/config/ConfigurationTest.java
index 4a108c8..4c71639 100644
--- a/tests/src/com/android/tradefed/config/ConfigurationTest.java
+++ b/tests/src/com/android/tradefed/config/ConfigurationTest.java
@@ -26,7 +26,6 @@
import com.android.tradefed.device.IDeviceSelection;
import com.android.tradefed.invoker.InvocationContext;
import com.android.tradefed.log.ILeveledLogOutput;
-import com.android.tradefed.log.LogUtil.CLog;
import com.android.tradefed.result.ITestInvocationListener;
import com.android.tradefed.result.TextResultReporter;
import com.android.tradefed.targetprep.ITargetPreparer;
@@ -638,7 +637,28 @@
String content = FileUtil.readStringFromFile(test);
assertTrue(content.length() > 100);
assertTrue(content.contains("<configuration>"));
- CLog.e("%s", content);
+ assertTrue(content.contains("<test class"));
+ } finally {
+ FileUtil.deleteFile(test);
+ }
+ }
+
+ /**
+ * Test that {@link Configuration#dumpXml(PrintWriter)} produce the xml output without objects
+ * that have been filtered.
+ */
+ public void testDumpXml_withFilter() throws IOException {
+ File test = FileUtil.createTempFile("dumpxml", "xml");
+ try {
+ PrintWriter out = new PrintWriter(test);
+ List<String> filters = new ArrayList<>();
+ filters.add(Configuration.TEST_TYPE_NAME);
+ mConfig.dumpXml(out, filters);
+ out.flush();
+ String content = FileUtil.readStringFromFile(test);
+ assertTrue(content.length() > 100);
+ assertTrue(content.contains("<configuration>"));
+ assertFalse(content.contains("<test class"));
} finally {
FileUtil.deleteFile(test);
}
diff --git a/tests/src/com/android/tradefed/config/ConfigurationUtilTest.java b/tests/src/com/android/tradefed/config/ConfigurationUtilTest.java
index c0603bd..f43e6d3 100644
--- a/tests/src/com/android/tradefed/config/ConfigurationUtilTest.java
+++ b/tests/src/com/android/tradefed/config/ConfigurationUtilTest.java
@@ -27,6 +27,7 @@
import java.io.File;
import java.io.PrintWriter;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
@@ -37,8 +38,8 @@
private static final String DEVICE_MANAGER_TYPE_NAME = "device_manager";
/**
- * Test {@link ConfigurationUtil#dumpClassToXml(KXmlSerializer, String, Object)} to create a
- * dump of a configuration.
+ * Test {@link ConfigurationUtil#dumpClassToXml(KXmlSerializer, String, Object, List)} to create
+ * a dump of a configuration.
*/
@Test
public void testDumpClassToXml() throws Throwable {
@@ -52,7 +53,8 @@
serializer.startTag(null, ConfigurationUtil.CONFIGURATION_NAME);
DeviceManager deviceManager = new DeviceManager();
- ConfigurationUtil.dumpClassToXml(serializer, DEVICE_MANAGER_TYPE_NAME, deviceManager);
+ ConfigurationUtil.dumpClassToXml(
+ serializer, DEVICE_MANAGER_TYPE_NAME, deviceManager, new ArrayList<String>());
serializer.endTag(null, ConfigurationUtil.CONFIGURATION_NAME);
serializer.endDocument();
diff --git a/tests/src/com/android/tradefed/config/ConfigurationXmlParserTest.java b/tests/src/com/android/tradefed/config/ConfigurationXmlParserTest.java
index 5f24da7..b1043c7 100644
--- a/tests/src/com/android/tradefed/config/ConfigurationXmlParserTest.java
+++ b/tests/src/com/android/tradefed/config/ConfigurationXmlParserTest.java
@@ -36,7 +36,7 @@
protected void setUp() throws Exception {
super.setUp();
mMockLoader = EasyMock.createMock(IConfigDefLoader.class);
- xmlParser = new ConfigurationXmlParser(mMockLoader);
+ xmlParser = new ConfigurationXmlParser(mMockLoader, null);
}
/**
@@ -162,12 +162,15 @@
/**
* Test parsing a include tag.
*/
- @SuppressWarnings("unchecked")
public void testParse_include() throws ConfigurationException {
String includedName = "includeme";
ConfigurationDef configDef = new ConfigurationDef("foo");
- mMockLoader.loadIncludedConfiguration(EasyMock.eq(configDef), EasyMock.eq("foo"),
- EasyMock.eq(includedName), (Map<String, String>) EasyMock.anyObject());
+ mMockLoader.loadIncludedConfiguration(
+ EasyMock.eq(configDef),
+ EasyMock.eq("foo"),
+ EasyMock.eq(includedName),
+ EasyMock.anyObject(),
+ EasyMock.anyObject());
EasyMock.replay(mMockLoader);
final String config = "<include name=\"includeme\" />";
xmlParser.parse(configDef, "foo", getStringAsStream(config), null);
@@ -180,8 +183,8 @@
String includedName = "non-existent";
ConfigurationDef parent = new ConfigurationDef("name");
ConfigurationException exception = new ConfigurationException("I don't exist");
- mMockLoader.loadIncludedConfiguration(parent, "name", includedName,
- Collections.<String, String>emptyMap());
+ mMockLoader.loadIncludedConfiguration(
+ parent, "name", includedName, null, Collections.<String, String>emptyMap());
EasyMock.expectLastCall().andThrow(exception);
EasyMock.replay(mMockLoader);
final String config = String.format("<include name=\"%s\" />", includedName);
diff --git a/tests/src/com/android/tradefed/config/SandboxConfigurationFactoryTest.java b/tests/src/com/android/tradefed/config/SandboxConfigurationFactoryTest.java
new file mode 100644
index 0000000..c61065c
--- /dev/null
+++ b/tests/src/com/android/tradefed/config/SandboxConfigurationFactoryTest.java
@@ -0,0 +1,135 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.config;
+
+import static org.easymock.EasyMock.eq;
+import static org.junit.Assert.*;
+
+import com.android.tradefed.sandbox.ISandbox;
+import com.android.tradefed.sandbox.SandboxConfigDump;
+import com.android.tradefed.sandbox.SandboxConfigDump.DumpCmd;
+import com.android.tradefed.util.CommandResult;
+import com.android.tradefed.util.CommandStatus;
+import com.android.tradefed.util.FileUtil;
+import com.android.tradefed.util.IRunUtil;
+import com.android.tradefed.util.keystore.StubKeyStoreClient;
+
+import org.easymock.EasyMock;
+import org.easymock.IAnswer;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.io.File;
+import java.io.IOException;
+
+/** Unit tests for {@link SandboxConfigurationFactory}. */
+@RunWith(JUnit4.class)
+public class SandboxConfigurationFactoryTest {
+
+    private SandboxConfigurationFactory mFactory;
+    // Temp config file whose path is passed as the command line argument to the factory.
+    private File mConfig;
+    // Temp dir standing in for the sandbox Tradefed environment (used as the classpath root).
+    private File mTmpEnvDir;
+    private ISandbox mFakeSandbox;
+    private IRunUtil mMockRunUtil;
+
+    @Before
+    public void setUp() throws IOException {
+        mFactory = SandboxConfigurationFactory.getInstance();
+        mConfig = FileUtil.createTempFile("sandbox-config-test", ".xml");
+        mTmpEnvDir = FileUtil.createTempDir("sandbox-tmp-dir");
+        mFakeSandbox = EasyMock.createMock(ISandbox.class);
+        mMockRunUtil = EasyMock.createMock(IRunUtil.class);
+    }
+
+    @After
+    public void tearDown() {
+        // Clean up temp files created in setUp to avoid leaking disk space across tests.
+        FileUtil.recursiveDelete(mTmpEnvDir);
+        FileUtil.deleteFile(mConfig);
+    }
+
+    /**
+     * Expect the subprocess config-dump command and make it write a minimal valid configuration
+     * to the result file before returning the provided {@link CommandResult}.
+     * NOTE(review): the answer reads the result-file path from argument index 6 — this must stay
+     * in sync with the argument order of the expected runTimedCmd call above it.
+     */
+    private void expectDumpCmd(CommandResult res) {
+        EasyMock.expect(
+                        mMockRunUtil.runTimedCmd(
+                                EasyMock.anyLong(),
+                                eq("java"),
+                                eq("-cp"),
+                                eq(new File(mTmpEnvDir, "*").getAbsolutePath()),
+                                eq(SandboxConfigDump.class.getCanonicalName()),
+                                eq(DumpCmd.NON_VERSIONED_CONFIG.toString()),
+                                EasyMock.anyObject(),
+                                eq(mConfig.getAbsolutePath())))
+                .andAnswer(
+                        new IAnswer<CommandResult>() {
+                            @Override
+                            public CommandResult answer() throws Throwable {
+                                // Simulate the dump subprocess writing its output config file.
+                                String resFile = (String) EasyMock.getCurrentArguments()[6];
+                                FileUtil.writeToFile(
+                                        "<configuration><test class=\"com.android.tradefed.test"
+                                                + "type.StubTest\" /></configuration>",
+                                        new File(resFile));
+                                return res;
+                            }
+                        });
+    }
+
+    /**
+     * Test that creating a configuration using a sandbox properly create a {@link IConfiguration}.
+     */
+    @Test
+    public void testCreateConfigurationFromArgs() throws ConfigurationException {
+        String[] args = new String[] {mConfig.getAbsolutePath()};
+        EasyMock.expect(mFakeSandbox.getTradefedEnvironment(EasyMock.anyObject()))
+                .andReturn(mTmpEnvDir);
+        // The global config variable is cleared so the subprocess uses its own defaults.
+        mMockRunUtil.unsetEnvVariable(GlobalConfiguration.GLOBAL_CONFIG_VARIABLE);
+        CommandResult results = new CommandResult();
+        results.setStatus(CommandStatus.SUCCESS);
+        expectDumpCmd(results);
+        EasyMock.replay(mFakeSandbox, mMockRunUtil);
+        IConfiguration config =
+                mFactory.createConfigurationFromArgs(
+                        args, new StubKeyStoreClient(), mFakeSandbox, mMockRunUtil);
+        EasyMock.verify(mFakeSandbox, mMockRunUtil);
+        // The sandbox used for creation must be attached to the resulting configuration.
+        assertNotNull(config.getConfigurationObject(Configuration.SANDBOX_TYPE_NAME));
+        assertEquals(mFakeSandbox, config.getConfigurationObject(Configuration.SANDBOX_TYPE_NAME));
+    }
+
+    /** Test that when the dump config failed, we throw a ConfigurationException. */
+    @Test
+    public void testCreateConfigurationFromArgs_fail() throws ConfigurationException {
+        String[] args = new String[] {mConfig.getAbsolutePath()};
+        EasyMock.expect(mFakeSandbox.getTradefedEnvironment(EasyMock.anyObject()))
+                .andReturn(mTmpEnvDir);
+        mMockRunUtil.unsetEnvVariable(GlobalConfiguration.GLOBAL_CONFIG_VARIABLE);
+        CommandResult results = new CommandResult();
+        results.setStatus(CommandStatus.FAILED);
+        results.setStderr("I failed");
+        expectDumpCmd(results);
+        // in case of failure, tearDown is called right away for cleaning up
+        mFakeSandbox.tearDown();
+        EasyMock.replay(mFakeSandbox, mMockRunUtil);
+        try {
+            mFactory.createConfigurationFromArgs(
+                    args, new StubKeyStoreClient(), mFakeSandbox, mMockRunUtil);
+            fail("Should have thrown an exception.");
+        } catch (ConfigurationException expected) {
+            // expected
+        }
+        EasyMock.verify(mFakeSandbox, mMockRunUtil);
+    }
+}
diff --git a/tests/src/com/android/tradefed/device/DeviceManagerTest.java b/tests/src/com/android/tradefed/device/DeviceManagerTest.java
index 41acc6b..a2343f3 100644
--- a/tests/src/com/android/tradefed/device/DeviceManagerTest.java
+++ b/tests/src/com/android/tradefed/device/DeviceManagerTest.java
@@ -380,8 +380,11 @@
CommandResult fastbootResult = new CommandResult(CommandStatus.SUCCESS);
fastbootResult.setStdout("serial fastboot\n");
EasyMock.expect(
- mMockRunUtil.runTimedCmd(EasyMock.anyLong(), EasyMock.eq("fastboot"),
- EasyMock.eq("devices"))).andReturn(fastbootResult);
+ mMockRunUtil.runTimedCmdSilently(
+ EasyMock.anyLong(),
+ EasyMock.eq("fastboot"),
+ EasyMock.eq("devices")))
+ .andReturn(fastbootResult);
EasyMock.expect(mMockTestDevice.handleAllocationEvent(DeviceEvent.FORCE_AVAILABLE))
.andReturn(new DeviceEventResponse(DeviceAllocationState.Available, true));
EasyMock.expect(mMockTestDevice.handleAllocationEvent(DeviceEvent.ALLOCATE_REQUEST))
diff --git a/tests/src/com/android/tradefed/device/FastbootHelperTest.java b/tests/src/com/android/tradefed/device/FastbootHelperTest.java
index 1f5537c..52da00e 100644
--- a/tests/src/com/android/tradefed/device/FastbootHelperTest.java
+++ b/tests/src/com/android/tradefed/device/FastbootHelperTest.java
@@ -138,8 +138,12 @@
CommandResult fakeRes = new CommandResult(CommandStatus.FAILED);
fakeRes.setStdout("");
fakeRes.setStderr("");
- EasyMock.expect(mMockRunUtil.runTimedCmd(EasyMock.anyLong(),
- EasyMock.eq("fastboot"), EasyMock.eq("devices"))).andReturn(fakeRes);
+ EasyMock.expect(
+ mMockRunUtil.runTimedCmdSilently(
+ EasyMock.anyLong(),
+ EasyMock.eq("fastboot"),
+ EasyMock.eq("devices")))
+ .andReturn(fakeRes);
EasyMock.replay(mMockRunUtil);
assertTrue(mFastbootHelper.getDevices().isEmpty());
EasyMock.verify(mMockRunUtil);
diff --git a/tests/src/com/android/tradefed/device/TestDeviceFuncTest.java b/tests/src/com/android/tradefed/device/TestDeviceFuncTest.java
index 9e8a4b5..069c19d 100644
--- a/tests/src/com/android/tradefed/device/TestDeviceFuncTest.java
+++ b/tests/src/com/android/tradefed/device/TestDeviceFuncTest.java
@@ -691,7 +691,7 @@
assertTrue(image.getHeight() > 200);
} finally {
FileUtil.deleteFile(tmpPngFile);
- source.cancel();
+ source.close();
}
}
@@ -724,7 +724,7 @@
s.contains("testGetLogcat_size log dump 99"));
} finally {
FileUtil.deleteFile(tmpTxtFile);
- source.cancel();
+ source.close();
}
}
diff --git a/tests/src/com/android/tradefed/device/TestDeviceTest.java b/tests/src/com/android/tradefed/device/TestDeviceTest.java
index 31d17f7..9af4ec6 100644
--- a/tests/src/com/android/tradefed/device/TestDeviceTest.java
+++ b/tests/src/com/android/tradefed/device/TestDeviceTest.java
@@ -3293,4 +3293,68 @@
assertFalse(mTestDevice.isEncryptionSupported());
EasyMock.verify(mMockIDevice, mMockStateMonitor, mMockDvcMonitor);
}
+
+ /** Test when getting the heapdump is successful. */
+ public void testGetHeapDump() throws Exception {
+ mTestDevice =
+ new TestableTestDevice() {
+ @Override
+ public File pullFile(String remoteFilePath) throws DeviceNotAvailableException {
+ return new File("test");
+ }
+ };
+ injectShellResponse("pidof system_server", "929");
+ injectShellResponse("am dumpheap 929 /data/dump.hprof", "");
+ injectShellResponse("ls \"/data/dump.hprof\"", "/data/dump.hprof");
+ injectShellResponse("rm /data/dump.hprof", "");
+ EasyMock.replay(mMockIDevice, mMockRunUtil);
+ File res = mTestDevice.dumpHeap("system_server", "/data/dump.hprof");
+ assertNotNull(res);
+ EasyMock.verify(mMockIDevice, mMockRunUtil);
+ }
+
+ /** Test when we fail to get the process pid. */
+ public void testGetHeapDump_nopid() throws Exception {
+ injectShellResponse("pidof system_server", "\n");
+ EasyMock.replay(mMockIDevice, mMockRunUtil);
+ File res = mTestDevice.dumpHeap("system_server", "/data/dump.hprof");
+ assertNull(res);
+ EasyMock.verify(mMockIDevice, mMockRunUtil);
+ }
+
+ public void testGetHeapDump_nullPath() throws DeviceNotAvailableException {
+ try {
+ mTestDevice.dumpHeap("system_server", null);
+ fail("Should have thrown an exception");
+ } catch (IllegalArgumentException expected) {
+ // expected
+ }
+ }
+
+ public void testGetHeapDump_emptyPath() throws DeviceNotAvailableException {
+ try {
+ mTestDevice.dumpHeap("system_server", "");
+ fail("Should have thrown an exception");
+ } catch (IllegalArgumentException expected) {
+ // expected
+ }
+ }
+
+ public void testGetHeapDump_nullService() throws DeviceNotAvailableException {
+ try {
+ mTestDevice.dumpHeap(null, "/data/hprof");
+ fail("Should have thrown an exception");
+ } catch (IllegalArgumentException expected) {
+ // expected
+ }
+ }
+
+ public void testGetHeapDump_emptyService() throws DeviceNotAvailableException {
+ try {
+ mTestDevice.dumpHeap("", "/data/hprof");
+ fail("Should have thrown an exception");
+ } catch (IllegalArgumentException expected) {
+ // expected
+ }
+ }
}
diff --git a/tests/src/com/android/tradefed/device/metric/BaseDeviceMetricCollectorTest.java b/tests/src/com/android/tradefed/device/metric/BaseDeviceMetricCollectorTest.java
new file mode 100644
index 0000000..a1e7678
--- /dev/null
+++ b/tests/src/com/android/tradefed/device/metric/BaseDeviceMetricCollectorTest.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.device.metric;
+
+import static org.mockito.Mockito.times;
+
+import com.android.ddmlib.testrunner.TestIdentifier;
+import com.android.tradefed.invoker.IInvocationContext;
+import com.android.tradefed.invoker.InvocationContext;
+import com.android.tradefed.result.ByteArrayInputStreamSource;
+import com.android.tradefed.result.ITestInvocationListener;
+import com.android.tradefed.result.LogDataType;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mockito;
+
+import java.util.Collections;
+
+/** Unit tests for {@link BaseDeviceMetricCollector}. */
+@RunWith(JUnit4.class)
+public class BaseDeviceMetricCollectorTest {
+
+ private BaseDeviceMetricCollector mBase;
+ private IInvocationContext mContext;
+ private ITestInvocationListener mMockListener;
+
+ @Before
+ public void setUp() {
+ mBase = new BaseDeviceMetricCollector();
+ mContext = new InvocationContext();
+ mMockListener = Mockito.mock(ITestInvocationListener.class);
+ }
+
+ @Test
+ public void testInitAndForwarding() {
+ mBase.init(mContext, mMockListener);
+ mBase.invocationStarted(mContext);
+ mBase.testRunStarted("testRun", 1);
+ TestIdentifier test = new TestIdentifier("class", "method");
+ mBase.testStarted(test);
+ mBase.testLog("dataname", LogDataType.TEXT, new ByteArrayInputStreamSource("".getBytes()));
+ mBase.testFailed(test, "trace");
+ mBase.testAssumptionFailure(test, "trace");
+ mBase.testIgnored(test);
+ mBase.testEnded(test, Collections.emptyMap());
+ mBase.testRunFailed("test run failed");
+ mBase.testRunStopped(0l);
+ mBase.testRunEnded(0l, Collections.emptyMap());
+ mBase.invocationFailed(new Throwable());
+ mBase.invocationEnded(0l);
+
+ Mockito.verify(mMockListener, times(1)).invocationStarted(Mockito.any());
+ Mockito.verify(mMockListener, times(1)).testRunStarted("testRun", 1);
+ Mockito.verify(mMockListener, times(1)).testStarted(Mockito.eq(test), Mockito.anyLong());
+ Mockito.verify(mMockListener, times(1))
+ .testLog(Mockito.eq("dataname"), Mockito.eq(LogDataType.TEXT), Mockito.any());
+ Mockito.verify(mMockListener, times(1)).testFailed(test, "trace");
+ Mockito.verify(mMockListener, times(1)).testAssumptionFailure(test, "trace");
+ Mockito.verify(mMockListener, times(1)).testIgnored(test);
+ Mockito.verify(mMockListener, times(1))
+ .testEnded(Mockito.eq(test), Mockito.anyLong(), Mockito.eq(Collections.emptyMap()));
+ Mockito.verify(mMockListener, times(1)).testRunFailed("test run failed");
+ Mockito.verify(mMockListener, times(1)).testRunStopped(0l);
+ Mockito.verify(mMockListener, times(1)).testRunEnded(0l, Collections.emptyMap());
+ Mockito.verify(mMockListener, times(1)).invocationFailed(Mockito.any());
+ Mockito.verify(mMockListener, times(1)).invocationEnded(0l);
+
+ Assert.assertSame(mMockListener, mBase.getInvocationListener());
+ Assert.assertEquals(0, mBase.getDevices().size());
+ Assert.assertEquals(0, mBase.getBuildInfos().size());
+ }
+}
diff --git a/tests/src/com/android/tradefed/device/metric/ScheduledDeviceMetricCollectorTest.java b/tests/src/com/android/tradefed/device/metric/ScheduledDeviceMetricCollectorTest.java
new file mode 100644
index 0000000..32a5ad6
--- /dev/null
+++ b/tests/src/com/android/tradefed/device/metric/ScheduledDeviceMetricCollectorTest.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.device.metric;
+
+import static org.junit.Assert.assertTrue;
+
+import com.android.tradefed.config.OptionSetter;
+import com.android.tradefed.invoker.IInvocationContext;
+import com.android.tradefed.invoker.InvocationContext;
+import com.android.tradefed.result.ITestInvocationListener;
+import com.android.tradefed.util.RunUtil;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mockito;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/** Unit tests for {@link ScheduledDeviceMetricCollector}. */
+@RunWith(JUnit4.class)
+public class ScheduledDeviceMetricCollectorTest {
+
+ public static class TestableAsyncTimer extends ScheduledDeviceMetricCollector {
+ private int mInternalCounter = 0;
+
+ @Override
+ void collect(DeviceMetricData runData) throws InterruptedException {
+ mInternalCounter++;
+ runData.addStringMetric("key" + mInternalCounter, "value" + mInternalCounter);
+ }
+ }
+
+ private TestableAsyncTimer mBase;
+ private IInvocationContext mContext;
+ private ITestInvocationListener mMockListener;
+
+ @Before
+ public void setUp() {
+ mBase = new TestableAsyncTimer();
+ mContext = new InvocationContext();
+ mMockListener = Mockito.mock(ITestInvocationListener.class);
+ }
+
+ /** Test the periodic run of the collector once testRunStarted has been called. */
+ @Test
+ public void testSetupAndPeriodicRun() throws Exception {
+ OptionSetter setter = new OptionSetter(mBase);
+ // 100 ms interval
+ setter.setOptionValue("interval", "100");
+ Map<String, String> metrics = new HashMap<>();
+ mBase.init(mContext, mMockListener);
+ try {
+ mBase.testRunStarted("testRun", 1);
+ RunUtil.getDefault().sleep(500);
+ } finally {
+ mBase.testRunEnded(0l, metrics);
+ }
+ // Given 500msec to run at a 100msec interval, we should easily have at least three
+ // iterations
+ assertTrue(metrics.containsKey("key1"));
+ assertTrue(metrics.containsKey("key2"));
+ assertTrue(metrics.containsKey("key3"));
+ }
+}
diff --git a/tests/src/com/android/tradefed/invoker/ShardListenerTest.java b/tests/src/com/android/tradefed/invoker/ShardListenerTest.java
new file mode 100644
index 0000000..705b55c
--- /dev/null
+++ b/tests/src/com/android/tradefed/invoker/ShardListenerTest.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.invoker;
+
+import com.android.ddmlib.testrunner.TestIdentifier;
+import com.android.tradefed.build.BuildInfo;
+import com.android.tradefed.device.ITestDevice;
+import com.android.tradefed.result.ITestInvocationListener;
+
+import org.easymock.EasyMock;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.util.Collections;
+
+/** Unit tests for {@link ShardListener}. */
+@RunWith(JUnit4.class)
+public class ShardListenerTest {
+ private ShardListener mShardListener;
+ private ITestInvocationListener mMockListener;
+ private IInvocationContext mContext;
+ private ITestDevice mMockDevice;
+
+ @Before
+ public void setUp() {
+ mMockListener = EasyMock.createMock(ITestInvocationListener.class);
+ mShardListener = new ShardListener(mMockListener);
+ mMockDevice = EasyMock.createMock(ITestDevice.class);
+ EasyMock.expect(mMockDevice.getSerialNumber()).andStubReturn("serial");
+ mContext = new InvocationContext();
+ mContext.addDeviceBuildInfo("default", new BuildInfo());
+ mContext.addAllocatedDevice("default", mMockDevice);
+ }
+
+ /** Ensure that all the events given to the ShardListener are replayed on invocationEnded. */
+ @Test
+ public void testBufferAndReplay() {
+ mMockListener.invocationStarted(mContext);
+ mMockListener.testRunStarted("run1", 1);
+ TestIdentifier tid = new TestIdentifier("class1", "name1");
+ mMockListener.testStarted(tid, 0l);
+ mMockListener.testEnded(tid, 0l, Collections.emptyMap());
+ mMockListener.testRunEnded(0l, Collections.emptyMap());
+ mMockListener.invocationEnded(0l);
+
+ EasyMock.replay(mMockListener, mMockDevice);
+ mShardListener.invocationStarted(mContext);
+ mShardListener.testRunStarted("run1", 1);
+ mShardListener.testStarted(tid, 0l);
+ mShardListener.testEnded(tid, 0l, Collections.emptyMap());
+ mShardListener.testRunEnded(0l, Collections.emptyMap());
+ mShardListener.invocationEnded(0l);
+ EasyMock.verify(mMockListener, mMockDevice);
+ }
+
+ /** Test that the buffering of events is properly done in respect to the modules too. */
+ @Test
+ public void testBufferAndReplay_withModule() {
+ IInvocationContext module1 = new InvocationContext();
+ IInvocationContext module2 = new InvocationContext();
+ mMockListener.invocationStarted(mContext);
+ mMockListener.testModuleStarted(module1);
+ mMockListener.testRunStarted("run1", 1);
+ TestIdentifier tid = new TestIdentifier("class1", "name1");
+ mMockListener.testStarted(tid, 0l);
+ mMockListener.testEnded(tid, 0l, Collections.emptyMap());
+ mMockListener.testRunEnded(0l, Collections.emptyMap());
+ mMockListener.testRunStarted("run2", 1);
+ mMockListener.testStarted(tid, 0l);
+ mMockListener.testEnded(tid, 0l, Collections.emptyMap());
+ mMockListener.testRunEnded(0l, Collections.emptyMap());
+ mMockListener.testModuleEnded();
+ // expectation on second module
+ mMockListener.testModuleStarted(module2);
+ mMockListener.testRunStarted("run3", 1);
+ mMockListener.testStarted(tid, 0l);
+ mMockListener.testEnded(tid, 0l, Collections.emptyMap());
+ mMockListener.testRunEnded(0l, Collections.emptyMap());
+ mMockListener.testModuleEnded();
+ mMockListener.invocationEnded(0l);
+
+ EasyMock.replay(mMockListener, mMockDevice);
+ mShardListener.invocationStarted(mContext);
+ // 1st module
+ mShardListener.testModuleStarted(module1);
+ mShardListener.testRunStarted("run1", 1);
+ mShardListener.testStarted(tid, 0l);
+ mShardListener.testEnded(tid, 0l, Collections.emptyMap());
+ mShardListener.testRunEnded(0l, Collections.emptyMap());
+ mShardListener.testRunStarted("run2", 1);
+ mShardListener.testStarted(tid, 0l);
+ mShardListener.testEnded(tid, 0l, Collections.emptyMap());
+ mShardListener.testRunEnded(0l, Collections.emptyMap());
+ mShardListener.testModuleEnded();
+ // 2nd module
+ mShardListener.testModuleStarted(module2);
+ mShardListener.testRunStarted("run3", 1);
+ mShardListener.testStarted(tid, 0l);
+ mShardListener.testEnded(tid, 0l, Collections.emptyMap());
+ mShardListener.testRunEnded(0l, Collections.emptyMap());
+ mShardListener.testModuleEnded();
+
+ mShardListener.invocationEnded(0l);
+ EasyMock.verify(mMockListener, mMockDevice);
+ }
+}
diff --git a/tests/src/com/android/tradefed/invoker/TestInvocationMultiTest.java b/tests/src/com/android/tradefed/invoker/TestInvocationMultiTest.java
index 2307f1d..9cd65dd 100644
--- a/tests/src/com/android/tradefed/invoker/TestInvocationMultiTest.java
+++ b/tests/src/com/android/tradefed/invoker/TestInvocationMultiTest.java
@@ -21,6 +21,7 @@
import com.android.tradefed.build.IBuildProvider;
import com.android.tradefed.command.CommandOptions;
import com.android.tradefed.command.CommandRunner.ExitCode;
+import com.android.tradefed.config.ConfigurationDescriptor;
import com.android.tradefed.config.DeviceConfigurationHolder;
import com.android.tradefed.config.IConfiguration;
import com.android.tradefed.device.ITestDevice;
@@ -54,6 +55,7 @@
private ILogSaver mMockLogSaver;
private ILeveledLogOutput mMockLogger;
private ILogRegistry mMockLogRegistry;
+ private ConfigurationDescriptor mConfigDesc;
private ITestDevice mDevice1;
private ITestDevice mDevice2;
@@ -69,6 +71,7 @@
mMockLogSaver = EasyMock.createMock(ILogSaver.class);
mMockLogger = EasyMock.createMock(ILeveledLogOutput.class);
mMockLogRegistry = EasyMock.createMock(ILogRegistry.class);
+ mConfigDesc = new ConfigurationDescriptor();
mInvocation =
new TestInvocation() {
@Override
@@ -130,6 +133,7 @@
EasyMock.expect(mMockConfig.getProfiler()).andReturn(null);
EasyMock.expect(mMockConfig.getLogSaver()).andReturn(mMockLogSaver);
EasyMock.expect(mMockConfig.getLogOutput()).andReturn(mMockLogger).times(4);
+ EasyMock.expect(mMockConfig.getConfigurationDescription()).andReturn(mConfigDesc);
mMockLogger.init();
mMockLogger.closeLog();
@@ -185,6 +189,7 @@
EasyMock.expect(mMockConfig.getProfiler()).andReturn(null);
EasyMock.expect(mMockConfig.getLogSaver()).andReturn(mMockLogSaver);
EasyMock.expect(mMockConfig.getLogOutput()).andStubReturn(mMockLogger);
+ EasyMock.expect(mMockConfig.getConfigurationDescription()).andReturn(mConfigDesc);
mMockLogger.init();
EasyMock.expect(mMockLogger.getLog())
.andReturn(new ByteArrayInputStreamSource("fake".getBytes()));
@@ -263,6 +268,7 @@
EasyMock.expect(mMockConfig.getProfiler()).andReturn(null);
EasyMock.expect(mMockConfig.getLogSaver()).andReturn(mMockLogSaver);
EasyMock.expect(mMockConfig.getLogOutput()).andStubReturn(mMockLogger);
+ EasyMock.expect(mMockConfig.getConfigurationDescription()).andReturn(mConfigDesc);
mMockLogger.init();
EasyMock.expect(mMockLogger.getLog())
.andReturn(new ByteArrayInputStreamSource("fake".getBytes()));
diff --git a/tests/src/com/android/tradefed/invoker/TestInvocationTest.java b/tests/src/com/android/tradefed/invoker/TestInvocationTest.java
index 963b985..b03a479 100644
--- a/tests/src/com/android/tradefed/invoker/TestInvocationTest.java
+++ b/tests/src/com/android/tradefed/invoker/TestInvocationTest.java
@@ -18,6 +18,7 @@
import static org.mockito.Mockito.doReturn;
import com.android.ddmlib.IDevice;
+import com.android.ddmlib.testrunner.TestIdentifier;
import com.android.tradefed.build.BuildInfo;
import com.android.tradefed.build.BuildRetrievalError;
import com.android.tradefed.build.IBuildInfo;
@@ -44,6 +45,9 @@
import com.android.tradefed.device.ITestDevice.RecoveryMode;
import com.android.tradefed.device.StubDevice;
import com.android.tradefed.device.TestDeviceOptions;
+import com.android.tradefed.device.metric.BaseDeviceMetricCollector;
+import com.android.tradefed.device.metric.IMetricCollector;
+import com.android.tradefed.device.metric.DeviceMetricData;
import com.android.tradefed.invoker.shard.IShardHelper;
import com.android.tradefed.invoker.shard.ShardHelper;
import com.android.tradefed.invoker.shard.StrictShardHelper;
@@ -72,6 +76,7 @@
import com.android.tradefed.testtype.IRetriableTest;
import com.android.tradefed.testtype.IShardableTest;
import com.android.tradefed.testtype.IStrictShardableTest;
+import com.android.tradefed.testtype.StubTest;
import com.android.tradefed.util.FileUtil;
import com.google.common.util.concurrent.SettableFuture;
@@ -1533,7 +1538,7 @@
(InputStreamSource) EasyMock.anyObject());
EasyMock.replay(device1, listener);
- mTestInvocation.doSetup(mStubConfiguration, context, listener);
+ mTestInvocation.doSetup(context, mStubConfiguration, listener);
EasyMock.verify(device1, listener);
}
@@ -1564,7 +1569,7 @@
(InputStreamSource) EasyMock.anyObject());
EasyMock.replay(device1, listener);
- mTestInvocation.doSetup(mStubConfiguration, context, listener);
+ mTestInvocation.doSetup(context, mStubConfiguration, listener);
EasyMock.verify(device1, listener);
}
@@ -1657,4 +1662,54 @@
FileUtil.recursiveDelete(tmpExternalTestsDir);
}
}
+
+ private class TestableCollector extends BaseDeviceMetricCollector {
+
+ private String mName;
+
+ public TestableCollector(String name) {
+ mName = name;
+ }
+
+ @Override
+ public void onTestRunEnd(DeviceMetricData runData) {
+ runData.addStringMetric(mName, mName);
+ }
+ }
+
+ /**
+ * Test that when {@link IMetricCollector} are used, they wrap and call in sequence the listener
+ * so all metrics end up on the final receiver.
+ */
+ public void testMetricCollectionChain() throws Exception {
+ IConfiguration configuration = new Configuration("test", "description");
+ StubTest test = new StubTest();
+ OptionSetter setter = new OptionSetter(test);
+ setter.setOptionValue("run-a-test", "true");
+ configuration.setTest(test);
+
+ List<IMetricCollector> collectors = new ArrayList<>();
+ collectors.add(new TestableCollector("collector1"));
+ collectors.add(new TestableCollector("collector2"));
+ collectors.add(new TestableCollector("collector3"));
+ collectors.add(new TestableCollector("collector4"));
+ configuration.setDeviceMetricCollectors(collectors);
+
+ mMockTestListener.testRunStarted("TestStub", 1);
+ TestIdentifier testId = new TestIdentifier("StubTest", "StubMethod");
+ mMockTestListener.testStarted(EasyMock.eq(testId), EasyMock.anyLong());
+ mMockTestListener.testEnded(
+ EasyMock.eq(testId), EasyMock.anyLong(), EasyMock.eq(Collections.emptyMap()));
+ Capture<Map<String, String>> captured = new Capture<>();
+ mMockTestListener.testRunEnded(EasyMock.anyLong(), EasyMock.capture(captured));
+ EasyMock.replay(mMockTestListener);
+ mTestInvocation.runTests(mStubInvocationMetadata, configuration, mMockTestListener);
+ EasyMock.verify(mMockTestListener);
+ // The collectors are called in sequence
+ List<String> listKeys = new ArrayList<>(captured.getValue().keySet());
+ assertEquals("collector4", listKeys.get(0));
+ assertEquals("collector3", listKeys.get(1));
+ assertEquals("collector2", listKeys.get(2));
+ assertEquals("collector1", listKeys.get(3));
+ }
}
diff --git a/tests/src/com/android/tradefed/profiler/AggregatingProfilerTest.java b/tests/src/com/android/tradefed/profiler/AggregatingProfilerTest.java
index e3bad75..596ce32 100644
--- a/tests/src/com/android/tradefed/profiler/AggregatingProfilerTest.java
+++ b/tests/src/com/android/tradefed/profiler/AggregatingProfilerTest.java
@@ -162,7 +162,7 @@
metric1.put("hello1", 1.0);
mProfiler.setAggregateMetrics(metric1);
ITestInvocationListener mockListener = EasyMock.createMock(ITestInvocationListener.class);
- mockListener.testLog((String)EasyMock.anyObject(), EasyMock.eq(LogDataType.TEXT),
+ mockListener.testLog((String)EasyMock.anyObject(), EasyMock.eq(LogDataType.MUGSHOT_LOG),
(InputStreamSource)EasyMock.anyObject());
EasyMock.replay(mockListener);
mProfiler.reportAllMetrics(mockListener);
diff --git a/tests/src/com/android/tradefed/profiler/recorder/NumericAggregateFunctionTest.java b/tests/src/com/android/tradefed/profiler/recorder/NumericAggregateFunctionTest.java
new file mode 100644
index 0000000..3bc7c18
--- /dev/null
+++ b/tests/src/com/android/tradefed/profiler/recorder/NumericAggregateFunctionTest.java
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.tradefed.profiler.recorder;
+
+import org.junit.Assert;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.util.function.BiFunction;
+
+/** Unit tests for {@link NumericAggregateFunction}. */
+@RunWith(JUnit4.class)
+public class NumericAggregateFunctionTest {
+ private static final double EPSILON = 1E-6;
+
+ @Test
+ public void testMergeFunctionAvg() throws Exception {
+ BiFunction<Double, Double, Double> f =
+ new NumericAggregateFunction(MetricType.AVG).getFunction();
+ double foo = 0;
+ foo = f.apply(foo, -2d);
+ Assert.assertEquals(-2d, foo, EPSILON);
+ foo = f.apply(foo, 5d);
+ Assert.assertEquals(1.5d, foo, EPSILON);
+ foo = f.apply(foo, 3d);
+ Assert.assertEquals(2d, foo, EPSILON);
+ }
+
+ @Test
+ public void testMergeFunctionAvgMultipleInstances() throws Exception {
+ BiFunction<Double, Double, Double> f_foo =
+ new NumericAggregateFunction(MetricType.AVG).getFunction();
+ BiFunction<Double, Double, Double> f_bar =
+ new NumericAggregateFunction(MetricType.AVG).getFunction();
+ double foo = 0;
+ foo = f_foo.apply(foo, -2d);
+ Assert.assertEquals(-2d, foo, EPSILON);
+ foo = f_foo.apply(foo, 5d);
+ Assert.assertEquals(1.5d, foo, EPSILON);
+
+ double bar = 0;
+ bar = f_bar.apply(bar, 100d);
+ Assert.assertEquals(100d, bar, EPSILON);
+
+ foo = f_foo.apply(foo, 3d);
+ Assert.assertEquals(2d, foo, EPSILON);
+
+ bar = f_bar.apply(bar, 200d);
+ Assert.assertEquals(150d, bar, EPSILON);
+ }
+
+ @Test
+ public void testMergeFunctionAvgTime() throws Exception {
+ BiFunction<Double, Double, Double> f =
+ new NumericAggregateFunction(MetricType.AVGTIME).getFunction();
+ double foo = 0;
+ foo = f.apply(foo, -2d);
+ Assert.assertEquals(-2d, foo, EPSILON);
+ foo = f.apply(foo, 5d);
+ Assert.assertEquals(1.5d, foo, EPSILON);
+ foo = f.apply(foo, 3d);
+ Assert.assertEquals(2d, foo, EPSILON);
+ }
+
+ @Test
+ public void testMergeFunctionCount() throws Exception {
+ BiFunction<Double, Double, Double> f =
+ new NumericAggregateFunction(MetricType.COUNT).getFunction();
+ double foo = 0;
+ foo = f.apply(foo, -2d);
+ Assert.assertEquals(1d, foo, EPSILON);
+ foo = f.apply(foo, 5d);
+ Assert.assertEquals(2d, foo, EPSILON);
+ }
+
+ @Test
+ public void testMergeFunctionCountPositive() throws Exception {
+ BiFunction<Double, Double, Double> f =
+ new NumericAggregateFunction(MetricType.COUNTPOS).getFunction();
+ double foo = 0;
+ foo = f.apply(foo, -2d);
+ Assert.assertEquals(0d, foo, EPSILON);
+ foo = f.apply(foo, 5d);
+ Assert.assertEquals(1d, foo, EPSILON);
+ }
+
+ @Test
+ public void testMergeFunctionMax() throws Exception {
+ BiFunction<Double, Double, Double> f =
+ new NumericAggregateFunction(MetricType.MAX).getFunction();
+ double foo = 0;
+ foo = f.apply(foo, -2d);
+ Assert.assertEquals(0, foo, EPSILON);
+ foo = f.apply(foo, 5d);
+ Assert.assertEquals(5d, foo, EPSILON);
+ }
+
+ @Test
+ public void testMergeFunctionMin() throws Exception {
+ BiFunction<Double, Double, Double> f =
+ new NumericAggregateFunction(MetricType.MIN).getFunction();
+ double foo = 0;
+ foo = f.apply(foo, -2d);
+ Assert.assertEquals(-2d, foo, EPSILON);
+ foo = f.apply(foo, 5d);
+ Assert.assertEquals(-2d, foo, EPSILON);
+ }
+
+ @Test
+ public void testMergeFunctionSum() throws Exception {
+ BiFunction<Double, Double, Double> f =
+ new NumericAggregateFunction(MetricType.SUM).getFunction();
+ double foo = 0;
+ foo = f.apply(foo, -2d);
+ Assert.assertEquals(-2d, foo, EPSILON);
+ foo = f.apply(foo, 5d);
+ Assert.assertEquals(3d, foo, EPSILON);
+ }
+}
diff --git a/tests/src/com/android/tradefed/result/SnapshotInputStreamSourceTest.java b/tests/src/com/android/tradefed/result/SnapshotInputStreamSourceTest.java
index 943d400..4d5b8fc 100644
--- a/tests/src/com/android/tradefed/result/SnapshotInputStreamSourceTest.java
+++ b/tests/src/com/android/tradefed/result/SnapshotInputStreamSourceTest.java
@@ -34,7 +34,7 @@
}
/**
- * Ensure that the {@link SnapshotInputStreamSource#cancel()} method cleans up the backing file
+ * Ensure that the {@link SnapshotInputStreamSource#close()} method cleans up the backing file
* as expected
*/
@SuppressWarnings("serial")
@@ -56,7 +56,7 @@
};
try {
- source.cancel();
+ source.close();
fail("Fake file was not deleted");
} catch (RuntimeException e) {
if (!deletedMsg.equals(e.getMessage())) {
diff --git a/tests/src/com/android/tradefed/sandbox/SandboxConfigDumpTest.java b/tests/src/com/android/tradefed/sandbox/SandboxConfigDumpTest.java
new file mode 100644
index 0000000..ff9b7e7
--- /dev/null
+++ b/tests/src/com/android/tradefed/sandbox/SandboxConfigDumpTest.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.sandbox;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import com.android.tradefed.sandbox.SandboxConfigDump.DumpCmd;
+import com.android.tradefed.util.FileUtil;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.io.File;
+
+/** Unit tests for {@link com.android.tradefed.sandbox.SandboxConfigDump}. */
+@RunWith(JUnit4.class)
+public class SandboxConfigDumpTest {
+ private SandboxConfigDump mConfigDump;
+ private File mOutputFile;
+
+ @Before
+ public void setUp() throws Exception {
+ mConfigDump = new SandboxConfigDump();
+ mOutputFile = FileUtil.createTempFile("temp-file-config", ".xml");
+ }
+
+ @After
+ public void tearDown() {
+ FileUtil.deleteFile(mOutputFile);
+ }
+
+ /**
+ * Test {@link com.android.tradefed.sandbox.SandboxConfigDump#parse(String[])} parse and output
+ * a full xml based on the command line.
+ */
+ @Test
+ public void testParseCommandLine() throws Exception {
+ String[] commandLine =
+ new String[] {DumpCmd.FULL_XML.toString(), mOutputFile.getAbsolutePath(), "empty"};
+ int res = mConfigDump.parse(commandLine);
+ assertEquals(0, res);
+ String output = FileUtil.readStringFromFile(mOutputFile);
+ assertTrue(!output.isEmpty());
+ assertTrue(output.contains("<test class"));
+ assertTrue(
+ output.contains(
+ "<result_reporter class=\"com.android.tradefed.result."
+ + "TextResultReporter\""));
+ }
+
+ /**
+ * Test {@link com.android.tradefed.sandbox.SandboxConfigDump#parse(String[])} parse and output
+ * a partial xml without versioned objects (test, target_prep, multi_target_prep).
+ */
+ @Test
+ public void testParseCommandLine_filtered() throws Exception {
+ String[] commandLine =
+ new String[] {
+ DumpCmd.NON_VERSIONED_CONFIG.toString(), mOutputFile.getAbsolutePath(), "empty"
+ };
+ int res = mConfigDump.parse(commandLine);
+ assertEquals(0, res);
+ String output = FileUtil.readStringFromFile(mOutputFile);
+ assertTrue(!output.isEmpty());
+ assertFalse(output.contains("<test class"));
+ assertTrue(
+ output.contains(
+ "<result_reporter class=\"com.android.tradefed.result."
+ + "TextResultReporter\""));
+ }
+
+ /**
+ * Test {@link com.android.tradefed.sandbox.SandboxConfigDump#parse(String[])} parse and output
+ * an xml meant to be run in subprocess, the subprocess result reporter has been added.
+ */
+ @Test
+ public void testParseCommandLine_run() throws Exception {
+ String[] commandLine =
+ new String[] {
+ DumpCmd.RUN_CONFIG.toString(), mOutputFile.getAbsolutePath(), "empty"
+ };
+ int res = mConfigDump.parse(commandLine);
+ assertEquals(0, res);
+ String output = FileUtil.readStringFromFile(mOutputFile);
+ assertTrue(!output.isEmpty());
+ assertTrue(output.contains("<test class"));
+ assertTrue(
+ output.contains(
+ "<result_reporter class=\"com.android.tradefed.result."
+ + "SubprocessResultsReporter\""));
+ }
+}
diff --git a/tests/src/com/android/tradefed/sandbox/SandboxConfigUtilTest.java b/tests/src/com/android/tradefed/sandbox/SandboxConfigUtilTest.java
new file mode 100644
index 0000000..f82a1d9
--- /dev/null
+++ b/tests/src/com/android/tradefed/sandbox/SandboxConfigUtilTest.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.sandbox;
+
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.doReturn;
+
+import com.android.tradefed.config.ConfigurationException;
+import com.android.tradefed.sandbox.SandboxConfigDump.DumpCmd;
+import com.android.tradefed.util.CommandResult;
+import com.android.tradefed.util.CommandStatus;
+import com.android.tradefed.util.FileUtil;
+import com.android.tradefed.util.IRunUtil;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mockito;
+
+import java.io.File;
+
+/** Unit tests for {@link com.android.tradefed.sandbox.SandboxConfigUtil}. */
+@RunWith(JUnit4.class)
+public class SandboxConfigUtilTest {
+
+ private IRunUtil mMockRunUtil;
+
+ @Before
+ public void setUp() {
+ mMockRunUtil = Mockito.mock(IRunUtil.class);
+ }
+
+ /**
+ * Test {@link com.android.tradefed.sandbox.SandboxConfigUtil#dumpConfigForVersion(File,
+ * IRunUtil, String[], DumpCmd)} for a success case when the command returns a valid file.
+ */
+ @Test
+ public void testDumpVersion() throws Exception {
+ CommandResult result = new CommandResult();
+ result.setStatus(CommandStatus.SUCCESS);
+ doReturn(result).when(mMockRunUtil).runTimedCmd(Mockito.anyLong(), Mockito.any());
+ File res = null;
+ try {
+ res =
+ SandboxConfigUtil.dumpConfigForVersion(
+ new File(""), mMockRunUtil, new String[] {"empty"}, DumpCmd.FULL_XML);
+ assertNotNull(res);
+ } finally {
+ FileUtil.deleteFile(res);
+ }
+ }
+
+ /**
+ * Test {@link com.android.tradefed.sandbox.SandboxConfigUtil#dumpConfigForVersion(File,
+ * IRunUtil, String[], DumpCmd)} for a failure case where the command throws an exception.
+ */
+ @Test
+ public void testDumpVersion_failed() throws Exception {
+ CommandResult result = new CommandResult();
+ result.setStatus(CommandStatus.FAILED);
+ result.setStderr("Ouch I failed");
+ doReturn(result).when(mMockRunUtil).runTimedCmd(Mockito.anyLong(), Mockito.any());
+ try {
+ SandboxConfigUtil.dumpConfigForVersion(
+ new File(""), mMockRunUtil, new String[] {"empty"}, DumpCmd.FULL_XML);
+ fail("Should have thrown an exception.");
+ } catch (ConfigurationException expected) {
+ assertEquals("Ouch I failed", expected.getMessage());
+ }
+ }
+}
diff --git a/tests/src/com/android/tradefed/sandbox/TradefedSandboxTest.java b/tests/src/com/android/tradefed/sandbox/TradefedSandboxTest.java
new file mode 100644
index 0000000..0a64028
--- /dev/null
+++ b/tests/src/com/android/tradefed/sandbox/TradefedSandboxTest.java
@@ -0,0 +1,172 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.sandbox;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import com.android.tradefed.config.ConfigurationException;
+import com.android.tradefed.config.IConfiguration;
+import com.android.tradefed.invoker.IInvocationContext;
+import com.android.tradefed.invoker.InvocationContext;
+import com.android.tradefed.result.ITestInvocationListener;
+import com.android.tradefed.util.CommandResult;
+import com.android.tradefed.util.CommandStatus;
+import com.android.tradefed.util.FileUtil;
+import com.android.tradefed.util.IRunUtil;
+
+import org.easymock.EasyMock;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.io.File;
+
+/** Unit tests for {@link com.android.tradefed.sandbox.TradefedSandbox}. */
+@RunWith(JUnit4.class)
+public class TradefedSandboxTest {
+ private static final String TF_JAR_DIR = "TF_JAR_DIR";
+ private String mCachedProperty;
+ private File mTmpFolder;
+
+ private TradefedSandbox mSandbox;
+ private ITestInvocationListener mMockListener;
+ private IConfiguration mMockConfig;
+ private IInvocationContext mMockContext;
+ private IRunUtil mMockRunUtil;
+
+ @Before
+ public void setUp() throws Exception {
+ mMockRunUtil = EasyMock.createMock(IRunUtil.class);
+ mSandbox =
+ new TradefedSandbox() {
+ @Override
+ IRunUtil createRunUtil() {
+ return mMockRunUtil;
+ }
+ };
+ mMockListener = EasyMock.createMock(ITestInvocationListener.class);
+ mMockConfig = EasyMock.createMock(IConfiguration.class);
+ mMockContext = new InvocationContext();
+
+ mTmpFolder = FileUtil.createTempDir("tmp-tf-jar-dir");
+
+ if (System.getProperty(TF_JAR_DIR) != null) {
+ mCachedProperty = System.getProperty(TF_JAR_DIR);
+ }
+ System.setProperty(TF_JAR_DIR, mTmpFolder.getAbsolutePath());
+ }
+
+ @After
+ public void tearDown() {
+ if (mCachedProperty != null) {
+ System.setProperty(TF_JAR_DIR, mCachedProperty);
+ }
+ FileUtil.recursiveDelete(mTmpFolder);
+ mSandbox.tearDown();
+ }
+
+ /**
+ * Test a case where the {@link
+ * com.android.tradefed.sandbox.TradefedSandbox#prepareEnvironment(IInvocationContext,
+ * IConfiguration, ITestInvocationListener)} succeeds and does not throw any exception.
+ */
+ @Test
+ public void testPrepareEnvironment() throws Exception {
+ mMockRunUtil.unsetEnvVariable(TradefedSandbox.TF_GLOBAL_CONFIG);
+ CommandResult result = new CommandResult();
+ result.setStatus(CommandStatus.SUCCESS);
+ EasyMock.expect(
+ mMockRunUtil.runTimedCmd(
+ EasyMock.anyLong(),
+ EasyMock.eq("java"),
+ EasyMock.eq("-cp"),
+ EasyMock.anyObject(),
+ EasyMock.eq(SandboxConfigDump.class.getCanonicalName()),
+ EasyMock.eq("RUN_CONFIG"),
+ EasyMock.anyObject(),
+ EasyMock.eq("empty"),
+ EasyMock.eq("--arg"),
+ EasyMock.eq("1")))
+ .andReturn(result);
+ setPrepareConfigurationExpectations();
+ EasyMock.replay(mMockConfig, mMockListener, mMockRunUtil);
+ Exception res = mSandbox.prepareEnvironment(mMockContext, mMockConfig, mMockListener);
+ EasyMock.verify(mMockConfig, mMockListener, mMockRunUtil);
+ assertNull(res);
+ }
+
+ /**
+ * Test a case where the {@link
+ * com.android.tradefed.sandbox.TradefedSandbox#prepareEnvironment(IInvocationContext,
+ * IConfiguration, ITestInvocationListener)} fails to dump the configuration; in that case the
+ * std err from the dump utility is used for the exception message.
+ */
+ @Test
+ public void testPrepareEnvironment_dumpConfigFail() throws Exception {
+ mMockRunUtil.unsetEnvVariable(TradefedSandbox.TF_GLOBAL_CONFIG);
+ CommandResult result = new CommandResult();
+ result.setStatus(CommandStatus.FAILED);
+ result.setStderr("Ouch I failed.");
+ EasyMock.expect(
+ mMockRunUtil.runTimedCmd(
+ EasyMock.anyLong(),
+ EasyMock.eq("java"),
+ EasyMock.eq("-cp"),
+ EasyMock.anyObject(),
+ EasyMock.eq(SandboxConfigDump.class.getCanonicalName()),
+ EasyMock.eq("RUN_CONFIG"),
+ EasyMock.anyObject(),
+ EasyMock.eq("empty"),
+ EasyMock.eq("--arg"),
+ EasyMock.eq("1")))
+ .andReturn(result);
+ setPrepareConfigurationExpectations();
+ EasyMock.replay(mMockConfig, mMockListener, mMockRunUtil);
+ Exception res = mSandbox.prepareEnvironment(mMockContext, mMockConfig, mMockListener);
+ EasyMock.verify(mMockConfig, mMockListener, mMockRunUtil);
+ assertNotNull(res);
+ assertTrue(res instanceof ConfigurationException);
+ assertEquals("Ouch I failed.", res.getMessage());
+ }
+
+ /**
+ * Test a case where the {@link
+ * com.android.tradefed.sandbox.TradefedSandbox#prepareEnvironment(IInvocationContext,
+ * IConfiguration, ITestInvocationListener)} throws an exception because TF_JAR_DIR was not set.
+ */
+ @Test
+ public void testPrepareEnvironment_noTfDirJar() throws Exception {
+ mMockRunUtil.unsetEnvVariable(TradefedSandbox.TF_GLOBAL_CONFIG);
+ EasyMock.expect(mMockConfig.getCommandLine()).andReturn("empty --arg 1");
+ System.setProperty(TF_JAR_DIR, "");
+ EasyMock.replay(mMockConfig, mMockListener, mMockRunUtil);
+ Exception res = mSandbox.prepareEnvironment(mMockContext, mMockConfig, mMockListener);
+ EasyMock.verify(mMockConfig, mMockListener, mMockRunUtil);
+ assertNotNull(res);
+ assertTrue(res instanceof ConfigurationException);
+ assertEquals(
+ "Could not read TF_JAR_DIR to get current Tradefed instance.", res.getMessage());
+ }
+
+ private void setPrepareConfigurationExpectations() throws Exception {
+ EasyMock.expect(mMockConfig.getCommandLine()).andReturn("empty --arg 1").times(2);
+ }
+}
diff --git a/tests/src/com/android/tradefed/suite/checker/ActivityStatusCheckerTest.java b/tests/src/com/android/tradefed/suite/checker/ActivityStatusCheckerTest.java
new file mode 100644
index 0000000..cf7a085
--- /dev/null
+++ b/tests/src/com/android/tradefed/suite/checker/ActivityStatusCheckerTest.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.tradefed.suite.checker;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import com.android.tradefed.device.ITestDevice;
+import com.android.tradefed.log.ITestLogger;
+import com.android.tradefed.result.ByteArrayInputStreamSource;
+import com.android.tradefed.result.InputStreamSource;
+import com.android.tradefed.result.LogDataType;
+
+import org.easymock.EasyMock;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link ActivityStatusChecker}. */
+@RunWith(JUnit4.class)
+public class ActivityStatusCheckerTest {
+ private ActivityStatusChecker mChecker;
+ private ITestLogger mMockLogger;
+ private ITestDevice mMockDevice;
+
+ @Before
+ public void setUp() {
+ mMockLogger = EasyMock.createStrictMock(ITestLogger.class);
+ mChecker = new ActivityStatusChecker();
+ mChecker.setTestLogger(mMockLogger);
+ mMockDevice = EasyMock.createStrictMock(ITestDevice.class);
+ }
+
+ /** Test that the status checker succeeds if the home/launcher activity is on top. */
+ @Test
+ public void testCheckerLauncherHomeScreen() throws Exception {
+ EasyMock.expect(mMockDevice.executeShellCommand(EasyMock.anyObject()))
+ .andReturn(
+ " mCurrentFocus=Window{46dd15 u0 com.google.android.apps.nexuslauncher/"
+ + "com.google.android.apps.nexuslauncher.NexusLauncherActivity}\n"
+ + " mFocusedApp=AppWindowToken{37e3c39 token=Token{312ce85 ActivityRecord{a9437fc "
+ + "u0 com.google.android.apps.nexuslauncher/.NexusLauncherActivity t2}}}");
+ EasyMock.replay(mMockLogger, mMockDevice);
+ assertTrue(mChecker.postExecutionCheck(mMockDevice));
+ EasyMock.verify(mMockLogger, mMockDevice);
+ }
+
+ /** Test that if another activity is on top, the checker fails and a screenshot is logged. */
+ @Test
+ public void testCheckerOtherActivity() throws Exception {
+ EasyMock.expect(mMockDevice.executeShellCommand(EasyMock.anyObject()))
+ .andReturn(
+ "mCurrentFocus=Window{52b89df u0 com.android.chrome/org.chromium.chrome."
+ + "browser.ChromeTabbedActivity}\n"
+ + " mFocusedApp=AppWindowToken{955b485 token=Token{6bebd1b ActivityRecord{fd30b2a "
+ + "u0 com.android.chrome/org.chromium.chrome.browser.ChromeTabbedActivity t7}}}");
+ InputStreamSource fake = new ByteArrayInputStreamSource("fakedata".getBytes());
+ EasyMock.expect(mMockDevice.getScreenshot(EasyMock.anyObject())).andReturn(fake);
+ mMockLogger.testLog("status_checker_front_activity", LogDataType.JPEG, fake);
+ EasyMock.replay(mMockLogger, mMockDevice);
+ assertFalse(mChecker.postExecutionCheck(mMockDevice));
+ EasyMock.verify(mMockLogger, mMockDevice);
+ }
+}
diff --git a/tests/src/com/android/tradefed/targetprep/InstallAllTestZipAppsSetupTest.java b/tests/src/com/android/tradefed/targetprep/InstallAllTestZipAppsSetupTest.java
index daf3174..b1a50b2 100644
--- a/tests/src/com/android/tradefed/targetprep/InstallAllTestZipAppsSetupTest.java
+++ b/tests/src/com/android/tradefed/targetprep/InstallAllTestZipAppsSetupTest.java
@@ -96,7 +96,7 @@
}
@Test
- public void testGetZipFile() throws DeviceNotAvailableException, TargetSetupError {
+ public void testGetZipFile() throws TargetSetupError {
String zip = "zip";
mPrep.setTestZipName(zip);
File file = new File(zip);
@@ -108,7 +108,7 @@
}
@Test
- public void testGetZipFileDoesntExist() throws DeviceNotAvailableException, TargetSetupError {
+ public void testGetZipFileDoesntExist() throws TargetSetupError {
String zip = "zip";
mPrep.setTestZipName(zip);
EasyMock.expect(mMockBuildInfo.getFile(zip)).andReturn(null);
diff --git a/tests/src/com/android/tradefed/testtype/CodeCoverageTestBaseTest.java b/tests/src/com/android/tradefed/testtype/CodeCoverageTestBaseTest.java
index 3181e34..03ac54f 100644
--- a/tests/src/com/android/tradefed/testtype/CodeCoverageTestBaseTest.java
+++ b/tests/src/com/android/tradefed/testtype/CodeCoverageTestBaseTest.java
@@ -15,8 +15,11 @@
*/
package com.android.tradefed.testtype;
+import static com.google.common.truth.Truth.assertThat;
+import static org.mockito.AdditionalMatchers.gt;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyCollection;
+import static org.mockito.Mockito.anyInt;
import static org.mockito.Mockito.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
@@ -24,6 +27,7 @@
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
+import com.android.ddmlib.IDevice;
import com.android.ddmlib.testrunner.IRemoteAndroidTestRunner;
import com.android.ddmlib.testrunner.ITestRunListener;
import com.android.ddmlib.testrunner.RemoteAndroidTestRunner;
@@ -34,33 +38,47 @@
import com.android.tradefed.device.DeviceNotAvailableException;
import com.android.tradefed.device.ITestDevice;
import com.android.tradefed.log.ITestLogger;
+import com.android.tradefed.result.InputStreamSource;
import com.android.tradefed.result.ITestInvocationListener;
import com.android.tradefed.result.LogDataType;
import com.android.tradefed.util.ICompressionStrategy;
import com.android.tradefed.util.ListInstrumentationParser;
import com.android.tradefed.util.ListInstrumentationParser.InstrumentationTarget;
+import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
+import com.google.protobuf.ByteString;
-import junit.framework.TestCase;
-
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mock;
import org.mockito.Mockito;
+import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.InputStream;
+import java.io.IOException;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
-import java.util.HashSet;
import java.util.List;
import java.util.Map;
/** Unit tests for {@link CodeCoverageTestBase}. */
-public class CodeCoverageTestBaseTest extends TestCase {
+@RunWith(JUnit4.class)
+public class CodeCoverageTestBaseTest {
+
+ private static final long TEST_RUN_TIME = 1000;
+ private static final String COVERAGE_PATH = "/data/user/0/%s/files/coverage.ec";
private static final String PACKAGE_NAME1 = "com.example.foo.test";
private static final String PACKAGE_NAME2 = "com.example.bar.test";
@@ -74,20 +92,59 @@
private static final TestIdentifier FOO_TEST2 = new TestIdentifier(".FooTest", "test2");
private static final TestIdentifier FOO_TEST3 = new TestIdentifier(".FooTest", "test3");
private static final List<TestIdentifier> FOO_TESTS =
- Arrays.asList(FOO_TEST1, FOO_TEST2, FOO_TEST3);
+ ImmutableList.of(FOO_TEST1, FOO_TEST2, FOO_TEST3);
private static final TestIdentifier BAR_TEST1 = new TestIdentifier(".BarTest", "test1");
private static final TestIdentifier BAR_TEST2 = new TestIdentifier(".BarTest", "test2");
- private static final List<TestIdentifier> BAR_TESTS = Arrays.asList(BAR_TEST1, BAR_TEST2);
+ private static final List<TestIdentifier> BAR_TESTS = ImmutableList.of(BAR_TEST1, BAR_TEST2);
private static final TestIdentifier BAZ_TEST1 = new TestIdentifier(".BazTest", "test1");
private static final TestIdentifier BAZ_TEST2 = new TestIdentifier(".BazTest", "test2");
- private static final List<TestIdentifier> BAZ_TESTS = Arrays.asList(BAZ_TEST1, BAZ_TEST2);
+ private static final List<TestIdentifier> BAZ_TESTS = ImmutableList.of(BAZ_TEST1, BAZ_TEST2);
- private static final File FAKE_COVERAGE_REPORT = new File("/some/fake/report/");
+ private static final ByteString FAKE_REPORT_CONTENTS =
+ ByteString.copyFromUtf8("Mi estas kovrado raporto");
+
+ private static final ByteString FAKE_MEASUREMENT1 =
+ ByteString.copyFromUtf8("Mi estas kovrado mezurado");
+ private static final ByteString FAKE_MEASUREMENT2 =
+ ByteString.copyFromUtf8("Mi estas ankau kovrado mezurado");
+ private static final ByteString FAKE_MEASUREMENT3 =
+ ByteString.copyFromUtf8("Mi estas ankorau alia priraportado mezurado");
private static final IBuildInfo BUILD_INFO = new BuildInfo("123456", "device-userdebug");
+ @Rule public TemporaryFolder mFolder = new TemporaryFolder();
+
+ // Mocks
+ @Mock ITestDevice mDevice;
+
+ @Mock ITestInvocationListener mListener;
+
+ @Mock ListInstrumentationParser mInstrumentationParser;
+
+ // Fake test data
+ @Mock TestDataRegistry<List<TestIdentifier>> mTests;
+
+ @Mock TestDataRegistry<ByteString> mMeasurements;
+
+ interface TestDataRegistry<T> {
+ T get(String packageName, String runnerName, int shardIndex, int numShards);
+ }
+
+ /** Object under test */
+ CodeCoverageTestStub mCoverageTest;
+
+ @Before
+ public void setUp() throws DeviceNotAvailableException {
+ MockitoAnnotations.initMocks(this);
+ doAnswer(CALL_RUNNER)
+ .when(mDevice)
+ .runInstrumentationTests(
+ any(IRemoteAndroidTestRunner.class), any(ITestRunListener.class));
+ mCoverageTest = new CodeCoverageTestStub();
+ }
+
static enum FakeReportFormat implements CodeCoverageReportFormat {
CSV(LogDataType.JACOCO_CSV),
XML(LogDataType.JACOCO_XML),
@@ -103,39 +160,15 @@
public LogDataType getLogDataType() { return mLogDataType; }
}
- /**
- * A subclass of {@link CodeCoverageTest} with certain methods stubbed out for testing.
- */
- static class CodeCoverageTestStub extends CodeCoverageTestBase<FakeReportFormat> {
- private static final Answer<Void> CALL_RUNNER =
- new Answer<Void>() {
- @Override
- public Void answer(InvocationOnMock invocation) throws Throwable {
- Object[] args = invocation.getArguments();
- ((IRemoteAndroidTestRunner) args[0]).run((ITestRunListener) args[1]);
- return null;
- }
- };
+ /** A subclass of {@link CodeCoverageTest} with certain methods stubbed out for testing. */
+ private class CodeCoverageTestStub extends CodeCoverageTestBase<FakeReportFormat> {
- private Map<InstrumentationTarget, List<TestIdentifier>> mTests = new HashMap<>();
- private Map<String, Boolean> mShardingEnabled = new HashMap<>();
+ // Captured data
+ private ImmutableList.Builder<ByteString> mCapturedMeasurements =
+ new ImmutableList.Builder<>();
- public CodeCoverageTestStub() throws DeviceNotAvailableException {
- // Set up a mock device that simply calls the runner
- ITestDevice device = mock(ITestDevice.class);
- doAnswer(CALL_RUNNER).when(device).runInstrumentationTests(
- any(IRemoteAndroidTestRunner.class), any(ITestRunListener.class));
- doReturn(true).when(device).doesFileExist(anyString());
- setDevice(device);
- }
-
- public void addTests(InstrumentationTarget target, Collection<TestIdentifier> tests) {
- mTests.putIfAbsent(target, new ArrayList<TestIdentifier>());
- mTests.get(target).addAll(tests);
- }
-
- public void setShardingEnabled(String runner, boolean shardingEnabled) {
- mShardingEnabled.put(runner, shardingEnabled);
+ public CodeCoverageTestStub() {
+ setDevice(mDevice);
}
@Override
@@ -146,141 +179,189 @@
return mock(ICompressionStrategy.class);
}
- @Override
- protected File generateCoverageReport(Collection<File> executionDataFiles,
- FakeReportFormat format) {
+ ImmutableList<ByteString> getMeasurements() {
+ return mCapturedMeasurements.build();
+ }
- return FAKE_COVERAGE_REPORT;
+ @Override
+ protected File generateCoverageReport(
+ Collection<File> measurementFiles, FakeReportFormat format) throws IOException {
+ // Capture the measurements for verification later
+ for (File measurementFile : measurementFiles) {
+ try (FileInputStream inputStream = new FileInputStream(measurementFile)) {
+ mCapturedMeasurements.add(ByteString.readFrom(inputStream));
+ }
+ }
+
+ // Write the fake report
+ File ret = mFolder.newFile();
+ FAKE_REPORT_CONTENTS.writeTo(new FileOutputStream(ret));
+ return ret;
}
@Override
protected List<FakeReportFormat> getReportFormat() {
- return Arrays.asList(FakeReportFormat.HTML);
+ return ImmutableList.of(FakeReportFormat.HTML);
}
@Override
- void doLogReport(String dataName, LogDataType dataType, File data, ITestLogger logger) {
- // Don't actually log anything
- }
-
- @Override
- CodeCoverageTest internalCreateCoverageTest() {
- return Mockito.spy(super.internalCreateCoverageTest());
+ InstrumentationTest internalCreateTest() {
+ return new InstrumentationTest() {
+ @Override
+ IRemoteAndroidTestRunner createRemoteAndroidTestRunner(
+ String packageName, String runnerName, IDevice device) {
+ return new FakeTestRunner(packageName, runnerName);
+ }
+ };
}
@Override
IRemoteAndroidTestRunner internalCreateTestRunner(String packageName, String runnerName) {
- // Look up tests for this target
- InstrumentationTarget target = new InstrumentationTarget(packageName, runnerName, "");
- List<TestIdentifier> tests = mTests.getOrDefault(target,
- new ArrayList<TestIdentifier>());
-
- // Return a fake AndroidTestRunner
- boolean shardingEnabled = mShardingEnabled.getOrDefault(runnerName, false);
- return Mockito.spy(new FakeTestRunner(packageName, runnerName, tests, shardingEnabled));
+ return new FakeTestRunner(packageName, runnerName);
}
@Override
ListInstrumentationParser internalCreateListInstrumentationParser() {
- // Return a fake ListInstrumentationParser
- return new ListInstrumentationParser() {
- @Override
- public List<InstrumentationTarget> getInstrumentationTargets() {
- return new ArrayList<>(mTests.keySet());
- }
- };
+ return mInstrumentationParser;
}
}
- public void testRun() throws DeviceNotAvailableException {
- // Prepare some test data
- InstrumentationTarget target = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
+ private static final class LogCaptorAnswer implements Answer<Void> {
+ private ByteString mValue;
- TestRunResult success = mock(TestRunResult.class);
- doReturn(false).when(success).isRunFailure();
- Map<String, String> fakeResultMap = new HashMap<>();
- fakeResultMap.put(CodeCoverageTest.COVERAGE_REMOTE_FILE_LABEL, "fakepath");
- doReturn(fakeResultMap).when(success).getRunMetrics();
+ @Override
+ public Void answer(InvocationOnMock invocation) throws Throwable {
+ Object[] args = invocation.getArguments();
+ InputStream reportStream = ((InputStreamSource) args[2]).createInputStream();
+ mValue = ByteString.readFrom(reportStream);
+ return null;
+ }
- // Mocking boilerplate
- ITestInvocationListener mockListener = mock(ITestInvocationListener.class);
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target, FOO_TESTS);
-
- doReturn(success).when(coverageTest).runTest(any(CodeCoverageTest.class),
- any(ITestInvocationListener.class));
-
- // Run the test
- coverageTest.run(mockListener);
- // Verify that the test was run, and that the report was logged
- verify(coverageTest).runTest(any(CodeCoverageTest.class),
- any(ITestInvocationListener.class));
- verify(coverageTest).generateCoverageReport(anyCollection(), eq(FakeReportFormat.HTML));
- verify(coverageTest).doLogReport(anyString(), eq(FakeReportFormat.HTML.getLogDataType()),
- eq(FAKE_COVERAGE_REPORT), any(ITestLogger.class));
+ ByteString getValue() {
+ return mValue;
+ }
}
- public void testRun_multipleInstrumentationTargets() throws DeviceNotAvailableException {
+ @Test
+ public void testRun() throws DeviceNotAvailableException, IOException {
// Prepare some test data
- InstrumentationTarget target1 = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
- InstrumentationTarget target2 = new InstrumentationTarget(PACKAGE_NAME2, RUNNER_NAME1, "");
- InstrumentationTarget target3 = new InstrumentationTarget(PACKAGE_NAME3, RUNNER_NAME1, "");
-
- TestRunResult success = mock(TestRunResult.class);
- doReturn(false).when(success).isRunFailure();
+ doReturn(ImmutableList.of(new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "")))
+ .when(mInstrumentationParser)
+ .getInstrumentationTargets();
// Mocking boilerplate
- ITestInvocationListener mockListener = mock(ITestInvocationListener.class);
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target1, FOO_TESTS);
- coverageTest.addTests(target2, BAR_TESTS);
- coverageTest.addTests(target3, BAZ_TESTS);
+ doReturn(FOO_TESTS)
+ .when(mTests)
+ .get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), anyInt(), anyInt());
+ doReturn(FAKE_MEASUREMENT1)
+ .when(mMeasurements)
+ .get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), anyInt(), anyInt());
- doReturn(success).when(coverageTest).runTest(any(CodeCoverageTest.class),
- any(ITestInvocationListener.class));
+ // Validate the report when it gets logged, since the file will get cleaned up later
+ LogCaptorAnswer logCaptor = new LogCaptorAnswer();
+ doAnswer(logCaptor)
+ .when(mListener)
+ .testLog(
+ eq("coverage"),
+ eq(FakeReportFormat.HTML.getLogDataType()),
+ any(InputStreamSource.class));
// Run the test
- coverageTest.run(mockListener);
+ mCoverageTest.run(mListener);
- // Verify that all targets were run
- verify(coverageTest).runTest(eq(target1), eq(0), eq(1), any(ITestInvocationListener.class));
- verify(coverageTest).runTest(eq(target2), eq(0), eq(1), any(ITestInvocationListener.class));
- verify(coverageTest).runTest(eq(target3), eq(0), eq(1), any(ITestInvocationListener.class));
+ // Verify that the measurements were collected and the report was logged
+ assertThat(mCoverageTest.getMeasurements()).containsExactly(FAKE_MEASUREMENT1);
+ assertThat(logCaptor.getValue()).isEqualTo(FAKE_REPORT_CONTENTS);
}
+ @Test
+ public void testRun_multipleInstrumentationTargets()
+ throws DeviceNotAvailableException, IOException {
+ // Prepare some test data
+ ImmutableList<InstrumentationTarget> targets =
+ ImmutableList.of(
+ new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, ""),
+ new InstrumentationTarget(PACKAGE_NAME2, RUNNER_NAME1, ""),
+ new InstrumentationTarget(PACKAGE_NAME3, RUNNER_NAME1, ""));
+ doReturn(targets).when(mInstrumentationParser).getInstrumentationTargets();
+
+ // Mocking boilerplate
+ doReturn(FOO_TESTS)
+ .when(mTests)
+ .get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), anyInt(), anyInt());
+ doReturn(FAKE_MEASUREMENT1)
+ .when(mMeasurements)
+ .get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), anyInt(), anyInt());
+
+ doReturn(BAR_TESTS)
+ .when(mTests)
+ .get(eq(PACKAGE_NAME2), eq(RUNNER_NAME1), anyInt(), anyInt());
+ doReturn(FAKE_MEASUREMENT2)
+ .when(mMeasurements)
+ .get(eq(PACKAGE_NAME2), eq(RUNNER_NAME1), anyInt(), anyInt());
+
+ doReturn(BAZ_TESTS)
+ .when(mTests)
+ .get(eq(PACKAGE_NAME3), eq(RUNNER_NAME1), anyInt(), anyInt());
+ doReturn(FAKE_MEASUREMENT3)
+ .when(mMeasurements)
+ .get(eq(PACKAGE_NAME3), eq(RUNNER_NAME1), anyInt(), anyInt());
+
+ // Run the test
+ mCoverageTest.run(mListener);
+
+ // Verify that all targets were run by checking that we received measurements from each
+ assertThat(mCoverageTest.getMeasurements())
+ .containsExactly(FAKE_MEASUREMENT1, FAKE_MEASUREMENT2, FAKE_MEASUREMENT3);
+ }
+
+ @Test
public void testRun_multipleShards() throws DeviceNotAvailableException {
// Prepare some test data
- InstrumentationTarget target = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
-
- TestRunResult success = mock(TestRunResult.class);
- doReturn(false).when(success).isRunFailure();
-
- // Mocking boilerplate
- ITestInvocationListener mockListener = mock(ITestInvocationListener.class);
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target, FOO_TESTS);
- coverageTest.setShardingEnabled(RUNNER_NAME1, true);
-
- doReturn(success).when(coverageTest).runTest(any(CodeCoverageTest.class),
- any(ITestInvocationListener.class));
+ doReturn(ImmutableList.of(new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "")))
+ .when(mInstrumentationParser)
+ .getInstrumentationTargets();
+ List<List<TestIdentifier>> shards = Lists.partition(FOO_TESTS, 1);
// Indicate that the test should be split into 3 shards
- int numShards = 3;
- doReturn(numShards).when(coverageTest).getNumberOfShards(any(InstrumentationTarget.class));
+ doReturn(FOO_TESTS).when(mTests).get(PACKAGE_NAME1, RUNNER_NAME1, 0, 1);
+ doReturn(FOO_TESTS.subList(0, FOO_TESTS.size() / 2))
+ .when(mTests)
+ .get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), eq(0), eq(2));
+ mCoverageTest.setMaxTestsPerChunk(1);
+
+ // Return subsets of FOO_TESTS when running shards
+ doReturn(shards.get(0)).when(mTests).get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), eq(0), gt(1));
+ doReturn(FAKE_MEASUREMENT1)
+ .when(mMeasurements)
+ .get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), eq(0), gt(1));
+
+ doReturn(shards.get(1)).when(mTests).get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), eq(1), gt(1));
+ doReturn(FAKE_MEASUREMENT2)
+ .when(mMeasurements)
+ .get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), eq(1), gt(1));
+
+ doReturn(shards.get(2)).when(mTests).get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), eq(2), gt(1));
+ doReturn(FAKE_MEASUREMENT3)
+ .when(mMeasurements)
+ .get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), eq(2), gt(1));
// Run the test
- coverageTest.run(mockListener);
+ mCoverageTest.run(mListener);
- // Verify that each shard was run
- for (int i = 0; i < numShards; i++) {
- verify(coverageTest).runTest(eq(target), eq(i), eq(numShards),
- any(ITestInvocationListener.class));
- }
+ // Verify that all shards were run by checking that we received measurements from each
+ assertThat(mCoverageTest.getMeasurements())
+ .containsExactly(FAKE_MEASUREMENT1, FAKE_MEASUREMENT2, FAKE_MEASUREMENT3);
}
+ @Test
public void testRun_rerunIndividualTests_failedRun() throws DeviceNotAvailableException {
// Prepare some test data
InstrumentationTarget target = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
+ doReturn(ImmutableList.of(target)).when(mInstrumentationParser).getInstrumentationTargets();
+ doReturn(FOO_TESTS)
+ .when(mTests)
+ .get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), anyInt(), anyInt());
TestRunResult success = mock(TestRunResult.class);
doReturn(false).when(success).isRunFailure();
@@ -290,7 +371,7 @@
// Mocking boilerplate
ITestInvocationListener mockListener = mock(ITestInvocationListener.class);
CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target, FOO_TESTS);
+ doReturn(FOO_TESTS).when(mTests).get(PACKAGE_NAME1, RUNNER_NAME1, 0, 1);
doReturn(failure).when(coverageTest).runTest(eq(target), eq(0), eq(1),
any(ITestInvocationListener.class));
@@ -311,10 +392,15 @@
verify(coverageTest).runTest(eq(target), eq(FOO_TEST3), any(ITestInvocationListener.class));
}
+ @Test
public void testRun_rerunIndividualTests_missingCoverageFile()
throws DeviceNotAvailableException {
// Prepare some test data
InstrumentationTarget target = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
+ doReturn(ImmutableList.of(target)).when(mInstrumentationParser).getInstrumentationTargets();
+ doReturn(FOO_TESTS)
+ .when(mTests)
+ .get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), anyInt(), anyInt());
TestRunResult success = mock(TestRunResult.class);
doReturn(false).when(success).isRunFailure();
@@ -322,10 +408,11 @@
// Mocking boilerplate
ITestInvocationListener mockListener = mock(ITestInvocationListener.class);
CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target, FOO_TESTS);
+ doReturn(FOO_TESTS).when(mTests).get(PACKAGE_NAME1, RUNNER_NAME1, 0, 1);
- doReturn(success).when(coverageTest).runTest(any(CodeCoverageTest.class),
- any(ITestInvocationListener.class));
+ doReturn(success)
+ .when(coverageTest)
+ .runTest(any(InstrumentationTest.class), any(ITestInvocationListener.class));
ITestDevice mockDevice = coverageTest.getDevice();
doReturn(false).doReturn(true).when(mockDevice).doesFileExist(anyString());
@@ -339,39 +426,41 @@
verify(coverageTest).runTest(eq(target), eq(FOO_TEST3), any(ITestInvocationListener.class));
}
- public void testRun_multipleFormats() throws DeviceNotAvailableException {
+ @Test
+ public void testRun_multipleFormats() throws DeviceNotAvailableException, IOException {
// Prepare some test data
InstrumentationTarget target = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
- File fakeHtmlReport = FAKE_COVERAGE_REPORT;
- File fakeXmlReport = new File("/some/fake/xml/report.xml");
+ doReturn(ImmutableList.of(target)).when(mInstrumentationParser).getInstrumentationTargets();
+ doReturn(FOO_TESTS)
+ .when(mTests)
+ .get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), anyInt(), anyInt());
+ doReturn(FAKE_MEASUREMENT1)
+ .when(mMeasurements)
+ .get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), anyInt(), anyInt());
- TestRunResult success = mock(TestRunResult.class);
- doReturn(false).when(success).isRunFailure();
- Map<String, String> fakeResultMap = new HashMap<>();
- fakeResultMap.put(CodeCoverageTest.COVERAGE_REMOTE_FILE_LABEL, "fakepath");
- doReturn(fakeResultMap).when(success).getRunMetrics();
+ File fakeHtmlReport = new File("/some/fake/xml/report/");
+ File fakeXmlReport = new File("/some/fake/xml/report.xml");
// Mocking boilerplate
ITestInvocationListener mockListener = mock(ITestInvocationListener.class);
CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target, FOO_TESTS);
- doReturn(Arrays.asList(FakeReportFormat.XML, FakeReportFormat.HTML))
- .when(coverageTest).getReportFormat();
+ doReturn(FOO_TESTS).when(mTests).get(PACKAGE_NAME1, RUNNER_NAME1, 0, 1);
+ doReturn(ImmutableList.of(FakeReportFormat.XML, FakeReportFormat.HTML))
+ .when(coverageTest)
+ .getReportFormat();
doReturn(fakeHtmlReport)
.when(coverageTest)
.generateCoverageReport(anyCollection(), eq(FakeReportFormat.HTML));
doReturn(fakeXmlReport)
.when(coverageTest)
.generateCoverageReport(anyCollection(), eq(FakeReportFormat.XML));
- doReturn(success).when(coverageTest).runTest(any(CodeCoverageTest.class),
- any(ITestInvocationListener.class));
// Run the test
coverageTest.run(mockListener);
// Verify that the test was run, and that the reports were logged
- verify(coverageTest).runTest(any(CodeCoverageTest.class),
- any(ITestInvocationListener.class));
+ verify(coverageTest)
+ .runTest(any(InstrumentationTest.class), any(ITestInvocationListener.class));
verify(coverageTest).generateCoverageReport(anyCollection(), eq(FakeReportFormat.HTML));
verify(coverageTest).doLogReport(anyString(), eq(FakeReportFormat.HTML.getLogDataType()),
eq(fakeHtmlReport), any(ITestLogger.class));
@@ -380,6 +469,7 @@
eq(fakeXmlReport), any(ITestLogger.class));
}
+ @Test
public void testGetInstrumentationTargets() throws DeviceNotAvailableException {
// Prepare some test data
InstrumentationTarget target1 = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
@@ -388,19 +478,18 @@
InstrumentationTarget target4 = new InstrumentationTarget(PACKAGE_NAME3, RUNNER_NAME3, "");
// Set up mocks
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target1, FOO_TESTS);
- coverageTest.addTests(target2, BAR_TESTS);
- coverageTest.addTests(target3, BAZ_TESTS);
- coverageTest.addTests(target4, BAZ_TESTS);
+ doReturn(ImmutableList.of(target1, target2, target3, target4))
+ .when(mInstrumentationParser)
+ .getInstrumentationTargets();
// Get the instrumentation targets
- Collection<InstrumentationTarget> targets = coverageTest.getInstrumentationTargets();
+ Collection<InstrumentationTarget> targets = mCoverageTest.getInstrumentationTargets();
// Verify that all of the instrumentation targets were found
- assertEquals(Sets.newHashSet(target1, target2, target3, target4), targets);
+ assertThat(targets).containsExactly(target1, target2, target3, target4);
}
+ @Test
public void testGetInstrumentationTargets_packageFilterSingleFilterSingleResult()
throws DeviceNotAvailableException {
// Prepare some test data
@@ -410,20 +499,19 @@
InstrumentationTarget target4 = new InstrumentationTarget(PACKAGE_NAME3, RUNNER_NAME3, "");
// Set up mocks
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target1, FOO_TESTS);
- coverageTest.addTests(target2, BAR_TESTS);
- coverageTest.addTests(target3, BAZ_TESTS);
- coverageTest.addTests(target4, BAZ_TESTS);
- doReturn(Arrays.asList(PACKAGE_NAME1)).when(coverageTest).getPackageFilter();
+ doReturn(ImmutableList.of(target1, target2, target3, target4))
+ .when(mInstrumentationParser)
+ .getInstrumentationTargets();
+ mCoverageTest.setPackageFilter(ImmutableList.of(PACKAGE_NAME1));
// Get the instrumentation targets
- Collection<InstrumentationTarget> targets = coverageTest.getInstrumentationTargets();
+ Collection<InstrumentationTarget> targets = mCoverageTest.getInstrumentationTargets();
// Verify that only the PACKAGE_NAME1 target was returned
- assertEquals(Sets.newHashSet(target1), targets);
+ assertThat(targets).containsExactly(target1);
}
+ @Test
public void testGetInstrumentationTargets_packageFilterSingleFilterMultipleResults()
throws DeviceNotAvailableException {
// Prepare some test data
@@ -433,20 +521,19 @@
InstrumentationTarget target4 = new InstrumentationTarget(PACKAGE_NAME3, RUNNER_NAME3, "");
// Set up mocks
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target1, FOO_TESTS);
- coverageTest.addTests(target2, BAR_TESTS);
- coverageTest.addTests(target3, BAZ_TESTS);
- coverageTest.addTests(target4, BAZ_TESTS);
- doReturn(Arrays.asList(PACKAGE_NAME3)).when(coverageTest).getPackageFilter();
+ doReturn(ImmutableList.of(target1, target2, target3, target4))
+ .when(mInstrumentationParser)
+ .getInstrumentationTargets();
+ mCoverageTest.setPackageFilter(ImmutableList.of(PACKAGE_NAME3));
// Get the instrumentation targets
- Collection<InstrumentationTarget> targets = coverageTest.getInstrumentationTargets();
+ Collection<InstrumentationTarget> targets = mCoverageTest.getInstrumentationTargets();
// Verify that both PACKAGE_NAME3 targets were returned
- assertEquals(Sets.newHashSet(target3, target4), targets);
+ assertThat(targets).containsExactly(target3, target4);
}
+ @Test
public void testGetInstrumentationTargets_packageFilterMultipleFilters()
throws DeviceNotAvailableException {
// Prepare some test data
@@ -456,20 +543,19 @@
InstrumentationTarget target4 = new InstrumentationTarget(PACKAGE_NAME3, RUNNER_NAME3, "");
// Set up mocks
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target1, FOO_TESTS);
- coverageTest.addTests(target2, BAR_TESTS);
- coverageTest.addTests(target3, BAZ_TESTS);
- coverageTest.addTests(target4, BAZ_TESTS);
- doReturn(Arrays.asList(PACKAGE_NAME1, PACKAGE_NAME2)).when(coverageTest).getPackageFilter();
+ doReturn(ImmutableList.of(target1, target2, target3, target4))
+ .when(mInstrumentationParser)
+ .getInstrumentationTargets();
+ mCoverageTest.setPackageFilter(ImmutableList.of(PACKAGE_NAME1, PACKAGE_NAME2));
// Get the instrumentation targets
- Collection<InstrumentationTarget> targets = coverageTest.getInstrumentationTargets();
+ Collection<InstrumentationTarget> targets = mCoverageTest.getInstrumentationTargets();
// Verify that the PACKAGE_NAME1 and PACKAGE_NAME2 targets were returned
- assertEquals(Sets.newHashSet(target1, target2), targets);
+ assertThat(targets).containsExactly(target1, target2);
}
+ @Test
public void testGetInstrumentationTargets_runnerFilterSingleFilterSingleResult()
throws DeviceNotAvailableException {
// Prepare some test data
@@ -479,20 +565,19 @@
InstrumentationTarget target4 = new InstrumentationTarget(PACKAGE_NAME3, RUNNER_NAME3, "");
// Set up mocks
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target1, FOO_TESTS);
- coverageTest.addTests(target2, BAR_TESTS);
- coverageTest.addTests(target3, BAZ_TESTS);
- coverageTest.addTests(target4, BAZ_TESTS);
- doReturn(Arrays.asList(RUNNER_NAME2)).when(coverageTest).getRunnerFilter();
+ doReturn(ImmutableList.of(target1, target2, target3, target4))
+ .when(mInstrumentationParser)
+ .getInstrumentationTargets();
+ mCoverageTest.setRunnerFilter(ImmutableList.of(RUNNER_NAME2));
// Get the instrumentation targets
- Collection<InstrumentationTarget> targets = coverageTest.getInstrumentationTargets();
+ Collection<InstrumentationTarget> targets = mCoverageTest.getInstrumentationTargets();
// Verify that only the RUNNER_NAME2 target was returned
- assertEquals(Sets.newHashSet(target2), targets);
+ assertThat(targets).containsExactly(target2);
}
+ @Test
public void testGetInstrumentationTargets_runnerFilterSingleFilterMultipleResults()
throws DeviceNotAvailableException {
// Prepare some test data
@@ -502,20 +587,19 @@
InstrumentationTarget target4 = new InstrumentationTarget(PACKAGE_NAME3, RUNNER_NAME3, "");
// Set up mocks
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target1, FOO_TESTS);
- coverageTest.addTests(target2, BAR_TESTS);
- coverageTest.addTests(target3, BAZ_TESTS);
- coverageTest.addTests(target4, BAZ_TESTS);
- doReturn(Arrays.asList(RUNNER_NAME1)).when(coverageTest).getRunnerFilter();
+ doReturn(ImmutableList.of(target1, target2, target3, target4))
+ .when(mInstrumentationParser)
+ .getInstrumentationTargets();
+ mCoverageTest.setRunnerFilter(ImmutableList.of(RUNNER_NAME1));
// Get the instrumentation targets
- Collection<InstrumentationTarget> targets = coverageTest.getInstrumentationTargets();
+ Collection<InstrumentationTarget> targets = mCoverageTest.getInstrumentationTargets();
// Verify that both RUNNER_NAME1 targets were returned
- assertEquals(Sets.newHashSet(target1, target3), targets);
+ assertThat(targets).containsExactly(target1, target3);
}
+ @Test
public void testGetInstrumentationTargets_runnerFilterMultipleFilters()
throws DeviceNotAvailableException {
// Prepare some test data
@@ -525,46 +609,50 @@
InstrumentationTarget target4 = new InstrumentationTarget(PACKAGE_NAME3, RUNNER_NAME3, "");
// Set up mocks
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target1, FOO_TESTS);
- coverageTest.addTests(target2, BAR_TESTS);
- coverageTest.addTests(target3, BAZ_TESTS);
- coverageTest.addTests(target4, BAZ_TESTS);
- doReturn(Arrays.asList(RUNNER_NAME2, RUNNER_NAME3)).when(coverageTest).getRunnerFilter();
+ doReturn(ImmutableList.of(target1, target2, target3, target4))
+ .when(mInstrumentationParser)
+ .getInstrumentationTargets();
+ mCoverageTest.setRunnerFilter(ImmutableList.of(RUNNER_NAME2, RUNNER_NAME3));
// Get the instrumentation targets
- Collection<InstrumentationTarget> targets = coverageTest.getInstrumentationTargets();
+ Collection<InstrumentationTarget> targets = mCoverageTest.getInstrumentationTargets();
// Verify that the RUNNER_NAME2 and RUNNER_NAME3 targets were returned
- assertEquals(Sets.newHashSet(target2, target4), targets);
+ assertThat(targets).containsExactly(target2, target4);
}
+ @Test
public void testDoesRunnerSupportSharding_true() throws DeviceNotAvailableException {
// Prepare some test data
InstrumentationTarget target = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
// Set up mocks. Return fewer tests when sharding is enabled.
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target, FOO_TESTS);
- coverageTest.setShardingEnabled(RUNNER_NAME1, true);
+ doReturn(ImmutableList.of(target)).when(mInstrumentationParser).getInstrumentationTargets();
+ doReturn(FOO_TESTS).when(mTests).get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), eq(0), eq(1));
+ doReturn(Lists.partition(FOO_TESTS, 2).get(0))
+ .when(mTests)
+ .get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), eq(0), gt(1));
// Verify that the method returns true
- assertTrue(coverageTest.doesRunnerSupportSharding(target));
+ assertThat(mCoverageTest.doesRunnerSupportSharding(target)).isTrue();
}
+ @Test
public void testDoesRunnerSupportSharding_false() throws DeviceNotAvailableException {
// Prepare some test data
InstrumentationTarget target = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
// Set up mocks. Return the same number of tests for any number of shards.
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target, FOO_TESTS);
- coverageTest.setShardingEnabled(RUNNER_NAME1, false);
+ doReturn(ImmutableList.of(target)).when(mInstrumentationParser).getInstrumentationTargets();
+ doReturn(FOO_TESTS)
+ .when(mTests)
+ .get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), anyInt(), anyInt());
// Verify that the method returns false
- assertFalse(coverageTest.doesRunnerSupportSharding(target));
+ assertThat(mCoverageTest.doesRunnerSupportSharding(target)).isFalse();
}
+ @Test
public void testGetNumberOfShards() throws DeviceNotAvailableException {
// Prepare some test data
InstrumentationTarget target = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
@@ -574,14 +662,14 @@
}
// Set up mocks
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target, tests);
- doReturn(1).when(coverageTest).getMaxTestsPerChunk();
+ doReturn(tests).when(mTests).get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), anyInt(), anyInt());
+ mCoverageTest.setMaxTestsPerChunk(1);
// Verify that each test will run in a separate shard
- assertEquals(tests.size(), coverageTest.getNumberOfShards(target));
+ assertThat(mCoverageTest.getNumberOfShards(target)).isEqualTo(tests.size());
}
+ @Test
public void testGetNumberOfShards_allTestsInSingleShard() throws DeviceNotAvailableException {
// Prepare some test data
InstrumentationTarget target = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
@@ -591,14 +679,14 @@
}
// Set up mocks
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target, tests);
- doReturn(10).when(coverageTest).getMaxTestsPerChunk();
+ doReturn(tests).when(mTests).get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), anyInt(), anyInt());
+ mCoverageTest.setMaxTestsPerChunk(10);
// Verify that all tests will run in a single shard
- assertEquals(1, coverageTest.getNumberOfShards(target));
+ assertThat(mCoverageTest.getNumberOfShards(target)).isEqualTo(1);
}
+ @Test
public void testCollectTests() throws DeviceNotAvailableException {
// Prepare some test data
InstrumentationTarget target = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
@@ -608,16 +696,16 @@
}
// Set up mocks
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target, tests);
+ doReturn(tests).when(mTests).get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), anyInt(), anyInt());
// Collect the tests
- Collection<TestIdentifier> collectedTests = coverageTest.collectTests(target, 0, 1);
+ Collection<TestIdentifier> collectedTests = mCoverageTest.collectTests(target, 0, 1);
// Verify that all of the tests were returned
- assertEquals(new HashSet<TestIdentifier>(tests), collectedTests);
+ assertThat(collectedTests).containsExactlyElementsIn(tests);
}
+ @Test
public void testCollectTests_withShards() throws DeviceNotAvailableException {
// Prepare some test data
InstrumentationTarget target = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
@@ -628,182 +716,202 @@
}
// Set up mocks
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- coverageTest.addTests(target, tests);
- coverageTest.setShardingEnabled(RUNNER_NAME1, true);
+ mCoverageTest.setMaxTestsPerChunk((int) Math.ceil((double) tests.size() / numShards));
+ doReturn(tests).when(mTests).get(PACKAGE_NAME1, RUNNER_NAME1, 0, 1);
+ doReturn(tests.subList(0, tests.size() / 2))
+ .when(mTests)
+ .get(eq(PACKAGE_NAME1), eq(RUNNER_NAME1), eq(0), eq(2));
+
+ List<List<TestIdentifier>> shards =
+ Lists.partition(tests, (int) Math.ceil((double) tests.size() / numShards));
+ int currentIndex = 0;
+ for (List<TestIdentifier> shard : shards) {
+ doReturn(shards.get(currentIndex))
+ .when(mTests)
+ .get(PACKAGE_NAME1, RUNNER_NAME1, currentIndex, shards.size());
+ currentIndex++;
+ }
// Collect the tests in shards
ArrayList<TestIdentifier> allCollectedTests = new ArrayList<>();
for (int shardIndex = 0; shardIndex < numShards; shardIndex++) {
// Verify that each shard contains some tests
Collection<TestIdentifier> collectedTests =
- coverageTest.collectTests(target, shardIndex, numShards);
- assertFalse(collectedTests.isEmpty());
+ mCoverageTest.collectTests(target, shardIndex, numShards);
+ assertThat(collectedTests).containsExactlyElementsIn(shards.get(shardIndex));
allCollectedTests.addAll(collectedTests);
}
// Verify that all of the tests were returned in the end
- assertEquals(tests, allCollectedTests);
+ assertThat(allCollectedTests).containsExactlyElementsIn(tests);
}
+ @Test
public void testCreateTestRunner() throws DeviceNotAvailableException {
// Prepare some test data
InstrumentationTarget target = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
- // Set up mocks
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
-
// Create a test runner
- IRemoteAndroidTestRunner runner = coverageTest.createTestRunner(target, 0, 1);
+ IRemoteAndroidTestRunner runner = mCoverageTest.createTestRunner(target, 0, 1);
// Verify that the runner has the correct values
- assertEquals(PACKAGE_NAME1, runner.getPackageName());
- assertEquals(RUNNER_NAME1, runner.getRunnerName());
+ assertThat(runner.getPackageName()).isEqualTo(PACKAGE_NAME1);
+ assertThat(runner.getRunnerName()).isEqualTo(RUNNER_NAME1);
}
+ @Test
public void testCreateTestRunner_withArgs() throws DeviceNotAvailableException {
// Prepare some test data
InstrumentationTarget target = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
Map<String, String> args = ImmutableMap.of("arg1", "value1", "arg2", "value2");
// Set up mocks
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- doReturn(args).when(coverageTest).getInstrumentationArgs();
+ mCoverageTest.setInstrumentationArgs(args);
// Create a test runner
- IRemoteAndroidTestRunner runner = coverageTest.createTestRunner(target, 0, 1);
+ FakeTestRunner runner = (FakeTestRunner) mCoverageTest.createTestRunner(target, 0, 1);
// Verify that the addInstrumentationArg(..) method was called with each argument
- for (Map.Entry<String, String> entry : args.entrySet()) {
- verify(runner).addInstrumentationArg(entry.getKey(), entry.getValue());
- }
+ assertThat(runner.getArgs()).containsExactlyEntriesIn(args);
}
+ @Test
public void testCreateTestRunner_withShards() throws DeviceNotAvailableException {
// Prepare some test data
InstrumentationTarget target = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
int shardIndex = 3;
int numShards = 5;
- // Set up mocks
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
-
// Create a test runner
- IRemoteAndroidTestRunner runner =
- coverageTest.createTestRunner(target, shardIndex, numShards);
+ FakeTestRunner runner =
+ (FakeTestRunner) mCoverageTest.createTestRunner(target, shardIndex, numShards);
// Verify that the addInstrumentationArg(..) method was called to configure the shards
- verify(runner).addInstrumentationArg("shardIndex", Integer.toString(shardIndex));
- verify(runner).addInstrumentationArg("numShards", Integer.toString(numShards));
+ assertThat(runner.getArgs()).containsEntry("shardIndex", Integer.toString(shardIndex));
+ assertThat(runner.getArgs()).containsEntry("numShards", Integer.toString(numShards));
}
+ @Test
public void testCreateCoverageTest() throws DeviceNotAvailableException {
// Prepare some test data
InstrumentationTarget target = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
- // Set up mocks
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
-
// Create a CodeCoverageTest instance
- CodeCoverageTest test = coverageTest.createCoverageTest(target);
+ InstrumentationTest test = mCoverageTest.createTest(target);
// Verify that the test has the correct values
- assertEquals(PACKAGE_NAME1, test.getPackageName());
- assertEquals(RUNNER_NAME1, test.getRunnerName());
+ assertThat(test.getPackageName()).isEqualTo(PACKAGE_NAME1);
+ assertThat(test.getRunnerName()).isEqualTo(RUNNER_NAME1);
+ assertThat(test.getInstrumentationArg("coverage")).isEqualTo("true");
}
+ @Test
public void testCreateCoverageTest_withArgs() throws DeviceNotAvailableException {
// Prepare some test data
InstrumentationTarget target = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
Map<String, String> args = ImmutableMap.of("arg1", "value1", "arg2", "value2");
// Set up mocks
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
- doReturn(args).when(coverageTest).getInstrumentationArgs();
+ mCoverageTest.setInstrumentationArgs(args);
// Create a CodeCoverageTest instance
- CodeCoverageTest test = coverageTest.createCoverageTest(target, 0, 3);
+ InstrumentationTest test = mCoverageTest.createTest(target, 0, 3);
- // Verify that the addInstrumentationArg(..) method was called with each argument
+ // Verify that the test has the correct values
for (Map.Entry<String, String> entry : args.entrySet()) {
- verify(test).addInstrumentationArg(entry.getKey(), entry.getValue());
+ assertThat(test.getInstrumentationArg(entry.getKey())).isEqualTo(entry.getValue());
}
}
+ @Test
public void testCreateCoverageTest_withShards() throws DeviceNotAvailableException {
// Prepare some test data
InstrumentationTarget target = new InstrumentationTarget(PACKAGE_NAME1, RUNNER_NAME1, "");
int shardIndex = 3;
int numShards = 5;
- // Set up mocks
- CodeCoverageTestStub coverageTest = Mockito.spy(new CodeCoverageTestStub());
-
// Create a CodeCoverageTest instance
- CodeCoverageTest test = coverageTest.createCoverageTest(target, shardIndex, numShards);
+ InstrumentationTest test = mCoverageTest.createTest(target, shardIndex, numShards);
// Verify that the addInstrumentationArg(..) method was called to configure the shards
- verify(test).addInstrumentationArg("shardIndex", Integer.toString(shardIndex));
- verify(test).addInstrumentationArg("numShards", Integer.toString(numShards));
+ assertThat(test.getInstrumentationArg("shardIndex"))
+ .isEqualTo(Integer.toString(shardIndex));
+ assertThat(test.getInstrumentationArg("numShards")).isEqualTo(Integer.toString(numShards));
+ assertThat(test.getInstrumentationArg("coverage")).isEqualTo("true");
}
+ private static final Answer<Void> CALL_RUNNER =
+ invocation -> {
+ Object[] args = invocation.getArguments();
+ ((IRemoteAndroidTestRunner) args[0]).run((ITestRunListener) args[1]);
+ return null;
+ };
+
/**
- * A fake {@link IRemoteAndroidTestRunner} which simulates a test run by notifying the
- * {@link ITestRunListener}s but does not actually run anything.
+ * A fake {@link IRemoteAndroidTestRunner} which simulates a test run by notifying the {@link
+ * ITestRunListener}s but does not actually run anything.
*/
- static class FakeTestRunner extends RemoteAndroidTestRunner {
- private List<TestIdentifier> mTests;
- private boolean mShardingEnabled = false;
- private int mShardIndex = 0;
- private int mNumShards = 1;
+ private class FakeTestRunner extends RemoteAndroidTestRunner {
+ private Map<String, String> mArgs = new HashMap<>();
- public FakeTestRunner(String packageName, String runnerName, List<TestIdentifier> tests,
- boolean shardingEnabled) {
+ FakeTestRunner(String packageName, String runnerName) {
super(packageName, runnerName, null);
-
- mTests = tests;
- mShardingEnabled = shardingEnabled;
}
@Override
public void addInstrumentationArg(String name, String value) {
super.addInstrumentationArg(name, value);
+ mArgs.put(name, value);
+ }
- if ("shardIndex".equals(name)) {
- mShardIndex = Integer.parseInt(value);
- }
- if ("numShards".equals(name)) {
- mNumShards = Integer.parseInt(value);
- }
+ Map<String, String> getArgs() {
+ return mArgs;
}
@Override
public void run(Collection<ITestRunListener> listeners) {
- List<TestIdentifier> tests = mTests;
- if (mShardingEnabled) {
- int shardSize = (int)Math.ceil((double)tests.size() / mNumShards);
- tests = Lists.partition(tests, shardSize).get(mShardIndex);
- }
+ int shardIndex = Integer.parseInt(getArgs().getOrDefault("shardIndex", "0"));
+ int numShards = Integer.parseInt(getArgs().getOrDefault("numShards", "1"));
+ List<TestIdentifier> tests =
+ mTests.get(getPackageName(), getRunnerName(), shardIndex, numShards);
// Start the test run
- for (ITestRunListener listener : listeners) {
- listener.testRunStarted(getPackageName(), tests.size());
- }
+ listeners.stream().forEach(l -> l.testRunStarted(getPackageName(), tests.size()));
+
// Run each of the tests
for (TestIdentifier test : tests) {
- // Start test
- for (ITestRunListener listener : listeners) {
- listener.testStarted(test);
- }
- // Finish test
- for (ITestRunListener listener : listeners) {
- listener.testEnded(test, new HashMap<String, String>());
- }
+ listeners.stream().forEach(l -> l.testStarted(test));
+ listeners.stream().forEach(l -> l.testEnded(test, ImmutableMap.of()));
}
+
+ // Mock out the coverage measurement if necessary
+ Map<String, String> metrics = new HashMap<>();
+ if (getArgs().getOrDefault("coverage", "false").equals("true")) {
+ String devicePath = String.format(COVERAGE_PATH, getPackageName());
+ ByteString measurement =
+ mMeasurements.get(getPackageName(), getRunnerName(), shardIndex, numShards);
+ mockDeviceFile(devicePath, measurement);
+ metrics.put(CodeCoverageTest.COVERAGE_REMOTE_FILE_LABEL, devicePath);
+ }
+
// End the test run
- for (ITestRunListener listener : listeners) {
- listener.testRunEnded(1000, new HashMap<String, String>());
- }
+ listeners.stream().forEach(l -> l.testRunEnded(TEST_RUN_TIME, metrics));
+ }
+ }
+
+ private void mockDeviceFile(String devicePath, ByteString contents) {
+ Answer<File> pullFile =
+ unused -> {
+ File ret = mFolder.newFile();
+ contents.writeTo(new FileOutputStream(ret));
+ return ret;
+ };
+ try {
+ doReturn(true).when(mDevice).doesFileExist(devicePath);
+ doAnswer(pullFile).when(mDevice).pullFile(devicePath);
+ } catch (DeviceNotAvailableException impossible) {
+ // Mocks won't actually throw.
+ throw new AssertionError(impossible);
}
}
}
diff --git a/tests/src/com/android/tradefed/testtype/DeviceSuiteTest.java b/tests/src/com/android/tradefed/testtype/DeviceSuiteTest.java
index 6864287..27284cc 100644
--- a/tests/src/com/android/tradefed/testtype/DeviceSuiteTest.java
+++ b/tests/src/com/android/tradefed/testtype/DeviceSuiteTest.java
@@ -19,12 +19,15 @@
import com.android.ddmlib.testrunner.TestIdentifier;
import com.android.tradefed.build.IBuildInfo;
+import com.android.tradefed.config.Option;
import com.android.tradefed.config.OptionSetter;
import com.android.tradefed.device.ITestDevice;
import com.android.tradefed.result.ITestInvocationListener;
+import com.android.tradefed.testtype.DeviceJUnit4ClassRunner.TestMetrics;
import org.easymock.EasyMock;
import org.junit.Before;
+import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Suite.SuiteClasses;
@@ -32,6 +35,8 @@
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
/**
* Unit Tests for {@link DeviceSuite}
@@ -64,6 +69,11 @@
public static IBuildInfo sBuildInfo;
public static IAbi sAbi;
+ @Rule public TestMetrics metrics = new TestMetrics();
+
+ @Option(name = "option")
+ private String mOption = null;
+
public Junit4DeviceTestclass() {
sDevice = null;
sBuildInfo = null;
@@ -72,7 +82,11 @@
@Test
@MyAnnotation1
- public void testPass1() {}
+ public void testPass1() {
+ if (mOption != null) {
+ metrics.addTestMetric("option", mOption);
+ }
+ }
@Test
public void testPass2() {}
@@ -154,4 +168,29 @@
mHostTest.run(mListener);
EasyMock.verify(mListener, mMockDevice);
}
+
+ /** Tests that options are piped from Suite to the sub-runners. */
+ @Test
+ public void testRun_withOption() throws Exception {
+ OptionSetter setter = new OptionSetter(mHostTest);
+ setter.setOptionValue("class", Junit4DeviceSuite.class.getName());
+ setter.setOptionValue("set-option", "option:value_test");
+ mListener.testRunStarted(
+ EasyMock.eq("com.android.tradefed.testtype.DeviceSuiteTest$Junit4DeviceSuite"),
+ EasyMock.eq(2));
+ TestIdentifier test1 =
+ new TestIdentifier(Junit4DeviceTestclass.class.getName(), "testPass1");
+ TestIdentifier test2 =
+ new TestIdentifier(Junit4DeviceTestclass.class.getName(), "testPass2");
+ mListener.testStarted(EasyMock.eq(test1));
+ Map<String, String> expected = new HashMap<>();
+ expected.put("option", "value_test");
+ mListener.testEnded(EasyMock.eq(test1), EasyMock.eq(expected));
+ mListener.testStarted(EasyMock.eq(test2));
+ mListener.testEnded(EasyMock.eq(test2), EasyMock.eq(Collections.emptyMap()));
+ mListener.testRunEnded(EasyMock.anyLong(), EasyMock.eq(Collections.emptyMap()));
+ EasyMock.replay(mListener, mMockDevice);
+ mHostTest.run(mListener);
+ EasyMock.verify(mListener, mMockDevice);
+ }
}
diff --git a/tests/src/com/android/tradefed/testtype/GTestTest.java b/tests/src/com/android/tradefed/testtype/GTestTest.java
index baf89e1..b5b8ed6 100644
--- a/tests/src/com/android/tradefed/testtype/GTestTest.java
+++ b/tests/src/com/android/tradefed/testtype/GTestTest.java
@@ -15,39 +15,41 @@
*/
package com.android.tradefed.testtype;
+import static org.junit.Assert.*;
+
import com.android.ddmlib.FileListingService;
import com.android.ddmlib.IShellOutputReceiver;
import com.android.ddmlib.testrunner.ITestRunListener;
+import com.android.tradefed.config.OptionSetter;
import com.android.tradefed.device.CollectingOutputReceiver;
import com.android.tradefed.device.DeviceNotAvailableException;
import com.android.tradefed.device.ITestDevice;
import com.android.tradefed.device.MockFileUtil;
import com.android.tradefed.result.ITestInvocationListener;
-import junit.framework.TestCase;
-
import org.easymock.EasyMock;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
import java.io.File;
import java.util.concurrent.TimeUnit;
-/**
- * Unit tests for {@link GTest}.
- */
-public class GTestTest extends TestCase {
+/** Unit tests for {@link GTest}. */
+@RunWith(JUnit4.class)
+public class GTestTest {
private static final String GTEST_FLAG_FILTER = "--gtest_filter";
private ITestInvocationListener mMockInvocationListener = null;
private IShellOutputReceiver mMockReceiver = null;
private ITestDevice mMockITestDevice = null;
private GTest mGTest;
+ private OptionSetter mSetter;
- /**
- * Helper to initialize the various EasyMocks we'll need.
- */
- @Override
- protected void setUp() throws Exception {
- super.setUp();
+ /** Helper to initialize the various EasyMocks we'll need. */
+ @Before
+ public void setUp() throws Exception {
mMockInvocationListener = EasyMock.createMock(ITestInvocationListener.class);
mMockReceiver = EasyMock.createMock(IShellOutputReceiver.class);
mMockITestDevice = EasyMock.createMock(ITestDevice.class);
@@ -70,6 +72,7 @@
}
};
mGTest.setDevice(mMockITestDevice);
+ mSetter = new OptionSetter(mGTest);
}
/**
@@ -86,9 +89,8 @@
EasyMock.verify(mMockInvocationListener, mMockITestDevice, mMockReceiver);
}
- /**
- * Test run when the test dir is not found on the device.
- */
+ /** Test run when the test dir is not found on the device. */
+ @Test
public void testRun_noTestDir() throws DeviceNotAvailableException {
EasyMock.expect(mMockITestDevice.doesFileExist(GTest.DEFAULT_NATIVETEST_PATH))
.andReturn(false);
@@ -97,9 +99,8 @@
verifyMocks();
}
- /**
- * Test run when no device is set should throw an exception.
- */
+ /** Test run when no device is set should throw an exception. */
+ @Test
public void testRun_noDevice() throws DeviceNotAvailableException {
mGTest.setDevice(null);
replayMocks();
@@ -112,9 +113,8 @@
verifyMocks();
}
- /**
- * Test the run method for a couple tests
- */
+ /** Test the run method for a couple tests */
+ @Test
public void testRun() throws DeviceNotAvailableException {
final String nativeTestPath = GTest.DEFAULT_NATIVETEST_PATH;
final String test1 = "test1";
@@ -147,9 +147,8 @@
verifyMocks();
}
- /**
- * Test the run method when module name is specified
- */
+ /** Test the run method when module name is specified */
+ @Test
public void testRun_moduleName() throws DeviceNotAvailableException {
final String module = "test1";
final String modulePath = String.format("%s%s%s",
@@ -173,9 +172,8 @@
verifyMocks();
}
- /**
- * Test the run method for a test in a subdirectory
- */
+ /** Test the run method for a test in a subdirectory */
+ @Test
public void testRun_nested() throws DeviceNotAvailableException {
final String nativeTestPath = GTest.DEFAULT_NATIVETEST_PATH;
final String subFolderName = "subFolder";
@@ -236,9 +234,8 @@
verifyMocks();
}
- /**
- * Test the include filtering of test methods.
- */
+ /** Test the include filtering of test methods. */
+ @Test
public void testIncludeFilter() throws DeviceNotAvailableException {
String includeFilter1 = "abc";
String includeFilter2 = "def";
@@ -247,9 +244,8 @@
doTestFilter(String.format("%s=%s:%s", GTEST_FLAG_FILTER, includeFilter1, includeFilter2));
}
- /**
- * Test the exclude filtering of test methods.
- */
+ /** Test the exclude filtering of test methods. */
+ @Test
public void testExcludeFilter() throws DeviceNotAvailableException {
String excludeFilter1 = "*don?tRunMe*";
mGTest.addExcludeFilter(excludeFilter1);
@@ -258,9 +254,8 @@
"%s=-%s", GTEST_FLAG_FILTER, excludeFilter1));
}
- /**
- * Test simultaneous include and exclude filtering of test methods.
- */
+ /** Test simultaneous include and exclude filtering of test methods. */
+ @Test
public void testIncludeAndExcludeFilters() throws DeviceNotAvailableException {
String includeFilter1 = "pleaseRunMe";
String includeFilter2 = "andMe";
@@ -275,9 +270,8 @@
includeFilter1, includeFilter2, excludeFilter1, excludeFilter2));
}
- /**
- * Test behavior for command lines too long to be run by ADB
- */
+ /** Test behavior for command lines too long to be run by ADB */
+ @Test
public void testCommandTooLong() throws DeviceNotAvailableException {
String deviceScriptPath = "/data/local/tmp/gtest_script.sh";
StringBuilder filterString = new StringBuilder(GTEST_FLAG_FILTER);
@@ -325,6 +319,7 @@
}
/** Empty file exclusion regex filter should not skip any files */
+ @Test
public void testFileExclusionRegexFilter_emptyfilters() throws Exception {
// report /test_file as executable
ITestDevice mockDevice = EasyMock.createMock(ITestDevice.class);
@@ -337,12 +332,14 @@
}
/** File exclusion regex filter should skip invalid filepath. */
+ @Test
public void testFileExclusionRegexFilter_invalidInputString() throws Exception {
assertTrue(mGTest.shouldSkipFile(null));
assertTrue(mGTest.shouldSkipFile(""));
}
/** File exclusion regex filter should skip matched filepaths. */
+ @Test
public void testFileExclusionRegexFilter_skipMatched() throws Exception {
// report all files as executable
ITestDevice mockDevice = EasyMock.createMock(ITestDevice.class);
@@ -363,6 +360,7 @@
}
/** File exclusion regex filter for multi filters. */
+ @Test
public void testFileExclusionRegexFilter_skipMultiMatched() throws Exception {
// report all files as executable
ITestDevice mockDevice = EasyMock.createMock(ITestDevice.class);
@@ -384,11 +382,10 @@
assertTrue(mGTest.shouldSkipFile("/some/path/file/run_me.not2"));
}
- /**
- * Test the run method for a couple tests
- */
- public void testRunXml() throws DeviceNotAvailableException {
- mGTest.setEnableXmlOutput(true);
+ /** Test the run method for a couple tests */
+ @Test
+ public void testRunXml() throws Exception {
+ mSetter.setOptionValue("xml-output", "true");
final String nativeTestPath = GTest.DEFAULT_NATIVETEST_PATH;
final String test1 = "test1";
@@ -424,6 +421,7 @@
verifyMocks();
}
+ @Test
public void testGetFileName() {
String expected = "bar";
String s1 = "/foo/" + expected;
@@ -439,12 +437,11 @@
}
}
- /**
- * Test the include filtering by file of test methods.
- */
- public void testFileFilter() throws DeviceNotAvailableException {
+ /** Test the include filtering by file of test methods. */
+ @Test
+ public void testFileFilter() throws Exception {
String fileFilter = "presubmit";
- mGTest.setLoadFilterFromFile(fileFilter);
+ mSetter.setOptionValue("test-filter-key", fileFilter);
String expectedFilterFile = String.format("%s/test1%s",
GTest.DEFAULT_NATIVETEST_PATH, GTest.FILTER_EXTENSION);
String fakeContent = "{\n" +
@@ -465,9 +462,10 @@
* Test the include filtering by providing a non existing filter. No filter will be applied in
* this case.
*/
- public void testFileFilter_notfound() throws DeviceNotAvailableException {
+ @Test
+ public void testFileFilter_notfound() throws Exception {
String fileFilter = "garbage";
- mGTest.setLoadFilterFromFile(fileFilter);
+ mSetter.setOptionValue("test-filter-key", fileFilter);
String expectedFilterFile = String.format("%s/test1%s",
GTest.DEFAULT_NATIVETEST_PATH, GTest.FILTER_EXTENSION);
String fakeContent = "{\n" +
@@ -485,8 +483,27 @@
}
/**
- * Test GTest command line string for sharded tests.
+ * Test {@link GTest#getGTestCmdLine(String, String)} with default options.
*/
+ @Test
+ public void testGetGTestCmdLine_defaults() {
+ String cmd_line = mGTest.getGTestCmdLine("test_path", "flags");
+ assertEquals("test_path flags", cmd_line);
+ }
+
+ /**
+ * Test {@link GTest#getGTestCmdLine(String, String)} with non-default user.
+ */
+ @Test
+ public void testGetGTestCmdLine_runAs() throws Exception {
+ mSetter.setOptionValue("run-test-as", "shell");
+
+ String cmd_line = mGTest.getGTestCmdLine("test_path", "flags");
+ assertEquals("su shell test_path flags", cmd_line);
+ }
+
+ /** Test GTest command line string for sharded tests. */
+ @Test
public void testGetGTestCmdLine_testShard() {
mGTest.setShardIndex(1);
mGTest.setShardCount(3);
@@ -500,6 +517,7 @@
*
* @throws Exception
*/
+ @Test
public void testIsDeviceFileExecutable_executable_rwx() throws Exception {
ITestDevice mockDevice = EasyMock.createMock(ITestDevice.class);
EasyMock.expect(mockDevice.executeShellCommand("ls -l /system/bin/ping"))
@@ -516,6 +534,7 @@
*
* @throws Exception
*/
+ @Test
public void testIsDeviceFileExecutable_executable_lrwx() throws Exception {
ITestDevice mockDevice = EasyMock.createMock(ITestDevice.class);
EasyMock.expect(mockDevice.executeShellCommand("ls -l /system/bin/start"))
@@ -532,6 +551,7 @@
*
* @throws Exception
*/
+ @Test
public void testIsDeviceFileExecutable_notExecutable() throws Exception {
ITestDevice mockDevice = EasyMock.createMock(ITestDevice.class);
EasyMock.expect(mockDevice.executeShellCommand("ls -l /system/build.prop"))
@@ -547,6 +567,7 @@
*
* @throws Exception
*/
+ @Test
public void testIsDeviceFileExecutable_directory() throws Exception {
ITestDevice mockDevice = EasyMock.createMock(ITestDevice.class);
EasyMock.expect(mockDevice.executeShellCommand("ls -l /system"))
diff --git a/tests/src/com/android/tradefed/testtype/HostTestTest.java b/tests/src/com/android/tradefed/testtype/HostTestTest.java
index eed77d5..c4f3507 100644
--- a/tests/src/com/android/tradefed/testtype/HostTestTest.java
+++ b/tests/src/com/android/tradefed/testtype/HostTestTest.java
@@ -37,10 +37,12 @@
import org.easymock.Capture;
import org.easymock.EasyMock;
import org.junit.Before;
+import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.runner.RunWith;
import org.junit.runners.BlockJUnit4ClassRunner;
+import org.junit.runners.JUnit4;
import org.junit.runners.Suite.SuiteClasses;
import org.junit.runners.model.InitializationError;
@@ -636,17 +638,17 @@
TestIdentifier test3 = new TestIdentifier(AnotherTestCase.class.getName(), "testPass3");
TestIdentifier test4 = new TestIdentifier(AnotherTestCase.class.getName(), "testPass4");
mListener.testRunStarted((String)EasyMock.anyObject(), EasyMock.eq(2));
- EasyMock.expectLastCall().times(2);
mListener.testStarted(EasyMock.eq(test1));
mListener.testEnded(EasyMock.eq(test1), (Map<String, String>)EasyMock.anyObject());
mListener.testStarted(EasyMock.eq(test2));
mListener.testEnded(EasyMock.eq(test2), (Map<String, String>)EasyMock.anyObject());
+ mListener.testRunEnded(EasyMock.anyLong(), (Map<String, String>) EasyMock.anyObject());
+ mListener.testRunStarted((String) EasyMock.anyObject(), EasyMock.eq(2));
mListener.testStarted(EasyMock.eq(test3));
mListener.testEnded(EasyMock.eq(test3), (Map<String, String>)EasyMock.anyObject());
mListener.testStarted(EasyMock.eq(test4));
mListener.testEnded(EasyMock.eq(test4), (Map<String, String>)EasyMock.anyObject());
mListener.testRunEnded(EasyMock.anyLong(), (Map<String, String>)EasyMock.anyObject());
- EasyMock.expectLastCall().times(2);
EasyMock.replay(mListener);
mHostTest.run(mListener);
EasyMock.verify(mListener);
@@ -1894,4 +1896,89 @@
}
EasyMock.verify(mListener, mMockDevice);
}
+
+ /** JUnit 4 class that throws within its @BeforeClass */
+ @RunWith(JUnit4.class)
+ public static class JUnit4FailedBeforeClass {
+ @BeforeClass
+ public static void beforeClass() {
+ throw new RuntimeException();
+ }
+
+ @org.junit.Test
+ public void test1() {}
+ }
+
+ /**
+ * Test that when an exception is thrown from within @BeforeClass, we correctly report a failure
+ * since we cannot run each individual test.
+ */
+ public void testRun_junit4ExceptionBeforeClass() throws Exception {
+ OptionSetter setter = new OptionSetter(mHostTest);
+ setter.setOptionValue("class", JUnit4FailedBeforeClass.class.getName());
+ setter.setOptionValue("class", Junit4TestClass.class.getName());
+ // First class fail with the run failure
+ mListener.testRunStarted(EasyMock.anyObject(), EasyMock.eq(1));
+ mListener.testRunFailed(EasyMock.contains("Failed with trace:"));
+ mListener.testRunEnded(EasyMock.anyLong(), EasyMock.anyObject());
+
+ // Second class run properly
+ mListener.testRunStarted(EasyMock.anyObject(), EasyMock.eq(2));
+ TestIdentifier tid2 = new TestIdentifier(Junit4TestClass.class.getName(), "testPass5");
+ mListener.testStarted(EasyMock.eq(tid2));
+ mListener.testEnded(EasyMock.eq(tid2), EasyMock.anyObject());
+ TestIdentifier tid3 = new TestIdentifier(Junit4TestClass.class.getName(), "testPass6");
+ mListener.testStarted(EasyMock.eq(tid3));
+ mListener.testEnded(EasyMock.eq(tid3), EasyMock.anyObject());
+ mListener.testRunEnded(EasyMock.anyLong(), EasyMock.anyObject());
+
+ EasyMock.replay(mListener);
+ assertEquals(3, mHostTest.countTestCases());
+ mHostTest.run(mListener);
+ EasyMock.verify(mListener);
+ }
+
+ /** JUnit4 class that throws within its @Before */
+ @RunWith(JUnit4.class)
+ public static class JUnit4FailedBefore {
+ @Before
+ public void before() {
+ throw new RuntimeException();
+ }
+
+ @org.junit.Test
+ public void test1() {}
+ }
+
+ /**
+ * Test that when an exception is thrown within @Before, the tests are reported and failed with
+ * the exception.
+ */
+ public void testRun_junit4ExceptionBefore() throws Exception {
+ OptionSetter setter = new OptionSetter(mHostTest);
+ setter.setOptionValue("class", JUnit4FailedBefore.class.getName());
+ setter.setOptionValue("class", Junit4TestClass.class.getName());
+ // First class has a test failure because of the @Before
+ mListener.testRunStarted(EasyMock.anyObject(), EasyMock.eq(1));
+ TestIdentifier tid = new TestIdentifier(JUnit4FailedBefore.class.getName(), "test1");
+ mListener.testStarted(EasyMock.eq(tid));
+ mListener.testFailed(EasyMock.eq(tid), EasyMock.anyObject());
+ mListener.testEnded(EasyMock.eq(tid), EasyMock.anyObject());
+ mListener.testRunEnded(EasyMock.anyLong(), EasyMock.anyObject());
+
+ // Second class run properly
+ mListener.testRunStarted(EasyMock.anyObject(), EasyMock.eq(2));
+ TestIdentifier tid2 = new TestIdentifier(Junit4TestClass.class.getName(), "testPass5");
+ mListener.testStarted(EasyMock.eq(tid2));
+ mListener.testEnded(EasyMock.eq(tid2), EasyMock.anyObject());
+ TestIdentifier tid3 = new TestIdentifier(Junit4TestClass.class.getName(), "testPass6");
+ mListener.testStarted(EasyMock.eq(tid3));
+ mListener.testEnded(EasyMock.eq(tid3), EasyMock.anyObject());
+ mListener.testRunEnded(EasyMock.anyLong(), EasyMock.anyObject());
+
+ EasyMock.replay(mListener);
+ assertEquals(3, mHostTest.countTestCases());
+ mHostTest.run(mListener);
+ EasyMock.verify(mListener);
+ }
}
diff --git a/tests/src/com/android/tradefed/testtype/PythonUnitTestResultParserTest.java b/tests/src/com/android/tradefed/testtype/PythonUnitTestResultParserTest.java
index 1b737e6..836ef62 100644
--- a/tests/src/com/android/tradefed/testtype/PythonUnitTestResultParserTest.java
+++ b/tests/src/com/android/tradefed/testtype/PythonUnitTestResultParserTest.java
@@ -20,7 +20,6 @@
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.reset;
import static org.easymock.EasyMock.verify;
import com.android.ddmlib.testrunner.ITestRunListener;
@@ -43,7 +42,57 @@
mParser = new PythonUnitTestResultParser(ArrayUtil.list(mMockListener), "test");
}
- public void testAdvance_noBlankLines() throws Exception {
+ public void testRegexTestCase() {
+ String s = "a (b) ... ok";
+ assertTrue(PythonUnitTestResultParser.PATTERN_ONE_LINE_RESULT.matcher(s).matches());
+ assertFalse(PythonUnitTestResultParser.PATTERN_TWO_LINE_RESULT_FIRST.matcher(s).matches());
+ s = "a (b) ... FAIL";
+ assertTrue(PythonUnitTestResultParser.PATTERN_ONE_LINE_RESULT.matcher(s).matches());
+ s = "a (b) ... ERROR";
+ assertTrue(PythonUnitTestResultParser.PATTERN_ONE_LINE_RESULT.matcher(s).matches());
+ s = "a (b) ... expected failure";
+ assertTrue(PythonUnitTestResultParser.PATTERN_ONE_LINE_RESULT.matcher(s).matches());
+ s = "a (b) ... skipped 'reason foo'";
+ assertTrue(PythonUnitTestResultParser.PATTERN_ONE_LINE_RESULT.matcher(s).matches());
+ s = "a (b)";
+ assertFalse(PythonUnitTestResultParser.PATTERN_ONE_LINE_RESULT.matcher(s).matches());
+ assertTrue(PythonUnitTestResultParser.PATTERN_TWO_LINE_RESULT_FIRST.matcher(s).matches());
+ s = "doc string foo bar ... ok";
+ assertTrue(PythonUnitTestResultParser.PATTERN_TWO_LINE_RESULT_SECOND.matcher(s).matches());
+ s = "docstringfoobar ... ok";
+ assertTrue(PythonUnitTestResultParser.PATTERN_TWO_LINE_RESULT_SECOND.matcher(s).matches());
+ }
+
+ public void testRegexFailMessage() {
+ String s = "FAIL: a (b)";
+ assertTrue(PythonUnitTestResultParser.PATTERN_FAIL_MESSAGE.matcher(s).matches());
+ s = "ERROR: a (b)";
+ assertTrue(PythonUnitTestResultParser.PATTERN_FAIL_MESSAGE.matcher(s).matches());
+ }
+
+ public void testRegexRunSummary() {
+ String s = "Ran 1 test in 1s";
+ assertTrue(PythonUnitTestResultParser.PATTERN_RUN_SUMMARY.matcher(s).matches());
+ s = "Ran 42 tests in 1s";
+ assertTrue(PythonUnitTestResultParser.PATTERN_RUN_SUMMARY.matcher(s).matches());
+ s = "Ran 1 tests in 0.000s";
+ assertTrue(PythonUnitTestResultParser.PATTERN_RUN_SUMMARY.matcher(s).matches());
+ s = "Ran 1 test in 0.001s";
+ assertTrue(PythonUnitTestResultParser.PATTERN_RUN_SUMMARY.matcher(s).matches());
+ s = "Ran 1 test in 12345s";
+ assertTrue(PythonUnitTestResultParser.PATTERN_RUN_SUMMARY.matcher(s).matches());
+ }
+
+ public void testRegexRunResult() {
+ String s = "OK";
+ assertTrue(PythonUnitTestResultParser.PATTERN_RUN_RESULT.matcher(s).matches());
+ s = "OK (expected failures=2) ";
+ assertTrue(PythonUnitTestResultParser.PATTERN_RUN_RESULT.matcher(s).matches());
+ s = "FAILED (errors=1)";
+ assertTrue(PythonUnitTestResultParser.PATTERN_RUN_RESULT.matcher(s).matches());
+ }
+
+ public void testAdvance() throws Exception {
String[] lines = {"hello", "goodbye"};
mParser.init(lines);
boolean result = mParser.advance();
@@ -53,7 +102,7 @@
assertEquals(1, mParser.mLineNum);
}
- public void testAdvance_blankLinesMid() throws Exception {
+ public void testAdvanceWithBlankLine() throws Exception {
String[] lines = {"hello", "", "goodbye"};
mParser.init(lines);
boolean result = mParser.advance();
@@ -63,7 +112,7 @@
assertEquals(2, mParser.mLineNum);
}
- public void testAdvance_atEnd() throws Exception {
+ public void testAdvanceAtEnd() throws Exception {
String[] lines = {"hello"};
mParser.init(lines);
boolean result = mParser.advance();
@@ -73,12 +122,27 @@
assertEquals(0, mParser.mLineNum);
}
- public void testParse_singleTestPass_contiguous() throws Exception {
+ public void testParseNoTests() throws Exception {
+ String[] output = {
+ "",
+ PythonUnitTestResultParser.LINE,
+ "Ran 0 tests in 0.000s",
+ "",
+ "OK"
+ };
+ setRunListenerChecks(0, 0, true);
+
+ replay(mMockListener);
+ mParser.processNewLines(output);
+ verify(mMockListener);
+ }
+
+ public void testParseSingleTestPass() throws Exception {
String[] output = {
"b (a) ... ok",
"",
PythonUnitTestResultParser.LINE,
- "Ran 1 tests in 1s",
+ "Ran 1 test in 1s",
"",
"OK"
};
@@ -92,12 +156,12 @@
verify(mMockListener);
}
- public void testParse_singleTestPassWithExpectedFailure_contiguous() throws Exception {
+ public void testParseSingleTestPassWithExpectedFailure() throws Exception {
String[] output = {
"b (a) ... expected failure",
"",
PythonUnitTestResultParser.LINE,
- "Ran 1 tests in 1s",
+ "Ran 1 test in 1s",
"",
"OK (expected failures=1)"
};
@@ -111,7 +175,7 @@
verify(mMockListener);
}
- public void testParse_multiTestPass_contiguous() throws Exception {
+ public void testParseMultiTestPass() throws Exception {
String[] output = {
"b (a) ... ok",
"d (c) ... ok",
@@ -131,7 +195,7 @@
verify(mMockListener);
}
- public void testParse_multiTestPassWithOneExpectedFailure_contiguous() throws Exception {
+ public void testParseMultiTestPassWithOneExpectedFailure() throws Exception {
String[] output = {
"b (a) ... expected failure",
"d (c) ... ok",
@@ -151,7 +215,7 @@
verify(mMockListener);
}
- public void testParse_multiTestPassWithAllExpectedFailure_contiguous() throws Exception {
+ public void testParseMultiTestPassWithAllExpectedFailure() throws Exception {
String[] output = {
"b (a) ... expected failure",
"d (c) ... expected failure",
@@ -171,39 +235,7 @@
verify(mMockListener);
}
- public void testParse_multiTestPass_pauseDuringRun() throws Exception {
- String[] output1 = {
- "b (a) ... ok"};
- String[] output2 = {
- "d (c) ... ok",
- "",
- PythonUnitTestResultParser.LINE,
- "Ran 2 tests in 1s",
- "",
- "OK"
- };
- TestIdentifier[] ids = new TestIdentifier[2];
- ids[0] = new TestIdentifier("a", "b");
- boolean didPass[] = new boolean[2];
- didPass[0] = true;
- setRunListenerChecks(2, 1000, true);
- setTestIdChecks(ids, didPass);
-
- replay(mMockListener);
- mParser.processNewLines(output1);
-
- reset(mMockListener);
- ids[1] = new TestIdentifier("c", "d");
- didPass[1] = true;
- setTestIdChecks(ids, didPass);
- setRunListenerChecks(2, 1000, true);
-
- replay(mMockListener);
- mParser.processNewLines(output2);
- verify(mMockListener);
- }
-
- public void testParse_singleTestFail_contiguous() throws Exception {
+ public void testParseSingleTestFail() throws Exception {
String[] output = {
"b (a) ... ERROR",
"",
@@ -216,7 +248,7 @@
"ValueError",
"",
PythonUnitTestResultParser.LINE,
- "Ran 1 tests in 1s",
+ "Ran 1 test in 1s",
"",
"FAILED (errors=1)"
};
@@ -230,7 +262,7 @@
verify(mMockListener);
}
- public void testParse_multiTestFailWithExpectedFailure_contiguous() throws Exception {
+ public void testParseMultiTestFailWithExpectedFailure() throws Exception {
String[] output = {
"b (a) ... expected failure",
"d (c) ... ERROR",
@@ -244,7 +276,7 @@
"ValueError",
"",
PythonUnitTestResultParser.LINE,
- "Ran 1 tests in 1s",
+ "Ran 1 test in 1s",
"",
"FAILED (errors=1)"
};
@@ -258,40 +290,154 @@
verify(mMockListener);
}
- public void testParse_singleTestFail_pauseInTraceback() throws Exception {
- String[] output1 = {
- "b (a) ... ERROR",
+ public void testParseSingleTestUnexpectedSuccess() throws Exception {
+ String[] output = {
+ "b (a) ... unexpected success",
+ "",
+ PythonUnitTestResultParser.LINE,
+ "Ran 1 test in 1s",
+ "",
+ "OK (unexpected success=1)",
+ };
+ TestIdentifier[] ids = {new TestIdentifier("a", "b")};
+ boolean[] didPass = {false};
+ setTestIdChecks(ids, didPass);
+ setRunListenerChecks(1, 1000, false);
+
+ replay(mMockListener);
+ mParser.processNewLines(output);
+ verify(mMockListener);
+ }
+
+ public void testParseSingleTestSkipped() throws Exception {
+ String[] output = {
+ "b (a) ... skipped 'reason foo'",
+ "",
+ PythonUnitTestResultParser.LINE,
+ "Ran 1 test in 1s",
+ "",
+ "OK (skipped=1)",
+ };
+ TestIdentifier[] ids = {new TestIdentifier("a", "b")};
+ boolean[] didPass = {false};
+ boolean[] didSkip = {true};
+ setTestIdChecks(ids, didPass, didSkip);
+ setRunListenerChecks(1, 1000, true);
+
+ replay(mMockListener);
+ mParser.processNewLines(output);
+ verify(mMockListener);
+ }
+
+ public void testParseSingleTestPassWithDocString() throws Exception {
+ String[] output = {
+ "b (a)",
+ "doc string foo bar ... ok",
+ "",
+ PythonUnitTestResultParser.LINE,
+ "Ran 1 test in 1s",
+ "",
+ "OK",
+ };
+ TestIdentifier[] ids = {new TestIdentifier("a", "b")};
+ boolean[] didPass = {true};
+ setTestIdChecks(ids, didPass);
+ setRunListenerChecks(1, 1000, true);
+
+ replay(mMockListener);
+ mParser.processNewLines(output);
+ verify(mMockListener);
+ }
+
+ public void testParseSingleTestFailWithDocString() throws Exception {
+ String[] output = {
+ "b (a)",
+ "doc string foo bar ... ERROR",
"",
PythonUnitTestResultParser.EQLINE,
"ERROR: b (a)",
+ "doc string foo bar",
PythonUnitTestResultParser.LINE,
"Traceback (most recent call last):",
- " File \"test_rangelib.py\", line 129, in test_reallyfail"};
- String[] output2 = {
+ " File \"test_rangelib.py\", line 129, in test_reallyfail",
" raise ValueError()",
"ValueError",
"",
PythonUnitTestResultParser.LINE,
- "Ran 1 tests in 1s",
+ "Ran 1 test in 1s",
"",
"FAILED (errors=1)"
};
TestIdentifier[] ids = {new TestIdentifier("a", "b")};
boolean[] didPass = {false};
- setRunListenerChecks(1, 1000, false);
- setTestIdChecks(ids, didPass);
-
- replay(mMockListener);
- mParser.processNewLines(output1);
-
- reset(mMockListener);
- ids[0] = new TestIdentifier("a", "b");
- didPass[0] = false;
setTestIdChecks(ids, didPass);
setRunListenerChecks(1, 1000, false);
replay(mMockListener);
- mParser.processNewLines(output2);
+ mParser.processNewLines(output);
+ verify(mMockListener);
+ }
+
+ public void testParseOneWithEverything() throws Exception {
+ String[] output = {
+ "testError (foo.testFoo) ... ERROR",
+ "testExpectedFailure (foo.testFoo) ... expected failure",
+ "testFail (foo.testFoo) ... FAIL",
+ "testFailWithDocString (foo.testFoo)",
+ "foo bar ... FAIL",
+ "testOk (foo.testFoo) ... ok",
+ "testOkWithDocString (foo.testFoo)",
+ "foo bar ... ok",
+ "testSkipped (foo.testFoo) ... skipped 'reason foo'",
+ "testUnexpectedSuccess (foo.testFoo) ... unexpected success",
+ "",
+ PythonUnitTestResultParser.EQLINE,
+ "ERROR: testError (foo.testFoo)",
+ PythonUnitTestResultParser.LINE,
+ "Traceback (most recent call last):",
+ "File \"foo.py\", line 11, in testError",
+ "self.assertEqual(2+2, 5/0)",
+ "ZeroDivisionError: integer division or modulo by zero",
+ "",
+ PythonUnitTestResultParser.EQLINE,
+ "FAIL: testFail (foo.testFoo)",
+ PythonUnitTestResultParser.LINE,
+ "Traceback (most recent call last):",
+ "File \"foo.py\", line 8, in testFail",
+ "self.assertEqual(2+2, 5)",
+ "AssertionError: 4 != 5",
+ "",
+ PythonUnitTestResultParser.EQLINE,
+ "FAIL: testFailWithDocString (foo.testFoo)",
+ "foo bar",
+ PythonUnitTestResultParser.LINE,
+ "Traceback (most recent call last):",
+ "File \"foo.py\", line 8, in testFail",
+ "self.assertEqual(2+2, 5)",
+ "AssertionError: 4 != 5",
+ "",
+ PythonUnitTestResultParser.LINE,
+ "Ran 8 tests in 1s",
+ "",
+ "FAILED (failures=2, errors=1, skipped=1, expected failures=1, unexpected successes=1)",
+ };
+ TestIdentifier[] ids = {
+ new TestIdentifier("foo.testFoo", "testError"),
+ new TestIdentifier("foo.testFoo", "testExpectedFailure"),
+ new TestIdentifier("foo.testFoo", "testFail"),
+ new TestIdentifier("foo.testFoo", "testFailWithDocString"),
+ new TestIdentifier("foo.testFoo", "testOk"),
+ new TestIdentifier("foo.testFoo", "testOkWithDocString"),
+ new TestIdentifier("foo.testFoo", "testSkipped"),
+ new TestIdentifier("foo.testFoo", "testUnexpectedSuccess")
+ };
+ boolean[] didPass = {false, true, false, false, true, true, false, false};
+ boolean[] didSkip = {false, false, false, false, false, false, true, false};
+ setTestIdChecks(ids, didPass, didSkip);
+ setRunListenerChecks(8, 1000, false);
+
+ replay(mMockListener);
+ mParser.processNewLines(output);
verify(mMockListener);
}
@@ -325,5 +471,28 @@
}
}
}
+
+ private void setTestIdChecks(TestIdentifier[] ids, boolean[] didPass, boolean[] didSkip) {
+ for (int i = 0; i < ids.length; i++) {
+ mMockListener.testStarted(ids[i]);
+ expectLastCall().times(1);
+ if (didPass[i]) {
+ mMockListener.testEnded(ids[i], Collections.<String, String>emptyMap());
+ expectLastCall().times(1);
+ mMockListener.testFailed(eq(ids[i]), (String) anyObject());
+ expectLastCall().andThrow(new AssertionFailedError()).anyTimes();
+ } else if (didSkip[i]) {
+ mMockListener.testIgnored(ids[i]);
+ expectLastCall().times(1);
+ mMockListener.testEnded(ids[i], Collections.<String, String>emptyMap());
+ expectLastCall().times(1);
+ } else {
+ mMockListener.testFailed(eq(ids[i]), (String)anyObject());
+ expectLastCall().times(1);
+ mMockListener.testEnded(ids[i], Collections.<String, String>emptyMap());
+ expectLastCall().times(1);
+ }
+ }
+ }
}
diff --git a/tests/src/com/android/tradefed/testtype/PythonUnitTestRunnerTest.java b/tests/src/com/android/tradefed/testtype/PythonUnitTestRunnerTest.java
index 9d230d4..d86e082 100644
--- a/tests/src/com/android/tradefed/testtype/PythonUnitTestRunnerTest.java
+++ b/tests/src/com/android/tradefed/testtype/PythonUnitTestRunnerTest.java
@@ -17,8 +17,6 @@
package com.android.tradefed.testtype;
import com.android.ddmlib.testrunner.TestIdentifier;
-import com.android.tradefed.config.ConfigurationException;
-import com.android.tradefed.device.DeviceNotAvailableException;
import com.android.tradefed.result.ITestInvocationListener;
import com.android.tradefed.util.CommandResult;
import com.android.tradefed.util.CommandStatus;
@@ -28,8 +26,7 @@
import org.easymock.EasyMock;
-import java.util.Map;
-
+/** Unit tests for {@link PythonUnitTestRunner}. */
public class PythonUnitTestRunnerTest extends TestCase {
private static final String[] TEST_PASS_STDERR = {
@@ -181,19 +178,18 @@
(TestIdentifier) EasyMock.anyObject(), (String) EasyMock.anyObject());
EasyMock.expectLastCall().times(1);
}
- mMockListener.testEnded(
- (TestIdentifier) EasyMock.anyObject(), (Map<String, String>) EasyMock.anyObject());
+ mMockListener.testEnded((TestIdentifier) EasyMock.anyObject(), EasyMock.anyObject());
EasyMock.expectLastCall().times(1);
if (!testPass) {
mMockListener.testRunFailed((String) EasyMock.anyObject());
EasyMock.expectLastCall().times(1);
}
- mMockListener.testRunEnded(EasyMock.anyLong(), (Map<String, String>) EasyMock.anyObject());
+ mMockListener.testRunEnded(EasyMock.anyLong(), EasyMock.anyObject());
EasyMock.expectLastCall().times(1);
}
/** Test execution succeeds and all test cases pass. */
- public void testRunPass() throws DeviceNotAvailableException, ConfigurationException {
+ public void testRunPass() {
IRunUtil mockRunUtil = getMockRunUtil(UnitTestResult.PASS);
setMockListenerExpectTestPass(true);
EasyMock.replay(mMockListener, mockRunUtil);
@@ -202,7 +198,7 @@
}
/** Test execution succeeds and some test cases fail. */
- public void testRunFail() throws DeviceNotAvailableException, ConfigurationException {
+ public void testRunFail() {
IRunUtil mockRunUtil = getMockRunUtil(UnitTestResult.FAIL);
setMockListenerExpectTestPass(false);
EasyMock.replay(mMockListener, mockRunUtil);
@@ -211,7 +207,7 @@
}
/** Test execution fails. */
- public void testRunExecutionFail() throws DeviceNotAvailableException, ConfigurationException {
+ public void testRunExecutionFail() {
IRunUtil mockRunUtil = getMockRunUtil(UnitTestResult.EXECUTION_FAIL);
EasyMock.replay(mockRunUtil);
try {
@@ -224,7 +220,7 @@
}
/** Test execution times out. */
- public void testRunTimeout() throws DeviceNotAvailableException, ConfigurationException {
+ public void testRunTimeout() {
IRunUtil mockRunUtil = getMockRunUtil(UnitTestResult.TIMEOUT);
EasyMock.replay(mockRunUtil);
try {
diff --git a/tests/src/com/android/tradefed/testtype/suite/ITestSuiteMultiTest.java b/tests/src/com/android/tradefed/testtype/suite/ITestSuiteMultiTest.java
index d2bafe1..0dc0fda 100644
--- a/tests/src/com/android/tradefed/testtype/suite/ITestSuiteMultiTest.java
+++ b/tests/src/com/android/tradefed/testtype/suite/ITestSuiteMultiTest.java
@@ -111,7 +111,7 @@
mTestSuite.setInvocationContext(mContext);
mTestSuite.setSystemStatusChecker(new ArrayList<>());
-
+ mMockListener.testModuleStarted(EasyMock.anyObject());
mMockListener.testRunStarted("test1", 2);
TestIdentifier test1 =
new TestIdentifier(MultiDeviceStubTest.class.getSimpleName(), "test0");
@@ -122,7 +122,7 @@
mMockListener.testStarted(test2, 0l);
mMockListener.testEnded(test2, 5l, Collections.emptyMap());
mMockListener.testRunEnded(EasyMock.anyLong(), EasyMock.anyObject());
-
+ mMockListener.testModuleEnded();
EasyMock.replay(
mMockListener, mMockBuildInfo1, mMockBuildInfo2, mMockDevice1, mMockDevice2);
mTestSuite.run(mMockListener);
diff --git a/tests/src/com/android/tradefed/testtype/suite/ITestSuiteTest.java b/tests/src/com/android/tradefed/testtype/suite/ITestSuiteTest.java
index f16714a..f691558 100644
--- a/tests/src/com/android/tradefed/testtype/suite/ITestSuiteTest.java
+++ b/tests/src/com/android/tradefed/testtype/suite/ITestSuiteTest.java
@@ -166,11 +166,13 @@
/** Helper to expect the test run callback. */
private void expectTestRun(ITestInvocationListener listener) {
+ listener.testModuleStarted(EasyMock.anyObject());
listener.testRunStarted(TEST_CONFIG_NAME, 1);
TestIdentifier test = new TestIdentifier(EMPTY_CONFIG, EMPTY_CONFIG);
listener.testStarted(test, 0);
listener.testEnded(test, 5, Collections.emptyMap());
listener.testRunEnded(EasyMock.anyLong(), EasyMock.anyObject());
+ listener.testModuleEnded();
}
/** Test for {@link ITestSuite#run(ITestInvocationListener)}. */
@@ -257,6 +259,9 @@
*/
@Test
public void testRun_rebootBeforeModule() throws Exception {
+ List<ISystemStatusChecker> sysChecker = new ArrayList<ISystemStatusChecker>();
+ sysChecker.add(mMockSysChecker);
+ mTestSuite.setSystemStatusChecker(sysChecker);
OptionSetter setter = new OptionSetter(mTestSuite);
setter.setOptionValue("skip-all-system-status-check", "true");
setter.setOptionValue("reboot-per-module", "true");
@@ -275,6 +280,8 @@
*/
@Test
public void testRun_unresponsiveDevice() throws Exception {
+ List<ISystemStatusChecker> sysChecker = new ArrayList<ISystemStatusChecker>();
+ sysChecker.add(mMockSysChecker);
mTestSuite =
new TestSuiteImpl() {
@Override
@@ -297,15 +304,18 @@
mTestSuite.setDevice(mMockDevice);
mTestSuite.setBuild(mMockBuildInfo);
mTestSuite.setInvocationContext(mContext);
+ mTestSuite.setSystemStatusChecker(sysChecker);
OptionSetter setter = new OptionSetter(mTestSuite);
setter.setOptionValue("skip-all-system-status-check", "true");
setter.setOptionValue("reboot-per-module", "true");
EasyMock.expect(mMockDevice.getProperty("ro.build.type")).andReturn("user");
+ mMockListener.testModuleStarted(EasyMock.anyObject());
mMockListener.testRunStarted(TEST_CONFIG_NAME, 1);
EasyMock.expectLastCall().times(1);
mMockListener.testRunFailed("Module test only ran 0 out of 1 expected tests.");
mMockListener.testRunEnded(EasyMock.anyLong(), EasyMock.anyObject());
EasyMock.expectLastCall().times(1);
+ mMockListener.testModuleEnded();
replayMocks();
mTestSuite.run(mMockListener);
verifyMocks();
@@ -317,6 +327,8 @@
*/
@Test
public void testRun_runtimeException() throws Exception {
+ List<ISystemStatusChecker> sysChecker = new ArrayList<ISystemStatusChecker>();
+ sysChecker.add(mMockSysChecker);
mTestSuite =
new TestSuiteImpl() {
@Override
@@ -336,6 +348,7 @@
return testConfig;
}
};
+ mTestSuite.setSystemStatusChecker(sysChecker);
mTestSuite.setDevice(mMockDevice);
mTestSuite.setBuild(mMockBuildInfo);
mTestSuite.setInvocationContext(mContext);
@@ -343,11 +356,13 @@
setter.setOptionValue("skip-all-system-status-check", "true");
setter.setOptionValue("reboot-per-module", "true");
EasyMock.expect(mMockDevice.getProperty("ro.build.type")).andReturn("user");
+ mMockListener.testModuleStarted(EasyMock.anyObject());
mMockListener.testRunStarted(TEST_CONFIG_NAME, 1);
EasyMock.expectLastCall().times(1);
mMockListener.testRunFailed("Module test only ran 0 out of 1 expected tests.");
mMockListener.testRunEnded(EasyMock.anyLong(), EasyMock.anyObject());
EasyMock.expectLastCall().times(1);
+ mMockListener.testModuleEnded();
replayMocks();
mTestSuite.run(mMockListener);
verifyMocks();
@@ -377,4 +392,18 @@
assertTrue(test instanceof TestSuiteImpl);
}
}
+
+ /** Test that after being sharded, ITestSuite shows the module runtime that it holds. */
+ @Test
+ public void testGetRuntimeHint() {
+ // default runtime hint is 0, it is only meant to be used for sharding.
+ assertEquals(0l, mTestSuite.getRuntimeHint());
+ mTestSuite = new TestSuiteImpl(5);
+ Collection<IRemoteTest> tests = mTestSuite.split(3);
+ for (IRemoteTest test : tests) {
+ assertTrue(test instanceof TestSuiteImpl);
+ // once sharded modules from the shard start reporting their runtime.
+ assertEquals(60000l, ((TestSuiteImpl) test).getRuntimeHint());
+ }
+ }
}
diff --git a/tests/src/com/android/tradefed/testtype/suite/ModuleDefinitionTest.java b/tests/src/com/android/tradefed/testtype/suite/ModuleDefinitionTest.java
index 5b98c4a..8adad4f 100644
--- a/tests/src/com/android/tradefed/testtype/suite/ModuleDefinitionTest.java
+++ b/tests/src/com/android/tradefed/testtype/suite/ModuleDefinitionTest.java
@@ -212,7 +212,8 @@
mMockCleaner.tearDown(EasyMock.eq(mMockDevice), EasyMock.eq(mMockBuildInfo),
EasyMock.isNull());
mMockListener.testRunStarted(EasyMock.eq(MODULE_NAME), EasyMock.eq(1));
- mMockListener.testStarted(EasyMock.anyObject());
+ mMockListener.testStarted(
+ new TestIdentifier(TargetSetupError.class.getCanonicalName(), "preparationError"));
mMockListener.testFailed(EasyMock.anyObject(), EasyMock.contains(exceptionMessage));
mMockListener.testEnded(EasyMock.anyObject(), EasyMock.anyObject());
mMockListener.testRunFailed(EasyMock.contains(exceptionMessage));
diff --git a/tests/src/com/android/tradefed/testtype/suite/TestFailureListenerTest.java b/tests/src/com/android/tradefed/testtype/suite/TestFailureListenerTest.java
index be0c82e..3c6fc52 100644
--- a/tests/src/com/android/tradefed/testtype/suite/TestFailureListenerTest.java
+++ b/tests/src/com/android/tradefed/testtype/suite/TestFailureListenerTest.java
@@ -30,7 +30,9 @@
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
+import java.util.ArrayList;
import java.util.Collections;
+import java.util.List;
/** Unit tests for {@link com.android.tradefed.testtype.suite.TestFailureListener} */
@RunWith(JUnit4.class)
@@ -39,15 +41,18 @@
private TestFailureListener mFailureListener;
private ITestInvocationListener mMockListener;
private ITestDevice mMockDevice;
+ private List<ITestDevice> mListDevice;
@Before
public void setUp() {
mMockListener = EasyMock.createMock(ITestInvocationListener.class);
mMockDevice = EasyMock.createStrictMock(ITestDevice.class);
+ mListDevice = new ArrayList<>();
+ mListDevice.add(mMockDevice);
EasyMock.expect(mMockDevice.getSerialNumber()).andStubReturn("SERIAL");
// Create base failure listener with all option ON and default logcat size.
- mFailureListener = new TestFailureListener(mMockListener, mMockDevice,
- true, true, true, true, -1);
+ mFailureListener =
+ new TestFailureListener(mMockListener, mListDevice, true, true, true, true, -1);
}
/**
@@ -63,16 +68,22 @@
EasyMock.expect(mMockDevice.getDeviceDate()).andReturn(startDate);
// Screenshot routine
EasyMock.expect(mMockDevice.getScreenshot()).andReturn(fakeSource);
- mMockListener.testLog(EasyMock.eq(testId.toString() + "-screenshot"),
- EasyMock.eq(LogDataType.PNG), EasyMock.eq(fakeSource));
+ mMockListener.testLog(
+ EasyMock.eq(testId.toString() + "-SERIAL-screenshot"),
+ EasyMock.eq(LogDataType.PNG),
+ EasyMock.eq(fakeSource));
// Bugreport routine
- EasyMock.expect(mMockDevice.getBugreport()).andReturn(fakeSource);
- mMockListener.testLog(EasyMock.eq(testId.toString() + "-bugreport"),
- EasyMock.eq(LogDataType.BUGREPORT), EasyMock.eq(fakeSource));
+ EasyMock.expect(mMockDevice.getBugreportz()).andReturn(fakeSource);
+ mMockListener.testLog(
+ EasyMock.eq(testId.toString() + "-SERIAL-bugreport"),
+ EasyMock.eq(LogDataType.BUGREPORTZ),
+ EasyMock.eq(fakeSource));
// logcat routine
EasyMock.expect(mMockDevice.getLogcatSince(EasyMock.eq(startDate))).andReturn(fakeSource);
- mMockListener.testLog(EasyMock.eq(testId.toString() + "-logcat"),
- EasyMock.eq(LogDataType.LOGCAT), EasyMock.eq(fakeSource));
+ mMockListener.testLog(
+ EasyMock.eq(testId.toString() + "-SERIAL-logcat"),
+ EasyMock.eq(LogDataType.LOGCAT),
+ EasyMock.eq(fakeSource));
// Reboot routine
EasyMock.expect(mMockDevice.getProperty(EasyMock.eq("ro.build.type")))
.andReturn("userdebug");
@@ -90,13 +101,13 @@
*/
@Test
public void testTestFailed_notAvailable() throws Exception {
- mFailureListener = new TestFailureListener(mMockListener, mMockDevice,
- false, true, true, true, -1) {
- @Override
- IRunUtil getRunUtil() {
- return EasyMock.createMock(IRunUtil.class);
- }
- };
+ mFailureListener =
+ new TestFailureListener(mMockListener, mListDevice, false, true, true, true, -1) {
+ @Override
+ IRunUtil getRunUtil() {
+ return EasyMock.createMock(IRunUtil.class);
+ }
+ };
TestIdentifier testId = new TestIdentifier("com.fake", "methodfake");
final String trace = "oups it failed";
final byte[] fakeData = "fakeData".getBytes();
@@ -107,8 +118,10 @@
EasyMock.expect(mMockDevice.getScreenshot()).andThrow(dnae);
// logcat routine
EasyMock.expect(mMockDevice.getLogcat(EasyMock.anyInt())).andReturn(fakeSource);
- mMockListener.testLog(EasyMock.eq(testId.toString() + "-logcat"),
- EasyMock.eq(LogDataType.LOGCAT), EasyMock.eq(fakeSource));
+ mMockListener.testLog(
+ EasyMock.eq(testId.toString() + "-SERIAL-logcat"),
+ EasyMock.eq(LogDataType.LOGCAT),
+ EasyMock.eq(fakeSource));
// Reboot routine
EasyMock.expect(mMockDevice.getProperty(EasyMock.eq("ro.build.type")))
.andReturn("userdebug");
@@ -126,8 +139,8 @@
*/
@Test
public void testTestFailed_userBuild() throws Exception {
- mFailureListener = new TestFailureListener(mMockListener, mMockDevice,
- false, false, false, true, -1);
+ mFailureListener =
+ new TestFailureListener(mMockListener, mListDevice, false, false, false, true, -1);
final String trace = "oups it failed";
TestIdentifier testId = new TestIdentifier("com.fake", "methodfake");
EasyMock.expect(mMockDevice.getProperty(EasyMock.eq("ro.build.type"))).andReturn("user");
@@ -137,4 +150,29 @@
mFailureListener.testEnded(testId, Collections.emptyMap());
EasyMock.verify(mMockListener, mMockDevice);
}
+
+ /**
+ * Test when a test failure occurs during a multi device run. Each device should capture the
+ * logs.
+ */
+ @Test
+ public void testFailed_multiDevice() throws Exception {
+ ITestDevice device2 = EasyMock.createMock(ITestDevice.class);
+ mListDevice.add(device2);
+ mFailureListener =
+ new TestFailureListener(mMockListener, mListDevice, false, false, false, true, -1);
+ final String trace = "oups it failed";
+ TestIdentifier testId = new TestIdentifier("com.fake", "methodfake");
+ EasyMock.expect(mMockDevice.getProperty(EasyMock.eq("ro.build.type"))).andReturn("debug");
+ mMockDevice.reboot();
+ EasyMock.expect(device2.getSerialNumber()).andStubReturn("SERIAL2");
+ EasyMock.expect(device2.getProperty(EasyMock.eq("ro.build.type"))).andReturn("debug");
+ device2.reboot();
+
+ EasyMock.replay(mMockListener, mMockDevice, device2);
+ mFailureListener.testStarted(testId);
+ mFailureListener.testFailed(testId, trace);
+ mFailureListener.testEnded(testId, Collections.emptyMap());
+ EasyMock.verify(mMockListener, mMockDevice, device2);
+ }
}
diff --git a/src/com/android/tradefed/testtype/suite/TestSuiteStub.java b/tests/src/com/android/tradefed/testtype/suite/TestSuiteStub.java
similarity index 100%
rename from src/com/android/tradefed/testtype/suite/TestSuiteStub.java
rename to tests/src/com/android/tradefed/testtype/suite/TestSuiteStub.java
diff --git a/tests/src/com/android/tradefed/testtype/suite/TfSuiteRunnerTest.java b/tests/src/com/android/tradefed/testtype/suite/TfSuiteRunnerTest.java
index 245b916..240e508 100644
--- a/tests/src/com/android/tradefed/testtype/suite/TfSuiteRunnerTest.java
+++ b/tests/src/com/android/tradefed/testtype/suite/TfSuiteRunnerTest.java
@@ -44,6 +44,7 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
+import java.util.List;
/**
* Unit tests for {@link TfSuiteRunner}.
@@ -181,8 +182,10 @@
mRunner.setSystemStatusChecker(new ArrayList<>());
mRunner.setInvocationContext(new InvocationContext());
// runs the expanded suite
+ listener.testModuleStarted(EasyMock.anyObject());
listener.testRunStarted("suite/stub1", 0);
listener.testRunEnded(EasyMock.anyLong(), EasyMock.anyObject());
+ listener.testModuleEnded();
EasyMock.replay(listener);
mRunner.run(listener);
EasyMock.verify(listener);
@@ -205,9 +208,11 @@
File zipDir = FileUtil.getFileForPath(tmpDir, "suite");
FileUtil.mkdirsRWX(zipDir);
- // Create a test config inside a zip.
+ // Create 2 test configs inside a zip.
File testConfig = new File(zipDir, "test1.config");
FileUtil.writeToFile(TEST_CONFIG, testConfig);
+ File testConfig2 = new File(zipDir, "test2.config");
+ FileUtil.writeToFile(TEST_CONFIG, testConfig2);
additionalTestsZipFile = ZipUtil.createZip(zipDir);
OptionSetter setter = new OptionSetter(mRunner);
@@ -221,13 +226,14 @@
EasyMock.replay(deviceBuildInfo);
LinkedHashMap<String, IConfiguration> configMap = mRunner.loadTests();
- assertEquals(3, configMap.size());
- assertTrue(configMap.containsKey("suite/stub1"));
- assertTrue(configMap.containsKey("suite/stub2"));
- assertTrue(
- configMap.containsKey(
- FileUtil.getFileForPath(deviceTestDir, "suite/test1.config")
- .getAbsolutePath()));
+ assertEquals(4, configMap.size());
+ // The keySet should be stable and always ensure the same order of files.
+ List<String> keyList = new ArrayList<>(configMap.keySet());
+ // test1 and test2 name was sanitized to look like the included configs.
+ assertEquals("suite/test1", keyList.get(0));
+ assertEquals("suite/test2", keyList.get(1));
+ assertEquals("suite/stub1", keyList.get(2));
+ assertEquals("suite/stub2", keyList.get(3));
EasyMock.verify(deviceBuildInfo);
} finally {
FileUtil.recursiveDelete(deviceTestDir);
diff --git a/tests/src/com/android/tradefed/util/StreamUtilTest.java b/tests/src/com/android/tradefed/util/StreamUtilTest.java
index 374814a..83b5f53 100644
--- a/tests/src/com/android/tradefed/util/StreamUtilTest.java
+++ b/tests/src/com/android/tradefed/util/StreamUtilTest.java
@@ -38,14 +38,15 @@
public void testGetByteArrayListFromSource() throws Exception {
final String contents = "this is a string";
final byte[] contentBytes = contents.getBytes();
- final InputStreamSource source = new ByteArrayInputStreamSource(contentBytes);
- final InputStream stream = source.createInputStream();
- final ByteArrayList output = StreamUtil.getByteArrayListFromStream(stream);
- final byte[] outputBytes = output.getContents();
+ try (final InputStreamSource source = new ByteArrayInputStreamSource(contentBytes)) {
+ final InputStream stream = source.createInputStream();
+ final ByteArrayList output = StreamUtil.getByteArrayListFromStream(stream);
+ final byte[] outputBytes = output.getContents();
- assertEquals(contentBytes.length, outputBytes.length);
- for (int i = 0; i < contentBytes.length; ++i) {
- assertEquals(contentBytes[i], outputBytes[i]);
+ assertEquals(contentBytes.length, outputBytes.length);
+ for (int i = 0; i < contentBytes.length; ++i) {
+ assertEquals(contentBytes[i], outputBytes[i]);
+ }
}
}
@@ -70,11 +71,11 @@
*/
public void testGetStringFromSource() throws Exception {
final String contents = "this is a string";
- final InputStreamSource source = new ByteArrayInputStreamSource(contents.getBytes());
- final InputStream stream = source.createInputStream();
- final String output = StreamUtil.getStringFromStream(stream);
-
- assertEquals(contents, output);
+ try (InputStreamSource source = new ByteArrayInputStreamSource(contents.getBytes())) {
+ final InputStream stream = source.createInputStream();
+ final String output = StreamUtil.getStringFromStream(stream);
+ assertEquals(contents, output);
+ }
}
/**
diff --git a/tests/src/com/android/tradefed/util/SystemUtilTest.java b/tests/src/com/android/tradefed/util/SystemUtilTest.java
index 5b670af..104fa22 100644
--- a/tests/src/com/android/tradefed/util/SystemUtilTest.java
+++ b/tests/src/com/android/tradefed/util/SystemUtilTest.java
@@ -14,20 +14,20 @@
* limitations under the License.
*/
package com.android.tradefed.util;
-
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
+import com.android.tradefed.build.IBuildInfo;
+
+import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mockito;
import java.io.File;
import java.io.IOException;
-import java.util.HashSet;
-import java.util.Set;
-
-import org.junit.Test;
+import java.util.ArrayList;
+import java.util.List;
/** Unit tests for {@link SystemUtil} */
@RunWith(JUnit4.class)
@@ -38,7 +38,7 @@
* environment variables.
*/
@Test
- public void testGetTestCasesDirs() throws IOException {
+ public void testGetExternalTestCasesDirs() throws IOException {
File targetOutDir = null;
File hostOutDir = null;
try {
@@ -48,10 +48,12 @@
SystemUtil.singleton = Mockito.mock(SystemUtil.class);
Mockito.when(SystemUtil.singleton.getEnv(SystemUtil.ENV_ANDROID_TARGET_OUT_TESTCASES))
.thenReturn(targetOutDir.getAbsolutePath());
+ Mockito.when(SystemUtil.singleton.getEnv(SystemUtil.ENV_ANDROID_HOST_OUT_TESTCASES))
+ .thenReturn(hostOutDir.getAbsolutePath());
- Set<File> testCasesDirs = new HashSet<File>(SystemUtil.getTestCasesDirs(null));
- assertTrue(testCasesDirs.contains(targetOutDir));
- assertTrue(!testCasesDirs.contains(hostOutDir));
+ List<File> testCasesDirs = new ArrayList<File>(SystemUtil.getExternalTestCasesDirs());
+ assertTrue(testCasesDirs.get(0).equals(targetOutDir));
+ assertTrue(testCasesDirs.get(1).equals(hostOutDir));
} finally {
FileUtil.recursiveDelete(targetOutDir);
FileUtil.recursiveDelete(hostOutDir);
@@ -63,14 +65,15 @@
* environment variable is set or the directory does not exist.
*/
@Test
- public void testGetTestCasesDirsNoDir() {
+ public void testGetExternalTestCasesDirsNoDir() {
File targetOutDir = new File("/path/not/exist_1");
SystemUtil.singleton = Mockito.mock(SystemUtil.class);
Mockito.when(SystemUtil.singleton.getEnv(SystemUtil.ENV_ANDROID_TARGET_OUT_TESTCASES))
.thenReturn(targetOutDir.getAbsolutePath());
-
- Set<File> testCasesDirs = new HashSet<File>(SystemUtil.getTestCasesDirs(null));
- assertEquals(testCasesDirs.size(), 0);
+ Mockito.when(SystemUtil.singleton.getEnv(SystemUtil.ENV_ANDROID_HOST_OUT_TESTCASES))
+ .thenReturn(null);
+ List<File> testCasesDirs = new ArrayList<File>(SystemUtil.getExternalTestCasesDirs());
+ assertEquals(0, testCasesDirs.size());
}
}