Merge "Backport totalpss listener to qt-dev." into qt-dev
diff --git a/libraries/device-collectors/src/main/java/android/device/collectors/BaseCollectionListener.java b/libraries/device-collectors/src/main/java/android/device/collectors/BaseCollectionListener.java
index fc45c1e..8003aed 100644
--- a/libraries/device-collectors/src/main/java/android/device/collectors/BaseCollectionListener.java
+++ b/libraries/device-collectors/src/main/java/android/device/collectors/BaseCollectionListener.java
@@ -16,12 +16,14 @@
 package android.device.collectors;
 
 import android.os.Bundle;
+import android.util.Log;
 
 import androidx.annotation.VisibleForTesting;
 
 import com.android.helpers.ICollectorHelper;
 
 import org.junit.runner.Description;
+import org.junit.runner.notification.Failure;
 import org.junit.runner.Result;
 
 import java.util.Map;
@@ -42,7 +44,11 @@
     protected ICollectorHelper mHelper;
     // Collect per run if it is set to true otherwise collect per test.
     public static final String COLLECT_PER_RUN = "per_run";
+    // Skip failure metrics collection if this flag is set to true.
+    public static final String SKIP_TEST_FAILURE_METRICS = "skip_test_failure_metrics";
     protected boolean mIsCollectPerRun;
+    protected boolean mSkipTestFailureMetrics;
+    private boolean mIsTestFailed = false;
 
     public BaseCollectionListener() {
         super();
@@ -58,6 +64,8 @@
     public void onTestRunStart(DataRecord runData, Description description) {
         Bundle args = getArgsBundle();
         mIsCollectPerRun = "true".equals(args.getString(COLLECT_PER_RUN));
+        // By default this flag is set to false to collect the metrics on test failure.
+        mSkipTestFailureMetrics = "true".equals(args.getString(SKIP_TEST_FAILURE_METRICS));
 
         // Setup additional args before starting the collection.
         setupAdditionalArgs();
@@ -69,18 +77,32 @@
     }
 
     @Override
-    public void onTestStart(DataRecord testData, Description description) {
+    public final void onTestStart(DataRecord testData, Description description) {
+        mIsTestFailed = false;
         if (!mIsCollectPerRun) {
             mHelper.startCollecting();
         }
     }
 
     @Override
-    public void onTestEnd(DataRecord testData, Description description) {
+    public void onTestFail(DataRecord testData, Description description, Failure failure) {
+        mIsTestFailed = true;
+    }
+
+    @Override
+    public final void onTestEnd(DataRecord testData, Description description) {
         if (!mIsCollectPerRun) {
-            Map<String, T> metrics = mHelper.getMetrics();
-            for (Map.Entry<String, T> entry : metrics.entrySet()) {
-                testData.addStringMetric(entry.getKey(), entry.getValue().toString());
+            // Skip adding the metrics collected during the test failure
+            // if the skip-metrics-on-test-failure flag is enabled and the
+            // current test failed.
+            if (mSkipTestFailureMetrics && mIsTestFailed) {
+                Log.i(getTag(), "Skipping the metric collection.");
+            } else {
+                // Collect the metrics.
+                Map<String, T> metrics = mHelper.getMetrics();
+                for (Map.Entry<String, T> entry : metrics.entrySet()) {
+                    testData.addStringMetric(entry.getKey(), entry.getValue().toString());
+                }
             }
             mHelper.stopCollecting();
         }
@@ -98,15 +120,14 @@
     }
 
     /**
-     * To add listener specific extra args implement this method in the sub class
-     * and add the listener specific args.
+     * To add listener specific extra args implement this method in the sub class and add the
+     * listener specific args.
      */
     public void setupAdditionalArgs() {
-       // NO-OP by default
+        // NO-OP by default
     }
 
     protected void createHelperInstance(ICollectorHelper helper) {
         mHelper = helper;
     }
-
 }
diff --git a/libraries/device-collectors/src/test/java/android/device/collectors/BaseCollectionListenerTest.java b/libraries/device-collectors/src/test/java/android/device/collectors/BaseCollectionListenerTest.java
index ff7f8f8..9e23918 100644
--- a/libraries/device-collectors/src/test/java/android/device/collectors/BaseCollectionListenerTest.java
+++ b/libraries/device-collectors/src/test/java/android/device/collectors/BaseCollectionListenerTest.java
@@ -25,6 +25,7 @@
 import org.junit.runner.Description;
 import org.junit.runner.Result;
 import org.junit.runner.RunWith;
+import org.junit.runner.notification.Failure;
 import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;
 
@@ -65,8 +66,8 @@
     }
 
     /**
-     * Verify start and stop collection happens only during test run started
-     * and test run ended when per_run option is enabled.
+     * Verify start and stop collection happens only during test run started and test run ended when
+     * per_run option is enabled.
      */
     @Test
     public void testPerRunFlow() throws Exception {
@@ -85,9 +86,8 @@
     }
 
     /**
-     * Verify start and stop collection happens before and after each test
-     * and not during test run started and test run ended when per_run option is
-     * disabled.
+     * Verify start and stop collection happens before and after each test and not during test run
+     * started and test run ended when per_run option is disabled.
      */
     @Test
     public void testPerTestFlow() throws Exception {
@@ -100,18 +100,20 @@
         mListener.onTestStart(mListener.createDataRecord(), FAKE_DESCRIPTION);
         verify(helper, times(1)).startCollecting();
         mListener.onTestEnd(mListener.createDataRecord(), FAKE_DESCRIPTION);
+        verify(helper, times(1)).getMetrics();
         verify(helper, times(1)).stopCollecting();
         mListener.onTestStart(mListener.createDataRecord(), FAKE_DESCRIPTION);
         verify(helper, times(2)).startCollecting();
         mListener.onTestEnd(mListener.createDataRecord(), FAKE_DESCRIPTION);
         verify(helper, times(2)).stopCollecting();
+        verify(helper, times(2)).getMetrics();
         mListener.onTestRunEnd(mListener.createDataRecord(), new Result());
         verify(helper, times(2)).stopCollecting();
     }
 
     /**
-     * Verify start and stop collection happens before and after each test
-     * and not during test run started and test run ended by default.
+     * Verify start and stop collection happens before and after each test and not during test run
+     * started and test run ended by default.
      */
     @Test
     public void testDefaultOptionFlow() throws Exception {
@@ -131,4 +133,109 @@
         mListener.onTestRunEnd(mListener.createDataRecord(), new Result());
         verify(helper, times(2)).stopCollecting();
     }
+
+    /**
+     * Verify metrics are collected when skip on test failure is explicitly set
+     * to false.
+     */
+    @Test
+    public void testPerTestFailureFlowNotCollectMetrics() throws Exception {
+        Bundle b = new Bundle();
+        b.putString(BaseCollectionListener.COLLECT_PER_RUN, "false");
+        b.putString(BaseCollectionListener.SKIP_TEST_FAILURE_METRICS, "false");
+        mListener = initListener(b);
+
+        mListener.onTestRunStart(mListener.createDataRecord(), FAKE_DESCRIPTION);
+        verify(helper, times(0)).startCollecting();
+        mListener.onTestStart(mListener.createDataRecord(), FAKE_DESCRIPTION);
+        verify(helper, times(1)).startCollecting();
+        Failure failureDesc = new Failure(Description.createSuiteDescription("run"),
+                new Exception());
+        mListener.testFailure(failureDesc);
+        mListener.onTestEnd(mListener.createDataRecord(), FAKE_DESCRIPTION);
+        verify(helper, times(1)).getMetrics();
+        verify(helper, times(1)).stopCollecting();
+    }
+
+    /**
+     * Verify the default behaviour is to collect the metrics on test failure.
+     */
+    @Test
+    public void testPerTestFailureFlowDefault() throws Exception {
+        Bundle b = new Bundle();
+        b.putString(BaseCollectionListener.COLLECT_PER_RUN, "false");
+        mListener = initListener(b);
+
+        mListener.onTestRunStart(mListener.createDataRecord(), FAKE_DESCRIPTION);
+        verify(helper, times(0)).startCollecting();
+        mListener.onTestStart(mListener.createDataRecord(), FAKE_DESCRIPTION);
+        verify(helper, times(1)).startCollecting();
+        Failure failureDesc = new Failure(Description.createSuiteDescription("run"),
+                new Exception());
+        mListener.testFailure(failureDesc);
+        mListener.onTestEnd(mListener.createDataRecord(), FAKE_DESCRIPTION);
+        // Metrics should be collected on test failure by default.
+        verify(helper, times(1)).getMetrics();
+        verify(helper, times(1)).stopCollecting();
+    }
+
+    /**
+     * Verify metrics collection is skipped if the skip-on-test-failure flag
+     * is enabled and the test failed.
+     */
+    @Test
+    public void testPerTestFailureSkipMetrics() throws Exception {
+        Bundle b = new Bundle();
+        b.putString(BaseCollectionListener.COLLECT_PER_RUN, "false");
+        b.putString(BaseCollectionListener.SKIP_TEST_FAILURE_METRICS, "true");
+        mListener = initListener(b);
+
+        mListener.onTestRunStart(mListener.createDataRecord(), FAKE_DESCRIPTION);
+        verify(helper, times(0)).startCollecting();
+        mListener.onTestStart(mListener.createDataRecord(), FAKE_DESCRIPTION);
+        verify(helper, times(1)).startCollecting();
+        Failure failureDesc = new Failure(Description.createSuiteDescription("run"),
+                new Exception());
+        mListener.testFailure(failureDesc);
+        mListener.onTestEnd(mListener.createDataRecord(), FAKE_DESCRIPTION);
+        // Metrics should not be collected.
+        verify(helper, times(0)).getMetrics();
+        verify(helper, times(1)).stopCollecting();
+    }
+
+    /**
+     * Verify metrics are not collected for a test failure in between two tests
+     * that succeeded, when skip metrics on test failure is enabled.
+     */
+    @Test
+    public void testInterleavingTestFailureMetricsSkip() throws Exception {
+        Bundle b = new Bundle();
+        b.putString(BaseCollectionListener.COLLECT_PER_RUN, "false");
+        b.putString(BaseCollectionListener.SKIP_TEST_FAILURE_METRICS, "true");
+        mListener = initListener(b);
+
+        mListener.onTestRunStart(mListener.createDataRecord(), FAKE_DESCRIPTION);
+        verify(helper, times(0)).startCollecting();
+        mListener.testStarted(FAKE_DESCRIPTION);
+        verify(helper, times(1)).startCollecting();
+        mListener.onTestEnd(mListener.createDataRecord(), FAKE_DESCRIPTION);
+        verify(helper, times(1)).getMetrics();
+        verify(helper, times(1)).stopCollecting();
+
+        mListener.testStarted(FAKE_DESCRIPTION);
+        verify(helper, times(2)).startCollecting();
+        Failure failureDesc = new Failure(Description.createSuiteDescription("run"),
+                new Exception());
+        mListener.testFailure(failureDesc);
+        mListener.onTestEnd(mListener.createDataRecord(), FAKE_DESCRIPTION);
+        // Metric collection should not be done on failure.
+        verify(helper, times(1)).getMetrics();
+        verify(helper, times(2)).stopCollecting();
+
+        mListener.testStarted(FAKE_DESCRIPTION);
+        verify(helper, times(3)).startCollecting();
+        mListener.onTestEnd(mListener.createDataRecord(), FAKE_DESCRIPTION);
+        verify(helper, times(2)).getMetrics();
+        verify(helper, times(3)).stopCollecting();
+    }
 }