Added test-level support to StatsdListener.

Also streamlined argument and variable names.
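
The run-level option keeps the previous behavior of collecting over the
whole test run; the new test-level option registers and pulls a config
around each test. Example instrumentation invocation (the listener is
typically attached via the "listener" argument; config and package
names here are hypothetical):

  am instrument -w \
      -e listener android.device.collectors.StatsdListener \
      -e statsd-configs-run-level app-startup \
      -e statsd-configs-test-level cpu-time \
      com.example.tests/androidx.test.runner.AndroidJUnitRunner

Run-level reports are written under statsd-reports/run-level/ and named
after their config; test-level reports go under statsd-reports/test-level/
with a "<test class>#<test method>-<iteration>_" prefix.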

Bug: 120569785
Test: atest CollectorDeviceLibPlatformTest (updated)
Change-Id: Id78614bb35dd74c7f3bc1ddf96716ba8db140f12
(cherry picked from commit 9549c28580b27a9182a70c51ce3b274f7f9e7593)
diff --git a/libraries/device-collectors/src/main/platform-collectors/src/android/device/collectors/StatsdListener.java b/libraries/device-collectors/src/main/platform-collectors/src/android/device/collectors/StatsdListener.java
index 8b58662..8652525 100644
--- a/libraries/device-collectors/src/main/platform-collectors/src/android/device/collectors/StatsdListener.java
+++ b/libraries/device-collectors/src/main/platform-collectors/src/android/device/collectors/StatsdListener.java
@@ -50,8 +50,8 @@
 public class StatsdListener extends BaseMetricListener {
     private static final String LOG_TAG = StatsdListener.class.getSimpleName();
 
-    // TODO(harrytczhang): Add option and support for per-test collection.
-    static final String OPTION_CONFIGS_TEST_RUN = "statsd-configs-per-run";
+    static final String OPTION_CONFIGS_RUN_LEVEL = "statsd-configs-run-level";
+    static final String OPTION_CONFIGS_TEST_LEVEL = "statsd-configs-test-level";
 
     // Sub-directory within the test APK's assets/ directory to look for configs.
     static final String CONFIG_SUB_DIRECTORY = "statsd-configs";
@@ -61,29 +61,39 @@
     // Parent directory for all statsd reports.
     static final String REPORT_PATH_ROOT = "statsd-reports";
     // Sub-directory for test run reports.
-    static final String REPORT_PATH_TEST_RUN = "test-run";
+    static final String REPORT_PATH_RUN_LEVEL = "run-level";
+    // Sub-directory for test-level reports.
+    static final String REPORT_PATH_TEST_LEVEL = "test-level";
+    // Prefix template for test-level metric report files.
+    static final String TEST_PREFIX_TEMPLATE = "%s-%d_";
 
-    // Configs used for tests and test runs, respectively.
-    private Map<String, StatsdConfig> mTestRunConfigs = new HashMap<String, StatsdConfig>();
+    // Configs used for the test run and each test, respectively.
+    private Map<String, StatsdConfig> mRunLevelConfigs = new HashMap<String, StatsdConfig>();
+    private Map<String, StatsdConfig> mTestLevelConfigs = new HashMap<String, StatsdConfig>();
 
     // Map to associate config names with their config Ids.
-    private Map<String, Long> mTestRunConfigIds = new HashMap<String, Long>();
+    private Map<String, Long> mRunLevelConfigIds = new HashMap<String, Long>();
+    private Map<String, Long> mTestLevelConfigIds = new HashMap<String, Long>();
+
+    // "Counter" for test iterations, keyed by the display name of each test's description.
+    private Map<String, Integer> mTestIterations = new HashMap<String, Integer>();
 
     // Cached stats manager instance.
     private StatsManager mStatsManager;
 
-    /** Registers the test run configs with {@link StatsManager} before the test run starts. */
+    /** Register the test run configs with {@link StatsManager} before the test run starts. */
     @Override
     public void onTestRunStart(DataRecord runData, Description description) {
         // The argument parsing has to be performed here as the instrumentation has not yet been
         // registered when the constructor of this class is called.
-        mTestRunConfigs.putAll(getConfigsFromOption(OPTION_CONFIGS_TEST_RUN));
+        mRunLevelConfigs.putAll(getConfigsFromOption(OPTION_CONFIGS_RUN_LEVEL));
+        mTestLevelConfigs.putAll(getConfigsFromOption(OPTION_CONFIGS_TEST_LEVEL));
 
-        mTestRunConfigIds = registerConfigsWithStatsManager(mTestRunConfigs);
+        mRunLevelConfigIds = registerConfigsWithStatsManager(mRunLevelConfigs);
     }
 
     /**
-     * Dumps the test run stats reports to the test run subdirectory after the test run ends.
+     * Dump the test run stats reports to the test run subdirectory after the test run ends.
      *
      * <p>Dumps the stats regardless of whether all the tests pass.
      */
@@ -91,12 +101,37 @@
     public void onTestRunEnd(DataRecord runData, Result result) {
         Map<String, File> configReports =
                 pullReportsAndRemoveConfigs(
-                        mTestRunConfigIds, Paths.get(REPORT_PATH_ROOT, REPORT_PATH_TEST_RUN));
+                        mRunLevelConfigIds, Paths.get(REPORT_PATH_ROOT, REPORT_PATH_RUN_LEVEL), "");
         for (String configName : configReports.keySet()) {
             runData.addFileMetric(configName, configReports.get(configName));
         }
     }
 
+    /** Register the test-level configs with {@link StatsManager} before each test starts. */
+    @Override
+    public void onTestStart(DataRecord testData, Description description) {
+        mTestIterations.computeIfPresent(description.getDisplayName(), (name, count) -> count + 1);
+        mTestIterations.computeIfAbsent(description.getDisplayName(), name -> 1);
+        mTestLevelConfigIds = registerConfigsWithStatsManager(mTestLevelConfigs);
+    }
+
+    /**
+     * Dump the test-level stats reports to the test-specific subdirectory after the test ends.
+     *
+     * <p>Dumps the stats regardless of whether the test passes.
+     */
+    @Override
+    public void onTestEnd(DataRecord testData, Description description) {
+        Map<String, File> configReports =
+                pullReportsAndRemoveConfigs(
+                        mTestLevelConfigIds,
+                        Paths.get(REPORT_PATH_ROOT, REPORT_PATH_TEST_LEVEL),
+                        getTestPrefix(description));
+        for (String configName : configReports.keySet()) {
+            testData.addFileMetric(configName, configReports.get(configName));
+        }
+    }
+
     /**
      * Register a set of statsd configs and return their config IDs in a {@link Map}.
      *
@@ -133,10 +168,12 @@
      * @param configIds Map of (config name, config Id)
      * @param directory relative directory on external storage to dump the report in. Each report
      *     will be named after its config.
+     * @param prefix a prefix to prepend to the metric report file name, used to differentiate
+     *     between tests and left empty for the test run.
      * @return Map of (config name, config report file)
      */
     private Map<String, File> pullReportsAndRemoveConfigs(
-            final Map<String, Long> configIds, Path directory) {
+            final Map<String, Long> configIds, Path directory, String prefix) {
         File externalStorage = Environment.getExternalStorageDirectory();
         File saveDirectory = new File(externalStorage, directory.toString());
         if (!saveDirectory.isDirectory()) {
@@ -151,7 +188,7 @@
                 reportList =
                         ConfigMetricsReportList.parseFrom(
                                 getStatsReports(configIds.get(configName)));
-                File reportFile = new File(saveDirectory, configName + PROTO_EXTENSION);
+                File reportFile = new File(saveDirectory, prefix + configName + PROTO_EXTENSION);
                 writeToFile(reportFile, reportList.toByteArray());
                 savedConfigFiles.put(configName, reportFile);
             } catch (StatsUnavailableException e) {
@@ -224,6 +261,22 @@
         return mStatsManager;
     }
 
+    /** Get the prefix for a test + iteration combination to differentiate it from other files. */
+    @VisibleForTesting
+    String getTestPrefix(Description description) {
+        return String.format(
+                TEST_PREFIX_TEMPLATE,
+                formatDescription(description),
+                mTestIterations.get(description.getDisplayName()));
+    }
+
+    /** Format a JUnit {@link Description} to a desired string format. */
+    @VisibleForTesting
+    String formatDescription(Description description) {
+        return String.join(
+                "#", description.getTestClass().getCanonicalName(), description.getMethodName());
+    }
+
     /**
      * Forwarding logic for {@link StatsManager} as it is final and cannot be mocked.
      *
diff --git a/libraries/device-collectors/src/test/platform/android/device/collectors/StatsdListenerTest.java b/libraries/device-collectors/src/test/platform/android/device/collectors/StatsdListenerTest.java
index 411623c..e704a53 100644
--- a/libraries/device-collectors/src/test/platform/android/device/collectors/StatsdListenerTest.java
+++ b/libraries/device-collectors/src/test/platform/android/device/collectors/StatsdListenerTest.java
@@ -47,6 +47,7 @@
 import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.nio.file.Paths;
+import java.util.Arrays;
 import java.util.Map;
 
 /** Unit tests for {@link StatsdListener}. */
@@ -59,6 +60,12 @@
     private static final long CONFIG_ID_1 = 1;
     private static final long CONFIG_ID_2 = 2;
 
+    private static class DummyTest {}
+
+    private static final Class<?> TEST_CLASS = DummyTest.class;
+    private static final String TEST_METHOD_NAME_1 = "testMethodOne";
+    private static final String TEST_METHOD_NAME_2 = "testMethodTwo";
+
     private static final StatsdConfig CONFIG_1 =
             StatsdConfig.newBuilder().setId(CONFIG_ID_1).build();
     private static final StatsdConfig CONFIG_2 =
@@ -98,10 +105,10 @@
 
     /** Test that the collector has correct interactions with statsd for per-run collection. */
     @Test
-    public void testPerRunCollection_statsdInteraction() throws Exception {
+    public void testRunLevelCollection_statsdInteraction() throws Exception {
         doReturn(CONFIG_MAP)
                 .when(mListener)
-                .getConfigsFromOption(eq(StatsdListener.OPTION_CONFIGS_TEST_RUN));
+                .getConfigsFromOption(eq(StatsdListener.OPTION_CONFIGS_RUN_LEVEL));
 
         DataRecord runData = new DataRecord();
         Description description = Description.createSuiteDescription("TestRun");
@@ -119,10 +126,10 @@
 
     /** Test that the collector dumps reports and reports them as metrics. */
     @Test
-    public void testPerRunCollection_metrics() throws Exception {
+    public void testRunLevelCollection_metrics() throws Exception {
         doReturn(CONFIG_MAP)
                 .when(mListener)
-                .getConfigsFromOption(eq(StatsdListener.OPTION_CONFIGS_TEST_RUN));
+                .getConfigsFromOption(eq(StatsdListener.OPTION_CONFIGS_RUN_LEVEL));
 
         // Mock the DataRecord class as its content is not directly visible.
         DataRecord runData = mock(DataRecord.class);
@@ -133,42 +140,291 @@
 
         verify(mListener, times(1))
                 .writeToFile(
-                        getFileNameMatcher(
+                        getExactFileNameMatcher(
                                 Paths.get(
                                                 StatsdListener.REPORT_PATH_ROOT,
-                                                StatsdListener.REPORT_PATH_TEST_RUN)
+                                                StatsdListener.REPORT_PATH_RUN_LEVEL)
                                         .toString(),
                                 CONFIG_NAME_1 + StatsdListener.PROTO_EXTENSION),
                         any());
         verify(runData, times(1))
                 .addFileMetric(
                         eq(CONFIG_NAME_1),
-                        getFileNameMatcher(
+                        getExactFileNameMatcher(
                                 Paths.get(
                                                 StatsdListener.REPORT_PATH_ROOT,
-                                                StatsdListener.REPORT_PATH_TEST_RUN)
+                                                StatsdListener.REPORT_PATH_RUN_LEVEL)
                                         .toString(),
                                 CONFIG_NAME_1 + StatsdListener.PROTO_EXTENSION));
         verify(mListener, times(1))
                 .writeToFile(
-                        getFileNameMatcher(
+                        getExactFileNameMatcher(
                                 Paths.get(
                                                 StatsdListener.REPORT_PATH_ROOT,
-                                                StatsdListener.REPORT_PATH_TEST_RUN)
+                                                StatsdListener.REPORT_PATH_RUN_LEVEL)
                                         .toString(),
                                 CONFIG_NAME_2 + StatsdListener.PROTO_EXTENSION),
                         any());
         verify(runData, times(1))
                 .addFileMetric(
                         eq(CONFIG_NAME_2),
-                        getFileNameMatcher(
+                        getExactFileNameMatcher(
                                 Paths.get(
                                                 StatsdListener.REPORT_PATH_ROOT,
-                                                StatsdListener.REPORT_PATH_TEST_RUN)
+                                                StatsdListener.REPORT_PATH_RUN_LEVEL)
                                         .toString(),
                                 CONFIG_NAME_2 + StatsdListener.PROTO_EXTENSION));
     }
 
+    /** Test that the collector has correct interactions with statsd for per-test collection. */
+    @Test
+    public void testTestLevelCollection_statsdInteraction() throws Exception {
+        doReturn(CONFIG_MAP)
+                .when(mListener)
+                .getConfigsFromOption(eq(StatsdListener.OPTION_CONFIGS_TEST_LEVEL));
+
+        DataRecord testData = new DataRecord();
+        Description description = Description.createTestDescription(TEST_CLASS, TEST_METHOD_NAME_1);
+
+        // onTestRunStart(...) has to be called because the arguments are parsed here.
+        mListener.onTestRunStart(
+                new DataRecord(), Description.createSuiteDescription("Placeholder"));
+
+        mListener.onTestStart(testData, description);
+        verify(mListener, times(1)).addStatsConfig(eq(CONFIG_ID_1), eq(CONFIG_1.toByteArray()));
+        verify(mListener, times(1)).addStatsConfig(eq(CONFIG_ID_2), eq(CONFIG_2.toByteArray()));
+
+        mListener.onTestEnd(testData, description);
+        verify(mListener, times(1)).getStatsReports(eq(CONFIG_ID_1));
+        verify(mListener, times(1)).getStatsReports(eq(CONFIG_ID_2));
+        verify(mListener, times(1)).removeStatsConfig(eq(CONFIG_ID_1));
+        verify(mListener, times(1)).removeStatsConfig(eq(CONFIG_ID_2));
+
+        mListener.onTestRunEnd(new DataRecord(), new Result());
+    }
+
+    /** Test that the collector dumps reports and reports them as metrics for each test. */
+    @Test
+    public void testTestLevelCollection_metrics() throws Exception {
+        doReturn(CONFIG_MAP)
+                .when(mListener)
+                .getConfigsFromOption(eq(StatsdListener.OPTION_CONFIGS_TEST_LEVEL));
+
+        DataRecord testData = mock(DataRecord.class);
+        Description description = Description.createTestDescription(TEST_CLASS, TEST_METHOD_NAME_1);
+
+        // onTestRunStart(...) has to be called because the arguments are parsed here.
+        mListener.onTestRunStart(
+                new DataRecord(), Description.createSuiteDescription("Placeholder"));
+
+        mListener.onTestStart(testData, description);
+        mListener.onTestEnd(testData, description);
+
+        verify(mListener, times(1))
+                .writeToFile(
+                        getPartialFileNameMatcher(
+                                Paths.get(
+                                                StatsdListener.REPORT_PATH_ROOT,
+                                                StatsdListener.REPORT_PATH_TEST_LEVEL)
+                                        .toString(),
+                                TEST_CLASS.getCanonicalName(),
+                                TEST_METHOD_NAME_1,
+                                CONFIG_NAME_1,
+                                StatsdListener.PROTO_EXTENSION),
+                        any());
+        verify(testData, times(1))
+                .addFileMetric(
+                        eq(CONFIG_NAME_1),
+                        getPartialFileNameMatcher(
+                                Paths.get(
+                                                StatsdListener.REPORT_PATH_ROOT,
+                                                StatsdListener.REPORT_PATH_TEST_LEVEL)
+                                        .toString(),
+                                TEST_CLASS.getCanonicalName(),
+                                TEST_METHOD_NAME_1,
+                                CONFIG_NAME_1,
+                                StatsdListener.PROTO_EXTENSION));
+        verify(mListener, times(1))
+                .writeToFile(
+                        getPartialFileNameMatcher(
+                                Paths.get(
+                                                StatsdListener.REPORT_PATH_ROOT,
+                                                StatsdListener.REPORT_PATH_TEST_LEVEL)
+                                        .toString(),
+                                TEST_CLASS.getCanonicalName(),
+                                TEST_METHOD_NAME_1,
+                                CONFIG_NAME_2,
+                                StatsdListener.PROTO_EXTENSION),
+                        any());
+        verify(testData, times(1))
+                .addFileMetric(
+                        eq(CONFIG_NAME_2),
+                        getPartialFileNameMatcher(
+                                Paths.get(
+                                                StatsdListener.REPORT_PATH_ROOT,
+                                                StatsdListener.REPORT_PATH_TEST_LEVEL)
+                                        .toString(),
+                                TEST_CLASS.getCanonicalName(),
+                                TEST_METHOD_NAME_1,
+                                CONFIG_NAME_2,
+                                StatsdListener.PROTO_EXTENSION));
+
+        mListener.onTestRunEnd(new DataRecord(), new Result());
+    }
+
+    /** Test that the collector handles multiple tests correctly for per-test collection. */
+    @Test
+    public void testTestLevelCollection_multipleTests() throws Exception {
+        doReturn(CONFIG_MAP)
+                .when(mListener)
+                .getConfigsFromOption(eq(StatsdListener.OPTION_CONFIGS_TEST_LEVEL));
+
+        // onTestRunStart(...) has to be called because the arguments are parsed here.
+        mListener.onTestRunStart(
+                new DataRecord(), Description.createSuiteDescription("Placeholder"));
+
+        DataRecord testData1 = mock(DataRecord.class);
+        Description description1 =
+                Description.createTestDescription(TEST_CLASS, TEST_METHOD_NAME_1);
+
+        mListener.onTestStart(testData1, description1);
+        mListener.onTestEnd(testData1, description1);
+
+        verify(testData1, times(1))
+                .addFileMetric(
+                        eq(CONFIG_NAME_1),
+                        getPartialFileNameMatcher(
+                                Paths.get(
+                                                StatsdListener.REPORT_PATH_ROOT,
+                                                StatsdListener.REPORT_PATH_TEST_LEVEL)
+                                        .toString(),
+                                TEST_CLASS.getCanonicalName(),
+                                TEST_METHOD_NAME_1,
+                                String.valueOf(1)));
+
+        DataRecord testData2 = mock(DataRecord.class);
+        Description description2 =
+                Description.createTestDescription(TEST_CLASS, TEST_METHOD_NAME_2);
+
+        mListener.onTestStart(testData2, description2);
+        mListener.onTestEnd(testData2, description2);
+
+        verify(testData2, times(1))
+                .addFileMetric(
+                        eq(CONFIG_NAME_1),
+                        getPartialFileNameMatcher(
+                                Paths.get(
+                                                StatsdListener.REPORT_PATH_ROOT,
+                                                StatsdListener.REPORT_PATH_TEST_LEVEL)
+                                        .toString(),
+                                TEST_CLASS.getCanonicalName(),
+                                TEST_METHOD_NAME_2,
+                                String.valueOf(1)));
+
+        mListener.onTestRunEnd(new DataRecord(), new Result());
+    }
+
+    /** Test that the collector handles multiple iterations correctly for per-test collection. */
+    @Test
+    public void testTestLevelCollection_multipleIterations() throws Exception {
+        doReturn(CONFIG_MAP)
+                .when(mListener)
+                .getConfigsFromOption(eq(StatsdListener.OPTION_CONFIGS_TEST_LEVEL));
+
+        // onTestRunStart(...) has to be called because the arguments are parsed here.
+        mListener.onTestRunStart(
+                new DataRecord(), Description.createSuiteDescription("Placeholder"));
+
+        DataRecord testData1 = mock(DataRecord.class);
+        Description description1 =
+                Description.createTestDescription(TEST_CLASS, TEST_METHOD_NAME_1);
+
+        mListener.onTestStart(testData1, description1);
+        mListener.onTestEnd(testData1, description1);
+
+        // The metric file name should contain the iteration number (1).
+        verify(testData1, times(1))
+                .addFileMetric(
+                        eq(CONFIG_NAME_1),
+                        getPartialFileNameMatcher(
+                                Paths.get(
+                                                StatsdListener.REPORT_PATH_ROOT,
+                                                StatsdListener.REPORT_PATH_TEST_LEVEL)
+                                        .toString(),
+                                TEST_CLASS.getCanonicalName(),
+                                TEST_METHOD_NAME_1,
+                                String.valueOf(1)));
+
+        DataRecord testData2 = mock(DataRecord.class);
+        Description description2 =
+                Description.createTestDescription(TEST_CLASS, TEST_METHOD_NAME_1);
+
+        mListener.onTestStart(testData2, description2);
+        mListener.onTestEnd(testData2, description2);
+
+        // The metric file name should contain the iteration number (2).
+        verify(testData2, times(1))
+                .addFileMetric(
+                        eq(CONFIG_NAME_1),
+                        getPartialFileNameMatcher(
+                                Paths.get(
+                                                StatsdListener.REPORT_PATH_ROOT,
+                                                StatsdListener.REPORT_PATH_TEST_LEVEL)
+                                        .toString(),
+                                TEST_CLASS.getCanonicalName(),
+                                TEST_METHOD_NAME_1,
+                                String.valueOf(2)));
+
+        mListener.onTestRunEnd(new DataRecord(), new Result());
+    }
+
+    /** Test that the collector can perform both run- and test-level collection in the same run. */
+    @Test
+    public void testRunAndTestLevelCollection() throws Exception {
+        doReturn(CONFIG_MAP)
+                .when(mListener)
+                .getConfigsFromOption(eq(StatsdListener.OPTION_CONFIGS_RUN_LEVEL));
+        doReturn(CONFIG_MAP)
+                .when(mListener)
+                .getConfigsFromOption(eq(StatsdListener.OPTION_CONFIGS_TEST_LEVEL));
+
+        DataRecord runData = mock(DataRecord.class);
+        Description runDescription = Description.createSuiteDescription("TestRun");
+
+        mListener.onTestRunStart(runData, runDescription);
+
+        DataRecord testData = mock(DataRecord.class);
+        Description testDescription =
+                Description.createTestDescription(TEST_CLASS, TEST_METHOD_NAME_1);
+
+        mListener.onTestStart(testData, testDescription);
+        mListener.onTestEnd(testData, testDescription);
+
+        verify(testData, times(1))
+                .addFileMetric(
+                        eq(CONFIG_NAME_1),
+                        getPartialFileNameMatcher(
+                                Paths.get(
+                                                StatsdListener.REPORT_PATH_ROOT,
+                                                StatsdListener.REPORT_PATH_TEST_LEVEL)
+                                        .toString(),
+                                TEST_CLASS.getCanonicalName(),
+                                TEST_METHOD_NAME_1,
+                                String.valueOf(1)));
+
+        mListener.onTestRunEnd(runData, new Result());
+
+        verify(runData, times(1))
+                .addFileMetric(
+                        eq(CONFIG_NAME_1),
+                        getExactFileNameMatcher(
+                                Paths.get(
+                                                StatsdListener.REPORT_PATH_ROOT,
+                                                StatsdListener.REPORT_PATH_RUN_LEVEL)
+                                        .toString(),
+                                CONFIG_NAME_1 + StatsdListener.PROTO_EXTENSION));
+    }
+
     /** Test that the collector parses the configs from arguments correctly for valid configs. */
     @Test
     public void testParsingConfigFromArguments_validConfig() throws Exception {
@@ -185,12 +441,12 @@
 
         Bundle args = new Bundle();
         args.putString(
-                StatsdListener.OPTION_CONFIGS_TEST_RUN,
+                StatsdListener.OPTION_CONFIGS_RUN_LEVEL,
                 String.join(",", CONFIG_NAME_1, CONFIG_NAME_2));
         doReturn(args).when(mListener).getArguments();
 
         Map<String, StatsdConfig> configs =
-                mListener.getConfigsFromOption(StatsdListener.OPTION_CONFIGS_TEST_RUN);
+                mListener.getConfigsFromOption(StatsdListener.OPTION_CONFIGS_RUN_LEVEL);
         Assert.assertTrue(configs.containsKey(CONFIG_NAME_1));
         Assert.assertEquals(configs.get(CONFIG_NAME_1).getId(), CONFIG_ID_1);
         Assert.assertTrue(configs.containsKey(CONFIG_NAME_2));
@@ -208,24 +464,24 @@
                 .openConfigWithAssetManager(any(AssetManager.class), eq(CONFIG_NAME_1));
 
         Bundle args = new Bundle();
-        args.putString(StatsdListener.OPTION_CONFIGS_TEST_RUN, CONFIG_NAME_1);
+        args.putString(StatsdListener.OPTION_CONFIGS_RUN_LEVEL, CONFIG_NAME_1);
         doReturn(args).when(mListener).getArguments();
 
         mExpectedException.expectMessage("Cannot parse");
         Map<String, StatsdConfig> configs =
-                mListener.getConfigsFromOption(StatsdListener.OPTION_CONFIGS_TEST_RUN);
+                mListener.getConfigsFromOption(StatsdListener.OPTION_CONFIGS_RUN_LEVEL);
     }
 
     /** Test that the collector fails and throws the right exception for a nonexistent config. */
     @Test
     public void testParsingConfigFromArguments_nonexistentConfig() {
         Bundle args = new Bundle();
-        args.putString(StatsdListener.OPTION_CONFIGS_TEST_RUN, "nah");
+        args.putString(StatsdListener.OPTION_CONFIGS_RUN_LEVEL, "nah");
         doReturn(args).when(mListener).getArguments();
 
         mExpectedException.expectMessage("does not exist");
         Map<String, StatsdConfig> configs =
-                mListener.getConfigsFromOption(StatsdListener.OPTION_CONFIGS_TEST_RUN);
+                mListener.getConfigsFromOption(StatsdListener.OPTION_CONFIGS_RUN_LEVEL);
     }
 
     /** Test that the collector has no effect when no config arguments are supplied. */
@@ -246,7 +502,19 @@
         verify(mListener, never()).removeStatsConfig(anyLong());
     }
 
-    private File getFileNameMatcher(String parentName, String filename) {
+    /** Returns a Mockito argument matcher that matches the exact file name. */
+    private File getExactFileNameMatcher(String parentName, String filename) {
         return argThat(f -> f.getParent().contains(parentName) && f.getName().equals(filename));
     }
+
+    /** Returns a Mockito argument matcher that matches a file name to one or more substrings. */
+    private File getPartialFileNameMatcher(
+            String parentName, String component, String... moreComponents) {
+        return argThat(
+                f ->
+                        f.getParent().contains(parentName)
+                                && f.getName().contains(component)
+                                && Arrays.stream(moreComponents)
+                                        .allMatch(c -> f.getName().contains(c)));
+    }
 }