Snap for 12359238 from e504bed59adbf9b4a0df7fac4e82165f37ae7a5e to android15-tests-release
Change-Id: I88e6c0d7dadac91fdd351c1ffd4d72dbb322104d
diff --git a/adevice/src/device.rs b/adevice/src/device.rs
index 50e1239..32700d4 100644
--- a/adevice/src/device.rs
+++ b/adevice/src/device.rs
@@ -64,13 +64,13 @@
/// First ask adb to wait for the device, then poll for sys.boot_completed on the device.
fn wait(&self, profiler: &mut Profiler) -> Result<String> {
// Typically the reboot on acloud is 25 secs
- // And another 50 for fully booted
- // Wait up to 3 times as long for either'
+ // It can take up to 130 seconds for a full boot.
+ // Setting timeouts to have at least 2x that.
progress::start(" * [1/2] Waiting for device to connect.");
time!(
{
let args = self.adjust_adb_args(&["wait-for-device".to_string()]);
- self.wait_for_adb_with_timeout(&args, Duration::from_secs(70))?;
+ self.wait_for_adb_with_timeout(&args, Duration::from_secs(75))?;
},
profiler.wait_for_device
);
@@ -83,7 +83,7 @@
"shell".to_string(),
"while [[ -z $(getprop sys.boot_completed) ]]; do sleep 1; done".to_string(),
]);
- let result = self.wait_for_adb_with_timeout(&args, Duration::from_secs(100));
+ let result = self.wait_for_adb_with_timeout(&args, Duration::from_secs(260));
progress::stop();
result
},
diff --git a/adevice/src/metrics.rs b/adevice/src/metrics.rs
index bddab2a..b091a5c 100644
--- a/adevice/src/metrics.rs
+++ b/adevice/src/metrics.rs
@@ -43,6 +43,7 @@
events: Vec<LogEvent>,
user: String,
invocation_id: String,
+ hostname: String,
}
impl MetricSender for Metrics {
@@ -51,6 +52,7 @@
start_event.set_command_line(command_line.to_string());
start_event.set_source_root(source_root.to_string());
start_event.set_target(env::var(ENV_TARGET).unwrap_or("".to_string()));
+ start_event.set_hostname(self.hostname.to_string());
let mut event = self.default_log_event();
event.set_adevice_start_event(start_event);
@@ -147,6 +149,7 @@
events: Vec::new(),
user: env::var(ENV_USER).unwrap_or("".to_string()),
invocation_id: Uuid::new_v4().to_string(),
+ hostname: get_hostname(),
}
}
}
@@ -196,6 +199,19 @@
}
}
+fn get_hostname() -> String {
+ Command::new("hostname").output().map_or_else(
+ |_err| String::new(),
+ |output| {
+ if output.status.success() {
+ String::from_utf8_lossy(&output.stdout).trim().to_string()
+ } else {
+ String::new()
+ }
+ },
+ )
+}
+
impl Drop for Metrics {
fn drop(&mut self) {
match self.send() {
diff --git a/adevice/src/protos/user_log.proto b/adevice/src/protos/user_log.proto
index 1b7e20d..e936abf 100644
--- a/adevice/src/protos/user_log.proto
+++ b/adevice/src/protos/user_log.proto
@@ -18,6 +18,7 @@
optional string os = 4;
optional string target = 5;
optional string source_root = 6;
+ optional string hostname = 7;
}
// Occurs when adevice exits for any reason
message AdeviceExitEvent {
diff --git a/atest/arg_parser.py b/atest/arg_parser.py
index c698f58..27143d3 100644
--- a/atest/arg_parser.py
+++ b/atest/arg_parser.py
@@ -432,9 +432,10 @@
# TODO(b/326141263): TradeFed to support wildcard in include-filter for
# parametrized JarHostTests
help=(
- 'Run only the tests which are specified with this option. '
- 'Filtering by method and with wildcard is not yet supported for '
- 'all test types.'
+ 'Run only the tests which are specified with this option. This value'
+ ' is passed directly to the testing framework so you should use'
+ " appropriate syntax (e.g. JUnit supports regex, while python's"
+ ' unittest supports fnmatch syntax).'
),
)
parser.add_argument(
diff --git a/atest/asuite_lib_test/Android.bp b/atest/asuite_lib_test/Android.bp
index 7f0a7cb..633d9a9 100644
--- a/atest/asuite_lib_test/Android.bp
+++ b/atest/asuite_lib_test/Android.bp
@@ -27,7 +27,6 @@
pkg_path: "asuite_test",
srcs: [
"asuite_lib_run_tests.py",
- "asuite_cc_client_test.py",
],
libs: [
"asuite_cc_client",
diff --git a/atest/asuite_lib_test/asuite_cc_client_test.py b/atest/asuite_lib_test/asuite_cc_client_test.py
deleted file mode 100644
index e57a372..0000000
--- a/atest/asuite_lib_test/asuite_cc_client_test.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2019, The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-"""Unittest for atest_execution_info."""
-
-import unittest
-
-
-class AsuiteCCLibTest(unittest.TestCase):
- """Tests for verify asuite_metrics libs"""
-
- def test_import_asuite_cc_lib(self):
- """Test asuite_cc_lib."""
- # pylint: disable=unused-variable
- # pylint: disable=import-outside-toplevel
- # pylint: disable=unused-import
- from atest.metrics import metrics
- from atest.metrics import metrics_base
- from atest.metrics import metrics_utils
-
- # TODO (b/132602907): Add the real usage for checking if metrics pass or
- # fail.
- metrics_base.MetricsBase.tool_name = 'MyTestTool'
- metrics_utils.get_start_time()
- metrics.AtestStartEvent(
- command_line='test_command',
- test_references='test_reference',
- cwd='test_cwd',
- os='test_os',
- )
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/atest/atest_main.py b/atest/atest_main.py
index 2eb5956..3b6eceb 100755
--- a/atest/atest_main.py
+++ b/atest/atest_main.py
@@ -908,21 +908,6 @@
return max_count
-def _send_start_event(argv: List[Any], tests: List[str]):
- """Send AtestStartEvent to metrics"""
- os_pyver = (
- f'{platform.platform()}:{platform.python_version()}/'
- f'{atest_utils.get_manifest_branch(True)}:'
- f'{atest_utils.get_atest_version()}'
- )
- metrics.AtestStartEvent(
- command_line=' '.join(argv),
- test_references=tests,
- cwd=os.getcwd(),
- os=os_pyver,
- )
-
-
def _get_acloud_proc_and_log(
args: argparse.ArgumentParser, results_dir: str
) -> Tuple[Any, Any]:
@@ -1006,8 +991,18 @@
set_build_output_mode(args.build_output)
_validate_args(args)
- metrics_utils.get_start_time()
- _send_start_event(argv, args.tests)
+ metrics_utils.send_start_event(
+ command_line=' '.join(argv),
+ test_references=args.tests,
+ cwd=os.getcwd(),
+ operating_system=(
+ f'{platform.platform()}:{platform.python_version()}/'
+ f'{atest_utils.get_manifest_branch(True)}:'
+ f'{atest_utils.get_atest_version()}'
+ ),
+ source_root=os.environ.get('ANDROID_BUILD_TOP', ''),
+ hostname=platform.node(),
+ )
_non_action_validator(args)
proc_acloud, report_file = _get_acloud_proc_and_log(args, results_dir)
diff --git a/atest/atest_utils.py b/atest/atest_utils.py
index e21071c..91ec36b 100644
--- a/atest/atest_utils.py
+++ b/atest/atest_utils.py
@@ -221,7 +221,12 @@
)
if dump:
return [make_cmd, '--dumpvar-mode', 'report_config']
- return [make_cmd, '--make-mode', 'WRAPPER_TOOL=atest']
+ return [
+ make_cmd,
+ '--make-mode',
+ 'WRAPPER_TOOL=atest',
+ f'ATEST_RUN_ID={metrics.get_run_id()}',
+ ]
def _capture_fail_section(full_log):
diff --git a/atest/bazel/resources/WORKSPACE b/atest/bazel/resources/WORKSPACE
index 225c0c7..5d1ef35 100644
--- a/atest/bazel/resources/WORKSPACE
+++ b/atest/bazel/resources/WORKSPACE
@@ -16,6 +16,10 @@
path = "external/bazelbuild-rules_python",
)
+load("@rules_python//python:repositories.bzl", "py_repositories")
+
+py_repositories()
+
local_repository(
name = "rules_java",
path = "external/bazelbuild-rules_java",
diff --git a/atest/metrics/metrics.py b/atest/metrics/metrics.py
index ab65498..01aa211 100644
--- a/atest/metrics/metrics.py
+++ b/atest/metrics/metrics.py
@@ -34,6 +34,8 @@
test_references = constants.INTERNAL
cwd = constants.INTERNAL
os = constants.INTERNAL
+ source_root = constants.INTERNAL
+ hostname = constants.INTERNAL
class AtestExitEvent(metrics_base.MetricsBase):
diff --git a/atest/metrics/metrics_base.py b/atest/metrics/metrics_base.py
index a6cfa2f..370af20 100644
--- a/atest/metrics/metrics_base.py
+++ b/atest/metrics/metrics_base.py
@@ -179,6 +179,6 @@
A clientanalytics_pb2.LogEvent instance.
"""
log_event = clientanalytics_pb2.LogEvent()
- log_event.event_time_ms = int((time.time() - random.randint(1, 600)) * 1000)
+ log_event.event_time_ms = int(time.time() * 1000)
log_event.source_extension = atest_event.SerializeToString()
return log_event
diff --git a/atest/metrics/metrics_utils.py b/atest/metrics/metrics_utils.py
index a1944fb..5615c69 100644
--- a/atest/metrics/metrics_utils.py
+++ b/atest/metrics/metrics_utils.py
@@ -14,8 +14,6 @@
"""Utility functions for metrics."""
-import os
-import platform
import sys
import time
import traceback
@@ -23,6 +21,7 @@
from atest.metrics import metrics
from atest.metrics import metrics_base
+
CONTENT_LICENSES_URL = 'https://source.android.com/setup/start/licenses'
CONTRIBUTOR_AGREEMENT_URL = {
'INTERNAL': 'https://cla.developers.google.com/',
@@ -112,34 +111,26 @@
def send_start_event(
- tool_name,
- command_line='',
- test_references='',
- cwd=None,
- operating_system=None,
+ command_line, test_references, cwd, operating_system, source_root, hostname
):
"""Log start event of clearcut.
Args:
- tool_name: A string of the asuite product name.
command_line: A string of the user input command.
test_references: A string of the input tests.
cwd: A string of current path.
operating_system: A string of user's operating system.
+ source_root: A string of the Android build source.
+ hostname: A string of the host workstation name.
"""
- if not cwd:
- cwd = os.getcwd()
- if not operating_system:
- operating_system = platform.platform()
- # Without tool_name information, asuite's clearcut client will not send
- # event to server.
- metrics_base.MetricsBase.tool_name = tool_name
get_start_time()
metrics.AtestStartEvent(
command_line=command_line,
test_references=test_references,
cwd=cwd,
os=operating_system,
+ source_root=source_root,
+ hostname=hostname,
)
diff --git a/atest/metrics/metrics_utils_unittest.py b/atest/metrics/metrics_utils_unittest.py
index c4463db..2d6be11 100755
--- a/atest/metrics/metrics_utils_unittest.py
+++ b/atest/metrics/metrics_utils_unittest.py
@@ -23,7 +23,9 @@
import unittest
from unittest import mock
+from atest.metrics import metrics_base
from atest.metrics import metrics_utils
+from atest.proto import internal_user_log_pb2
class MetricsUtilsUnittests(unittest.TestCase):
@@ -65,3 +67,49 @@
metrics_utils.print_data_collection_notice()
sys.stdout = sys.__stdout__
self.assertEqual(capture_output.getvalue(), notice_str)
+
+ def test_send_start_event(self):
+ metrics_base.MetricsBase.tool_name = 'test_tool'
+ metrics_base.MetricsBase.user_type = metrics_base.INTERNAL_USER
+ fake_cc = FakeClearcutClient()
+ metrics_base.MetricsBase.cc = fake_cc
+
+ metrics_utils.send_start_event(
+ command_line='test_command',
+ test_references=['test'],
+ cwd='cwd',
+ operating_system='test system',
+ source_root='test_source',
+ hostname='test_host',
+ )
+
+ logged_events = fake_cc.get_logged_events()
+ expected_start_event = (
+ internal_user_log_pb2.AtestLogEventInternal.AtestStartEvent(
+ command_line='test_command',
+ test_references=['test'],
+ cwd='cwd',
+ os='test system',
+ source_root='test_source',
+ hostname='test_host',
+ )
+ )
+ self.assertEqual(len(logged_events), 1)
+ self.assertEqual(
+ expected_start_event,
+ internal_user_log_pb2.AtestLogEventInternal.FromString(
+ logged_events[0].source_extension
+ ).atest_start_event,
+ )
+
+
+class FakeClearcutClient:
+
+ def __init__(self):
+ self.logged_event = []
+
+ def log(self, event):
+ self.logged_event.extend([event])
+
+ def get_logged_events(self):
+ return self.logged_event
diff --git a/atest/proto/internal_user_log.proto b/atest/proto/internal_user_log.proto
index 0da92ff..f4bb649 100644
--- a/atest/proto/internal_user_log.proto
+++ b/atest/proto/internal_user_log.proto
@@ -16,6 +16,8 @@
repeated string test_references = 2;
optional string cwd = 3;
optional string os = 4;
+ optional string source_root = 5;
+ optional string hostname = 6;
}
// Occurs when atest exits for any reason
@@ -75,7 +77,7 @@
optional UserType user_type = 3;
optional string tool_name = 10;
optional string sub_tool_name = 12;
- optional string user_name = 13; // ldap of the internal users
+ optional string user_name = 13 [deprecated = true];
oneof event {
AtestStartEvent atest_start_event = 4;
AtestExitEvent atest_exit_event = 5;
diff --git a/atest/test_runners/atest_tf_test_runner.py b/atest/test_runners/atest_tf_test_runner.py
index e0a9824..2c31fab 100644
--- a/atest/test_runners/atest_tf_test_runner.py
+++ b/atest/test_runners/atest_tf_test_runner.py
@@ -46,6 +46,7 @@
from atest.metrics import metrics
from atest.test_finders import test_finder_utils
from atest.test_finders import test_info
+from atest.test_finders.test_info import TestInfo
from atest.test_runner_invocation import TestRunnerInvocation
from atest.test_runners import test_runner_base as trb
from atest.test_runners.event_handler import EventHandler
@@ -848,7 +849,7 @@
else self._TF_DEVICE_TEST_TEMPLATE
)
- args = self._create_test_args(test_infos)
+ args = self._create_test_args(test_infos, extra_args)
# Create a copy of args as more args could be added to the list.
test_args = list(args)
@@ -1061,11 +1062,14 @@
return False
return True
- def _create_test_args(self, test_infos):
+ def _create_test_args(
+ self, test_infos: list[TestInfo], extra_args: Dict[str, Any]
+ ) -> list[str]:
"""Compile TF command line args based on the given test infos.
Args:
test_infos: A list of TestInfo instances.
+ extra_args: A Dict of extra args for test runners to utilize.
Returns: A list of TF arguments to run the tests.
"""
@@ -1122,7 +1126,11 @@
args.extend([constants.TF_MODULE_ARG, module_arg])
# Add ATest include filter
- args.extend(get_include_filter(test_infos))
+ args.extend(
+ get_include_filter(
+ test_infos, extra_args.get(constants.TEST_FILTER, None)
+ )
+ )
# TODO (b/141090547) Pass the config path to TF to load configs.
# Compile option in TF if finder is not INTEGRATION or not set.
@@ -1495,11 +1503,15 @@
return supported_args, unsupported_args
-def get_include_filter(test_infos: List[test_info.TestInfo]) -> List[str]:
+def get_include_filter(
+ test_infos: List[test_info.TestInfo], test_filter_arg: str = None
+) -> List[str]:
"""Generate a list of tradefed filter argument from TestInfos.
Args:
test_infos: a List of TestInfo object.
+ test_filter_arg: the value of the desired test filter passed by the user
+ using the --test-filter flag.
The include filter pattern looks like:
--atest-include-filter <module-name>:<include-filter-value>
@@ -1507,19 +1519,27 @@
Returns:
List of Tradefed command args.
"""
- instrumentation_filters = []
tf_args = []
for info in test_infos:
+ # If a --test-filter is specified by the user, use the test filter in addition to the
+ # fully qualified module:test#method name for each test.
+ if test_filter_arg:
+ formatted_test_filter_arg = (
+ constants.TF_ATEST_INCLUDE_FILTER_VALUE_FMT.format(
+ test_name=info.test_name, test_filter=test_filter_arg
+ )
+ )
+ tf_args.extend(
+ [constants.TF_ATEST_INCLUDE_FILTER, formatted_test_filter_arg]
+ )
filters = []
for test_info_filter in info.data.get(constants.TI_FILTER, []):
filters.extend(test_info_filter.to_list_of_tf_strings())
-
for test_filter in filters:
filter_arg = constants.TF_ATEST_INCLUDE_FILTER_VALUE_FMT.format(
test_name=info.test_name, test_filter=test_filter
)
tf_args.extend([constants.TF_ATEST_INCLUDE_FILTER, filter_arg])
-
return tf_args
@@ -1619,8 +1639,7 @@
# can't determine whether they require device update or not. So that we
# treat them as they require device update to avoid disabling the device
# update mistakenly.
- return not self._info or not module_info.ModuleInfo.is_unit_test(
- self._info)
+ return not self._info or not module_info.ModuleInfo.is_unit_test(self._info)
def _get_test_build_targets(self) -> Set[Target]:
module_name = self._info[constants.MODULE_INFO_ID]
diff --git a/atest/test_runners/atest_tf_test_runner_unittest.py b/atest/test_runners/atest_tf_test_runner_unittest.py
index d93e596..bea7c72 100755
--- a/atest/test_runners/atest_tf_test_runner_unittest.py
+++ b/atest/test_runners/atest_tf_test_runner_unittest.py
@@ -793,26 +793,41 @@
# Only compile '--skip-loading-config-jar' in TF if it's not
# INTEGRATION finder or the finder property isn't set.
mock_config.return_value = '', ''
- args = self.tr._create_test_args([MOD_INFO])
+ args = self.tr._create_test_args([MOD_INFO], {})
self.assertTrue(constants.TF_SKIP_LOADING_CONFIG_JAR in args)
- args = self.tr._create_test_args([INT_INFO])
+ args = self.tr._create_test_args([INT_INFO], {})
self.assertFalse(constants.TF_SKIP_LOADING_CONFIG_JAR in args)
- args = self.tr._create_test_args([MOD_INFO_NO_TEST_FINDER])
+ args = self.tr._create_test_args([MOD_INFO_NO_TEST_FINDER], {})
self.assertFalse(constants.TF_SKIP_LOADING_CONFIG_JAR in args)
- args = self.tr._create_test_args([MOD_INFO_NO_TEST_FINDER, INT_INFO])
+ args = self.tr._create_test_args([MOD_INFO_NO_TEST_FINDER, INT_INFO], {})
self.assertFalse(constants.TF_SKIP_LOADING_CONFIG_JAR in args)
- args = self.tr._create_test_args([MOD_INFO_NO_TEST_FINDER])
+ args = self.tr._create_test_args([MOD_INFO_NO_TEST_FINDER], {})
self.assertFalse(constants.TF_SKIP_LOADING_CONFIG_JAR in args)
args = self.tr._create_test_args(
- [MOD_INFO_NO_TEST_FINDER, INT_INFO, MOD_INFO]
+ [MOD_INFO_NO_TEST_FINDER, INT_INFO, MOD_INFO], {}
)
self.assertFalse(constants.TF_SKIP_LOADING_CONFIG_JAR in args)
+ @mock.patch.object(test_finder_utils, 'get_test_config_and_srcs')
+ def test_create_test_args_with_test_filter_appends_to_atest_include_filter(
+ self, mock_config
+ ):
+ mock_config.return_value = '', ''
+ args = self.tr._create_test_args(
+ [MOD_INFO], {constants.TEST_FILTER: '*MyTestFilter*'}
+ )
+
+ self.assertEqual(args.count(constants.TF_ATEST_INCLUDE_FILTER), 1)
+ self.assertEqual(
+ args[args.index(constants.TF_ATEST_INCLUDE_FILTER) + 1],
+ uc.MODULE_NAME + ':*MyTestFilter*',
+ )
+
@mock.patch.object(
atf_tr.AtestTradefedTestRunner,
'_is_all_tests_parameter_auto_enabled',
@@ -1029,7 +1044,7 @@
"""Test _create_test_args method with auto enabled parameter config."""
# Should have --m on args and should not have --include-filter.
mock_config.return_value = '', ''
- args = self.tr._create_test_args([MOD_INFO])
+ args = self.tr._create_test_args([MOD_INFO], {})
self.assertTrue(constants.TF_MODULE_FILTER in args)
self.assertFalse(constants.TF_INCLUDE_FILTER in args)
@@ -1197,7 +1212,7 @@
"""
# Should not --m on args and should have --include-filter.
mock_config.return_value = '', ''
- args = self.tr._create_test_args([MOD_INFO])
+ args = self.tr._create_test_args([MOD_INFO], {})
self.assertFalse(constants.TF_MODULE_FILTER in args)
self.assertTrue(constants.TF_INCLUDE_FILTER in args)