Snap for 6533464 from d69477698ad757bcd50f384b3708c1ccdc20e2ae to sdk-release
Change-Id: I5d90ef9388b583b8b582326ad75dbd4c3502add4
diff --git a/OWNERS b/OWNERS
index a4eaf30..7e1af30 100644
--- a/OWNERS
+++ b/OWNERS
@@ -2,3 +2,4 @@
kevcheng@google.com
albaltai@google.com
patricktu@google.com
+yangbill@google.com
diff --git a/aidegen/aidegen_main.py b/aidegen/aidegen_main.py
index 41b9d35..05b8faf 100644
--- a/aidegen/aidegen_main.py
+++ b/aidegen/aidegen_main.py
@@ -41,6 +41,7 @@
import argparse
import logging
+import os
import sys
import traceback
@@ -62,7 +63,7 @@
AIDEGEN_REPORT_LINK = ('To report the AIDEGen tool problem, please use this '
'link: https://goto.google.com/aidegen-bug')
-_CONGRATULATION = common_util.COLORED_PASS('CONGRATULATION:')
+_CONGRATULATIONS = common_util.COLORED_PASS('CONGRATULATIONS:')
_LAUNCH_SUCCESS_MSG = (
'IDE launched successfully. Please check your IDE window.')
_LAUNCH_ECLIPSE_SUCCESS_MSG = (
@@ -199,14 +200,18 @@
project_absolute_path: A string of project absolute path.
"""
ide_util_obj.config_ide(project_absolute_path)
- ide_util_obj.launch_ide()
if ide_util_obj.ide_name() == constant.IDE_ECLIPSE:
launch_msg = ' '.join([_LAUNCH_SUCCESS_MSG,
_LAUNCH_ECLIPSE_SUCCESS_MSG.format(
PROJECT_PATH=project_absolute_path)])
else:
launch_msg = _LAUNCH_SUCCESS_MSG
- print('\n{} {}\n'.format(_CONGRATULATION, launch_msg))
+ print('\n{} {}\n'.format(_CONGRATULATIONS, launch_msg))
+ print('\n{} {}\n'.format(_INFO, _IDE_CACHE_REMINDER_MSG))
+ # Send the end message to Clearcut server before launching IDE to make sure
+ # the execution time is correct.
+ aidegen_metrics.ends_asuite_metrics(constant.EXIT_CODE_EXCEPTION)
+ ide_util_obj.launch_ide()
def _launch_native_projects(ide_util_obj, args, cmakelists):
@@ -275,9 +280,10 @@
both=False):
"""Deals with the suitable IDE launch action.
- CLion only supports C/C++ and EClipse only supports Java right now, if users
- launch these two IDEs through AIDEGen we don't ask users to choose one of
- the languages.
+ The rules AIDEGen won't ask users to choose one of the languages are:
+ 1. Users set CLion as IDE: CLion only supports C/C++.
+ 2. Test mode is true: if AIDEGEN_TEST_MODE is true the default language is
+ Java.
Args:
args: A list of system arguments.
@@ -297,6 +303,9 @@
answer = None
if constant.IDE_NAME_DICT[args.ide[0]] == constant.IDE_CLION:
answer = constant.C_CPP
+ elif common_util.to_boolean(
+ os.environ.get(constant.AIDEGEN_TEST_MODE, 'false')):
+ answer = constant.JAVA
if not answer and jlist and clist:
answer = _get_preferred_ide_from_user(_LANGUAGE_OPTIONS)
if (jlist and not clist) or (answer == constant.JAVA):
@@ -370,8 +379,10 @@
argv: A list of system arguments.
"""
exit_code = constant.EXIT_CODE_NORMAL
+ launch_ide = True
try:
args = _parse_args(argv)
+ launch_ide = not args.no_launch
common_util.configure_logging(args.verbose)
is_whole_android_tree = project_config.is_whole_android_tree(
args.targets, args.android_tree)
@@ -400,10 +411,10 @@
print(traceback_str)
raise err
finally:
- if exit_code is constant.EXIT_CODE_NORMAL:
+ print('\n{0} {1}\n'.format(_INFO, AIDEGEN_REPORT_LINK))
+ # Send the end message here on ignoring launch IDE case.
+ if not launch_ide and exit_code is constant.EXIT_CODE_NORMAL:
aidegen_metrics.ends_asuite_metrics(exit_code)
- print('\n{0} {1}\n\n{0} {2}\n'.format(_INFO, AIDEGEN_REPORT_LINK,
- _IDE_CACHE_REMINDER_MSG))
def aidegen_main(args):
diff --git a/aidegen/aidegen_main_unittest.py b/aidegen/aidegen_main_unittest.py
index 8396109..c84f16e 100644
--- a/aidegen/aidegen_main_unittest.py
+++ b/aidegen/aidegen_main_unittest.py
@@ -18,6 +18,7 @@
from __future__ import print_function
+import os
import sys
import unittest
from unittest import mock
@@ -128,10 +129,12 @@
"""Test main with normal conditions."""
aidegen_main.main(['-h'])
self.assertFalse(mock_main.called)
- mock_ends_metrics.assert_called_with(constant.EXIT_CODE_NORMAL)
mock_is_whole_tree.return_value = True
aidegen_main.main([''])
mock_starts_metrics.assert_called_with([constant.ANDROID_TREE])
+ self.assertFalse(mock_ends_metrics.called)
+ aidegen_main.main(['-n'])
+ mock_ends_metrics.assert_called_with(constant.EXIT_CODE_NORMAL)
@mock.patch.object(aidegen_metrics, 'ends_asuite_metrics')
@mock.patch.object(aidegen_main, 'main_with_message')
@@ -347,6 +350,23 @@
self.assertTrue(mock_c_prj.called)
self.assertTrue(mock_genc.called)
+ args = aidegen_main._parse_args(['frameworks/base'])
+ mock_vs.reset_mock()
+ mock_choice.reset_mock()
+ mock_c.reset_mock()
+ mock_genc.reset_mock()
+ mock_c_prj.reset_mock()
+ mock_j.reset_mock()
+ os.environ[constant.AIDEGEN_TEST_MODE] = 'true'
+ aidegen_main._launch_ide_by_module_contents(args, None, test_j, test_c)
+ self.assertFalse(mock_vs.called)
+ self.assertFalse(mock_choice.called)
+ self.assertTrue(mock_j.called)
+ self.assertFalse(mock_c.called)
+ self.assertFalse(mock_c_prj.called)
+ self.assertFalse(mock_genc.called)
+ del os.environ[constant.AIDEGEN_TEST_MODE]
+
@mock.patch.object(aidegen_main, '_launch_ide')
@mock.patch.object(aidegen_main, '_generate_project_files')
@mock.patch.object(project_info.ProjectInfo, 'multi_projects_locate_source')
diff --git a/aidegen/constant.py b/aidegen/constant.py
index f5d3928..a52288c 100644
--- a/aidegen/constant.py
+++ b/aidegen/constant.py
@@ -25,6 +25,7 @@
GEN_JAVA_DEPS = 'SOONG_COLLECT_JAVA_DEPS'
GEN_CC_DEPS = 'SOONG_COLLECT_CC_DEPS'
GEN_COMPDB = 'SOONG_GEN_COMPDB'
+AIDEGEN_TEST_MODE = 'AIDEGEN_TEST_MODE'
# Constants for module's info.
KEY_PATH = 'path'
@@ -42,6 +43,11 @@
KEY_TEST_CONFIG = 'test_config'
KEY_HEADER = 'header_search_path'
KEY_SYSTEM = 'system_search_path'
+KEY_TESTS = 'tests'
+KEY_JARS = 'jars'
+KEY_DEP_SRCS = 'dep_srcs'
+KEY_IML_NAME = 'iml_name'
+KEY_EXCLUDES = 'excludes'
# Java related classes.
JAVA_TARGET_CLASSES = ['APPS', 'JAVA_LIBRARIES', 'ROBOLECTRIC']
@@ -93,6 +99,7 @@
ANDROID_MK = 'Android.mk'
JAVA_FILES = '*.java'
VSCODE_CONFIG_DIR = '.vscode'
+ANDROID_MANIFEST = 'AndroidManifest.xml'
# Constants for file paths.
RELATIVE_NATIVE_PATH = 'development/ide/clion'
@@ -126,4 +133,18 @@
# Constants for default modules.
FRAMEWORK_ALL = 'framework-all'
-CORE_ALL = 'core-all'
\ No newline at end of file
+CORE_ALL = 'core-all'
+FRAMEWORK_SRCJARS = 'framework_srcjars'
+
+# Constants for module's path.
+FRAMEWORK_PATH = 'frameworks/base'
+LIBCORE_PATH = 'libcore'
+
+# Constants for regular expression
+RE_INSIDE_PATH_CHECK = r'^{}($|/.+)'
+
+# Constants for Git
+GIT_FOLDER_NAME = '.git'
+
+# Constants for Idea
+IDEA_FOLDER = '.idea'
diff --git a/aidegen/data/jdk.table.xml b/aidegen/data/jdk.table.xml
index 51baf8b..472abe9 100644
--- a/aidegen/data/jdk.table.xml
+++ b/aidegen/data/jdk.table.xml
@@ -1,4 +1,4 @@
<application>
<component name="ProjectJdkTable">
</component>
-</application>
\ No newline at end of file
+</application>
diff --git a/aidegen/idea/iml.py b/aidegen/idea/iml.py
index 3e78037..9e3343d 100644
--- a/aidegen/idea/iml.py
+++ b/aidegen/idea/iml.py
@@ -28,6 +28,7 @@
from __future__ import absolute_import
+import logging
import os
from aidegen import constant
@@ -38,11 +39,25 @@
class IMLGenerator:
"""Creates the iml file for each module.
+ Class attributes:
+ _USED_NAME_CACHE: A dict to cache already used iml project file names
+ and prevent duplicated iml names from breaking IDEA.
+
Attributes:
_mod_info: A dictionary of the module's data from module-info.json.
+ _android_root: A string ot the Android root's absolute path.
+ _mod_path: A string of the module's absolute path.
_iml_path: A string of the module's iml absolute path.
- _srcjar_urls: A list of srcjar urls.
+ _facet: A string of the facet setting.
+ _excludes: A string of the exclude relative paths.
+ _srcs: A string of the source urls.
+ _jars: A list of the jar urls.
+ _srcjars: A list of srcjar urls.
+ _deps: A list of the dependency module urls.
"""
+ # b/121256503: Prevent duplicated iml names from breaking IDEA.
+ # Use a map to cache in-using(already used) iml project file names.
+ USED_NAME_CACHE = dict()
def __init__(self, mod_info):
"""Initializes IMLGenerator.
@@ -51,39 +66,161 @@
mod_info: A dictionary of the module's data from module-info.json.
"""
self._mod_info = mod_info
- self._iml_path = os.path.join(common_util.get_android_root_dir(),
- mod_info[constant.KEY_PATH][0],
- mod_info[constant.KEY_MODULE_NAME]
- + '.iml')
- self._srcjar_urls = []
+ self._android_root = common_util.get_android_root_dir()
+ self._mod_path = os.path.join(self._android_root,
+ mod_info[constant.KEY_PATH][0])
+ self._iml_path = os.path.join(self._mod_path,
+ mod_info[constant.KEY_IML_NAME] + '.iml')
+ self._facet = ''
+ self._excludes = ''
+ self._srcs = ''
+ self._jars = []
+ self._srcjars = []
+ self._deps = []
+
+ @classmethod
+ def get_unique_iml_name(cls, abs_module_path):
+ """Create a unique iml name if needed.
+
+ If the name of last sub folder is used already, prefixing it with prior
+ sub folder names as a candidate name. If finally, it's unique, storing
+ in USED_NAME_CACHE as: { abs_module_path:unique_name }. The cts case
+ and UX of IDE view are the main reasons why using module path strategy
+ but not name of module directly. Following is the detailed strategy:
+ 1. While loop composes a sensible and shorter name, by checking unique
+ to finish the loop and finally add to cache.
+ Take ['cts', 'tests', 'app', 'ui'] an example, if 'ui' isn't
+ occupied, use it, else try 'cts_ui', then 'cts_app_ui', the worst
+ case is whole three candidate names are occupied already.
+ 2. 'Else' for that while stands for no suitable name generated, so
+ trying 'cts_tests_app_ui' directly. If it's still non unique, e.g.,
+ module path cts/xxx/tests/app/ui occupied that name already,
+ appending increasing sequence number to get a unique name.
+
+ Args:
+ abs_module_path: The absolute module path string.
+
+ Return:
+ String: A unique iml name.
+ """
+ if abs_module_path in cls.USED_NAME_CACHE:
+ return cls.USED_NAME_CACHE[abs_module_path]
+
+ uniq_name = abs_module_path.strip(os.sep).split(os.sep)[-1]
+ if any(uniq_name == name for name in cls.USED_NAME_CACHE.values()):
+ parent_path = os.path.relpath(abs_module_path,
+ common_util.get_android_root_dir())
+ sub_folders = parent_path.split(os.sep)
+ zero_base_index = len(sub_folders) - 1
+ # Start compose a sensible, shorter and unique name.
+ while zero_base_index > 0:
+ uniq_name = '_'.join(
+ [sub_folders[0], '_'.join(sub_folders[zero_base_index:])])
+ zero_base_index = zero_base_index - 1
+ if uniq_name not in cls.USED_NAME_CACHE.values():
+ break
+ else:
+ # TODO(b/133393638): To handle several corner cases.
+ uniq_name_base = parent_path.strip(os.sep).replace(os.sep, '_')
+ i = 0
+ uniq_name = uniq_name_base
+ while uniq_name in cls.USED_NAME_CACHE.values():
+ i = i + 1
+ uniq_name = '_'.join([uniq_name_base, str(i)])
+ cls.USED_NAME_CACHE[abs_module_path] = uniq_name
+ logging.debug('Unique name for module path of %s is %s.',
+ abs_module_path, uniq_name)
+ return uniq_name
@property
def iml_path(self):
"""Gets the iml path."""
return self._iml_path
- def create(self, create_content):
+ def create(self, content_type):
"""Creates the iml file.
Create the iml file with specific part of sources.
- e.g. {'srcjars': True}
+ e.g.
+ {
+ 'srcs': True,
+ 'dependencies': True,
+ }
Args:
- create_content: A dict to set which part of sources will be created.
+ content_type: A dict to set which part of sources will be created.
"""
- if create_content[constant.KEY_SRCJARS]:
+ if content_type.get(constant.KEY_SRCS, None):
+ self._generate_srcs()
+ if content_type.get(constant.KEY_DEP_SRCS, None):
+ self._generate_dep_srcs()
+ if content_type.get(constant.KEY_JARS, None):
+ self._generate_jars()
+ if content_type.get(constant.KEY_SRCJARS, None):
self._generate_srcjars()
- if self._srcjar_urls:
+ if content_type.get(constant.KEY_DEPENDENCIES, None):
+ self._generate_dependencies()
+
+ if self._srcs or self._jars or self._srcjars or self._deps:
self._create_iml()
+ def _generate_facet(self):
+ """Generates the facet when the AndroidManifest.xml exists."""
+ if os.path.exists(os.path.join(self._mod_path,
+ constant.ANDROID_MANIFEST)):
+ self._facet = templates.FACET
+
+ def _generate_srcs(self):
+ """Generates the source urls of the project's iml file."""
+ srcs = []
+ for src in self._mod_info[constant.KEY_SRCS]:
+ srcs.append(templates.SOURCE.format(
+ SRC=os.path.join(self._android_root, src),
+ IS_TEST='false'))
+ for test in self._mod_info[constant.KEY_TESTS]:
+ srcs.append(templates.SOURCE.format(
+ SRC=os.path.join(self._android_root, test),
+ IS_TEST='true'))
+ self._excludes = self._mod_info.get(constant.KEY_EXCLUDES, '')
+ self._srcs = templates.CONTENT.format(MODULE_PATH=self._mod_path,
+ EXCLUDES=self._excludes,
+ SOURCES=''.join(sorted(srcs)))
+
+ def _generate_dep_srcs(self):
+ """Generates the source urls of the dependencies.iml."""
+ srcs = []
+ for src in self._mod_info[constant.KEY_SRCS]:
+ srcs.append(templates.OTHER_SOURCE.format(
+ SRC=os.path.join(self._android_root, src),
+ IS_TEST='false'))
+ for test in self._mod_info[constant.KEY_TESTS]:
+ srcs.append(templates.OTHER_SOURCE.format(
+ SRC=os.path.join(self._android_root, test),
+ IS_TEST='true'))
+ self._srcs = ''.join(sorted(srcs))
+
+ def _generate_jars(self):
+ """Generates the jar urls."""
+ for jar in self._mod_info[constant.KEY_JARS]:
+ self._jars.append(templates.JAR.format(
+ JAR=os.path.join(self._android_root, jar)))
+
def _generate_srcjars(self):
"""Generates the srcjar urls."""
for srcjar in self._mod_info[constant.KEY_SRCJARS]:
- self._srcjar_urls.append(templates.SRCJAR.format(
- SRCJAR=os.path.join(common_util.get_android_root_dir(),
- srcjar)))
+ self._srcjars.append(templates.SRCJAR.format(
+ SRCJAR=os.path.join(self._android_root, srcjar)))
+
+ def _generate_dependencies(self):
+ """Generates the dependency module urls."""
+ for dep in self._mod_info[constant.KEY_DEPENDENCIES]:
+ self._deps.append(templates.DEPENDENCIES.format(MODULE=dep))
def _create_iml(self):
"""Creates the iml file."""
- content = templates.IML.format(SRCJARS=''.join(self._srcjar_urls))
+ content = templates.IML.format(FACET=self._facet,
+ SOURCES=self._srcs,
+ JARS=''.join(self._jars),
+ SRCJARS=''.join(self._srcjars),
+ DEPENDENCIES=''.join(self._deps))
common_util.file_generate(self._iml_path, content)
diff --git a/aidegen/idea/iml_unittest.py b/aidegen/idea/iml_unittest.py
index bea1bfe..0aa64e9 100644
--- a/aidegen/idea/iml_unittest.py
+++ b/aidegen/idea/iml_unittest.py
@@ -22,6 +22,7 @@
import unittest
from unittest import mock
+from aidegen import templates
from aidegen.lib import common_util
from aidegen.idea import iml
@@ -36,13 +37,14 @@
"""Prepare the testdata related path."""
IMLGenUnittests._TEST_DIR = tempfile.mkdtemp()
module = {
- "module_name": "test",
- "path": [
- "a/b"
- ],
- "srcjars": [
- 'x/y.srcjar'
- ]
+ 'module_name': 'test',
+ 'iml_name': 'test_iml',
+ 'path': ['a/b'],
+ 'srcs': ['a/b/src'],
+ 'tests': ['a/b/tests'],
+ 'srcjars': ['x/y.srcjar'],
+ 'jars': ['s.jar'],
+ 'dependencies': ['m1']
}
with mock.patch.object(common_util, 'get_android_root_dir') as obj:
obj.return_value = IMLGenUnittests._TEST_DIR
@@ -61,41 +63,106 @@
def test_iml_path(self, mock_root_path):
"""Test iml_path."""
mock_root_path.return_value = IMLGenUnittests._TEST_DIR
- iml_path = os.path.join(IMLGenUnittests._TEST_DIR, 'a/b/test.iml')
+ iml_path = os.path.join(IMLGenUnittests._TEST_DIR, 'a/b/test_iml.iml')
self.assertEqual(self.iml.iml_path, iml_path)
@mock.patch.object(common_util, 'get_android_root_dir')
def test_create(self, mock_root_path):
"""Test create."""
mock_root_path.return_value = IMLGenUnittests._TEST_DIR
+ module_path = os.path.join(IMLGenUnittests._TEST_DIR, 'a/b')
+ src_path = os.path.join(IMLGenUnittests._TEST_DIR, 'a/b/src')
+ test_path = os.path.join(IMLGenUnittests._TEST_DIR, 'a/b/tests')
srcjar_path = os.path.join(IMLGenUnittests._TEST_DIR, 'x/y.srcjar')
+ jar_path = os.path.join(IMLGenUnittests._TEST_DIR, 's.jar')
expected = """<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
+ <content url="file://{MODULE_PATH}">
+ <sourceFolder url="file://{SRC_PATH}" isTestSource="false" />
+ <sourceFolder url="file://{TEST_PATH}" isTestSource="true" />
+ </content>
+ <orderEntry type="sourceFolder" forTests="false" />
<content url="jar://{SRCJAR}!/">
<sourceFolder url="jar://{SRCJAR}!/" isTestSource="False" />
</content>
+ <orderEntry type="module" module-name="m1" />
+ <orderEntry type="module-library" exported="">
+ <library>
+ <CLASSES>
+ <root url="jar://{JAR}!/" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ </library>
+ </orderEntry>
+ <orderEntry type="inheritedJdk" />
+ </component>
+</module>
+""".format(MODULE_PATH=module_path,
+ SRC_PATH=src_path,
+ TEST_PATH=test_path,
+ SRCJAR=srcjar_path,
+ JAR=jar_path)
+ self.iml.create({'srcs': True, 'srcjars': True, 'dependencies': True,
+ 'jars': True})
+ gen_iml = os.path.join(IMLGenUnittests._TEST_DIR,
+ self.iml._mod_info['path'][0],
+ self.iml._mod_info['iml_name'] + '.iml')
+ result = common_util.read_file_content(gen_iml)
+ self.assertEqual(result, expected)
+
+ @mock.patch.object(common_util, 'get_android_root_dir')
+ def test_gen_dep_sources(self, mock_root_path):
+ """Test _generate_dep_srcs."""
+ mock_root_path.return_value = IMLGenUnittests._TEST_DIR
+ src_path = os.path.join(IMLGenUnittests._TEST_DIR, 'a/b/src')
+ test_path = os.path.join(IMLGenUnittests._TEST_DIR, 'a/b/tests')
+ expected = """<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://{SRC_PATH}">
+ <sourceFolder url="file://{SRC_PATH}" isTestSource="false" />
+ </content>
+ <content url="file://{TEST_PATH}">
+ <sourceFolder url="file://{TEST_PATH}" isTestSource="true" />
+ </content>
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="inheritedJdk" />
</component>
</module>
-""".format(SRCJAR=srcjar_path)
- self.iml.create({'srcjars': True})
+""".format(SRC_PATH=src_path,
+ TEST_PATH=test_path)
+ self.iml.create({'dep_srcs': True})
gen_iml = os.path.join(IMLGenUnittests._TEST_DIR,
self.iml._mod_info['path'][0],
- self.iml._mod_info['module_name'] + '.iml')
+ self.iml._mod_info['iml_name'] + '.iml')
result = common_util.read_file_content(gen_iml)
self.assertEqual(result, expected)
@mock.patch.object(iml.IMLGenerator, '_create_iml')
+ @mock.patch.object(iml.IMLGenerator, '_generate_dependencies')
@mock.patch.object(iml.IMLGenerator, '_generate_srcjars')
- def test_skip_create_iml(self, mock_gen_srcjars, mock_create_iml):
+ def test_skip_create_iml(self, mock_gen_srcjars, mock_gen_dep,
+ mock_create_iml):
"""Test skipping create_iml."""
- self.iml.create({'srcjars': False})
+ self.iml.create({'srcjars': False, 'dependencies': False})
self.assertFalse(mock_gen_srcjars.called)
+ self.assertFalse(mock_gen_dep.called)
self.assertFalse(mock_create_iml.called)
+ @mock.patch('os.path.exists')
+ def test_generate_facet(self, mock_exists):
+ """Test _generate_facet."""
+ mock_exists.return_value = False
+ self.iml._generate_facet()
+ self.assertEqual(self.iml._facet, '')
+ mock_exists.return_value = True
+ self.iml._generate_facet()
+ self.assertEqual(self.iml._facet, templates.FACET)
+
if __name__ == '__main__':
unittest.main()
diff --git a/aidegen/idea/xml_gen.py b/aidegen/idea/xml_gen.py
new file mode 100644
index 0000000..95f83cd
--- /dev/null
+++ b/aidegen/idea/xml_gen.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python3
+#
+# Copyright 2020 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Creates the xml files.
+
+Usage example:
+ vcs = XMLGenerator(module_path, 'vcs.xml')
+ if not vcs.xml_obj:
+ # Create the file directly.
+ common_util.file_generate(vcs.xml_abspath, xml_content)
+ else:
+ # Add/remove elements to vcs.xml_obj by the methods of
+ # ElementTree.Element object.
+ vcs.xml_obj.append()
+ vcs.xml_obj.makeelement()
+ vcs.xml_obj.remove()
+ # Update the XML content.
+ vcs.create_xml()
+"""
+
+from __future__ import absolute_import
+
+import os
+
+from aidegen import constant
+from aidegen import templates
+from aidegen.lib import common_util
+from aidegen.lib import xml_util
+
+_GIT_PATH = ' <mapping directory="{GIT_DIR}" vcs="Git" />'
+
+
+class XMLGenerator:
+ """Creates the xml file.
+
+ Attributes:
+ _xml_abspath: A string of the XML's absolute path.
+ _xml_obj: An ElementTree object.
+ """
+
+ def __init__(self, module_abspath, xml_name):
+ """Initializes XMLGenerator.
+
+ Args:
+ module_abspath: A string of the module's absolute path.
+ xml_name: A string of the xml file name.
+ """
+ self._xml_abspath = os.path.join(module_abspath, constant.IDEA_FOLDER,
+ xml_name)
+ self._xml_obj = None
+ self.parse()
+
+ def parse(self):
+ """Parses the XML file to an ElementTree object."""
+ if os.path.exists(self._xml_abspath):
+ self._xml_obj = xml_util.parse_xml(self._xml_abspath)
+
+ @property
+ def xml_path(self):
+ """Gets the xml absolute path."""
+ return self._xml_abspath
+
+ @property
+ def xml_obj(self):
+ """Gets the xml object."""
+ return self._xml_obj
+
+ def find_elements_by_name(self, element_type, name):
+ """Finds the target elements by name attribute.
+
+ Args:
+ element_type: A string of element's type.
+ name: A string of element's name.
+
+ Return:
+ List: ElementTree's element objects.
+ """
+ return [e for e in self._xml_obj.findall(element_type)
+ if e.get('name') == name]
+
+ def create_xml(self):
+ """Creates the xml file."""
+ common_util.file_generate(self._xml_abspath, common_util.to_pretty_xml(
+ self._xml_obj.getroot()))
+
+
+def gen_vcs_xml(module_path, git_paths):
+ """Writes the git path into the .idea/vcs.xml.
+
+ For main module, the vcs.xml should include all modules' git path.
+ For the whole AOSP case, ignore creating the vcs.xml. Instead, add the
+ ignored Git paths in the workspace.xml.
+
+ Args:
+ module_path: Path of the module.
+ git_paths: A list of git paths.
+ """
+ git_mappings = [_GIT_PATH.format(GIT_DIR=p) for p in git_paths]
+ vcs = XMLGenerator(module_path, 'vcs.xml')
+ if module_path != common_util.get_android_root_dir() or not vcs.xml_obj:
+ common_util.file_generate(vcs.xml_path, templates.XML_VCS.format(
+ GIT_MAPPINGS='\n'.join(git_mappings)))
diff --git a/aidegen/idea/xml_gen_unittest.py b/aidegen/idea/xml_gen_unittest.py
new file mode 100644
index 0000000..d2b3b8b
--- /dev/null
+++ b/aidegen/idea/xml_gen_unittest.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python3
+#
+# Copyright 2020, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Unittests for XMLGenerator."""
+
+import shutil
+import tempfile
+import unittest
+from unittest import mock
+
+from xml.etree import ElementTree
+
+from aidegen.lib import common_util
+from aidegen.idea import xml_gen
+
+
+# pylint: disable=protected-access
+class XMLGenUnittests(unittest.TestCase):
+ """Unit tests for XMLGenerator class."""
+
+ _TEST_DIR = None
+ _XML_NAME = 'test.xml'
+ _DEFAULT_XML = """<?xml version="1.0" encoding="UTF-8"?>
+<project version="4"></project>
+"""
+
+ def setUp(self):
+ """Prepare the testdata related path."""
+ XMLGenUnittests._TEST_DIR = tempfile.mkdtemp()
+ self.xml = xml_gen.XMLGenerator(self._TEST_DIR, self._XML_NAME)
+ common_util.file_generate(self.xml.xml_path, self._DEFAULT_XML)
+ self.xml.parse()
+
+ def tearDown(self):
+ """Clear the testdata related path."""
+ shutil.rmtree(self._TEST_DIR)
+
+ def test_find_elements_by_name(self):
+ """Test find_elements_by_name."""
+ node = self.xml.xml_obj.getroot()
+ ElementTree.SubElement(node, 'a', attrib={'name': 'b'})
+ elements = self.xml.find_elements_by_name('a', 'b')
+ self.assertEqual(len(elements), 1)
+
+ @mock.patch.object(common_util, 'to_pretty_xml')
+ @mock.patch.object(common_util, 'file_generate')
+ def test_create_xml(self, mock_file_gen, mock_pretty_xml):
+ """Test create_xml."""
+ self.xml.create_xml()
+ self.assertTrue(mock_file_gen.called)
+ self.assertTrue(mock_pretty_xml.called)
+
+
+class VCSGenUnittests(unittest.TestCase):
+ """Unit tests for generating vcs.xml."""
+
+ _TEST_DIR = None
+
+ def setUp(self):
+ """Prepare the testdata related path."""
+ VCSGenUnittests._TEST_DIR = tempfile.mkdtemp()
+
+ def tearDown(self):
+ """Clear the testdata related path."""
+ shutil.rmtree(self._TEST_DIR)
+
+ @mock.patch.object(common_util, 'file_generate')
+ @mock.patch.object(common_util, 'get_android_root_dir')
+ @mock.patch.object(xml_gen, 'XMLGenerator')
+ def test_gen_vcs_xml(self, mock_xml_gen, mock_root_dir, mock_file_gen):
+ """Test gen_vcs_xml."""
+ mock_gen_xml = mock.Mock()
+ mock_xml_gen.return_value = mock_gen_xml
+ mock_xml_gen.xml_obj = None
+ mock_root_dir.return_value = self._TEST_DIR
+ xml_gen.gen_vcs_xml(self._TEST_DIR, [])
+ self.assertFalse(mock_file_gen.called)
+ mock_root_dir.return_value = '/a'
+ xml_gen.gen_vcs_xml(self._TEST_DIR, ['/a'])
+ self.assertTrue(mock_file_gen.called)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/aidegen/lib/common_util.py b/aidegen/lib/common_util.py
index 8e878ac..685ca10 100644
--- a/aidegen/lib/common_util.py
+++ b/aidegen/lib/common_util.py
@@ -24,6 +24,7 @@
import json
import logging
import os
+import re
import sys
import time
import xml.dom.minidom
@@ -195,7 +196,7 @@
True if the relative path contains a build target, otherwise false.
"""
return any(
- mod_path.startswith(rel_path)
+ is_source_under_relative_path(mod_path, rel_path)
for mod_path in atest_module_info.path_to_module_info)
@@ -268,7 +269,7 @@
err = FAKE_MODULE_ERROR.format(target)
logging.error(err)
raise errors.FakeModuleError(err)
- if not abs_path.startswith(get_android_root_dir()):
+ if not is_source_under_relative_path(abs_path, get_android_root_dir()):
err = OUTSIDE_ROOT_ERROR.format(abs_path)
logging.error(err)
raise errors.ProjectOutsideAndroidRootError(err)
@@ -302,7 +303,7 @@
"""
if not rel_path:
return get_android_root_dir()
- if rel_path.startswith(get_android_root_dir()):
+ if is_source_under_relative_path(rel_path, get_android_root_dir()):
return rel_path
return os.path.join(get_android_root_dir(), rel_path)
@@ -483,7 +484,8 @@
Returns:
True if source file is a project relative path file, otherwise False.
"""
- return source == relative_path or source.startswith(relative_path + os.sep)
+ return re.search(
+ constant.RE_INSIDE_PATH_CHECK.format(relative_path), source)
def remove_user_home_path(data):
@@ -545,7 +547,7 @@
@wraps(func)
def decorated(*args, **kwargs):
"""A wrapper function."""
- params = {x[0]: x[1] for x in zip(fparams, args)}
+ params = dict(zip(fparams, args))
for arg_name, arg_type in decls.items():
try:
arg_val = params[arg_name]
@@ -691,3 +693,36 @@
xml_string = xml_string.split("\n", 1)[1]
# Remove the weird newline issue from toprettyxml.
return os.linesep.join([s for s in xml_string.splitlines() if s.strip()])
+
+
+def to_boolean(str_bool):
+ """Converts a string to a boolean.
+
+ Args:
+ str_bool: A string in the expression of boolean type.
+
+ Returns:
+ A boolean True if the string is one of ('True', 'true', 'T', 't', '1')
+ else False.
+ """
+ return str_bool and str_bool.lower() in ('true', 't', '1')
+
+
+def find_git_root(relpath):
+ """Finds the parent directory which has a .git folder from the relpath.
+
+ Args:
+ relpath: A string of relative path.
+
+ Returns:
+ A string of the absolute path which contains a .git, otherwise, none.
+ """
+ dir_list = relpath.split(os.sep)
+ for i in range(len(dir_list), 0, -1):
+ real_path = os.path.join(get_android_root_dir(),
+ os.sep.join(dir_list[:i]),
+ constant.GIT_FOLDER_NAME)
+ if os.path.exists(real_path):
+ return os.path.dirname(real_path)
+ logging.warning('%s can\'t find its .git folder.', relpath)
+ return None
diff --git a/aidegen/lib/common_util_unittest.py b/aidegen/lib/common_util_unittest.py
index 1c527de..dc8d392 100644
--- a/aidegen/lib/common_util_unittest.py
+++ b/aidegen/lib/common_util_unittest.py
@@ -364,6 +364,30 @@
pretty_xml = common_util.to_pretty_xml(root)
self.assertEqual(pretty_xml, self._SAMPLE_XML_CONTENT)
+ def test_to_to_boolean(self):
+ """Test to_boolean function with conditions."""
+ self.assertTrue(common_util.to_boolean('True'))
+ self.assertTrue(common_util.to_boolean('true'))
+ self.assertTrue(common_util.to_boolean('T'))
+ self.assertTrue(common_util.to_boolean('t'))
+ self.assertTrue(common_util.to_boolean('1'))
+ self.assertFalse(common_util.to_boolean('False'))
+ self.assertFalse(common_util.to_boolean('false'))
+ self.assertFalse(common_util.to_boolean('F'))
+ self.assertFalse(common_util.to_boolean('f'))
+ self.assertFalse(common_util.to_boolean('0'))
+ self.assertFalse(common_util.to_boolean(''))
+
+ @mock.patch.object(os.path, 'exists')
+ @mock.patch.object(common_util, 'get_android_root_dir')
+ def test_find_git_root(self, mock_get_root, mock_exist):
+ """Test find_git_root."""
+ mock_get_root.return_value = '/a/b'
+ mock_exist.return_value = True
+ self.assertEqual(common_util.find_git_root('c/d'), '/a/b/c/d')
+ mock_exist.return_value = False
+ self.assertEqual(common_util.find_git_root('c/d'), None)
+
# pylint: disable=unused-argument
def parse_rule(self, name, text):
diff --git a/aidegen/lib/config.py b/aidegen/lib/config.py
index c428905..a1309e6 100644
--- a/aidegen/lib/config.py
+++ b/aidegen/lib/config.py
@@ -38,6 +38,7 @@
import re
from aidegen import constant
+from aidegen import templates
from aidegen.lib import common_util
_DIR_LIB = 'lib'
@@ -62,36 +63,8 @@
_ENABLE_DEBUG_CONFIG_DIR = 'enable_debugger'
_ENABLE_DEBUG_CONFIG_FILE = 'enable_debugger.iml'
_ENABLE_DEBUG_DIR = os.path.join(_CONFIG_DIR, _ENABLE_DEBUG_CONFIG_DIR)
- _ANDROID_MANIFEST_FILE_NAME = 'AndroidManifest.xml'
_DIR_SRC = 'src'
_DIR_GEN = 'gen'
- _ANDROIDMANIFEST_CONTENT = """<?xml version="1.0" encoding="utf-8"?>
-<manifest xmlns:android="http://schemas.android.com/apk/res/android"
- android:versionCode="1"
- android:versionName="1.0" >
-</manifest>
- """
- # The xml template for enabling debugger.
- _XML_ENABLE_DEBUGGER = """<?xml version="1.0" encoding="UTF-8"?>
-<module type="JAVA_MODULE" version="4">
- <component name="FacetManager">
- <facet type="android" name="Android">
- <configuration>
- <proGuardCfgFiles />
- </configuration>
- </facet>
- </component>
- <component name="NewModuleRootManager" inherit-compiler-output="true">
- <exclude-output />
- <content url="file://$MODULE_DIR$">
- <sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
- <sourceFolder url="file://$MODULE_DIR$/gen" isTestSource="false" generated="true" />
- </content>
- <orderEntry type="jdk" jdkName="{ANDROID_SDK_VERSION}" jdkType="Android SDK" />
- <orderEntry type="sourceFolder" forTests="false" />
- </component>
-</module>
-"""
DEBUG_ENABLED_FILE_PATH = os.path.join(_ENABLE_DEBUG_DIR,
_ENABLE_DEBUG_CONFIG_FILE)
@@ -199,10 +172,9 @@
AIDEGen will generate it with default content to prevent the red
underline error in IntelliJ.
"""
- _file = os.path.join(self._ENABLE_DEBUG_DIR,
- self._ANDROID_MANIFEST_FILE_NAME)
+ _file = os.path.join(self._ENABLE_DEBUG_DIR, constant.ANDROID_MANIFEST)
if not os.path.exists(_file) or os.stat(_file).st_size == 0:
- common_util.file_generate(_file, self._ANDROIDMANIFEST_CONTENT)
+ common_util.file_generate(_file, templates.ANDROID_MANIFEST_CONTENT)
def _gen_enable_debugger_config(self, android_sdk_version):
"""Generate the enable_debugger.iml config file.
@@ -214,7 +186,7 @@
android_sdk_version: The version name of the Android Sdk in the
jdk.table.xml.
"""
- content = self._XML_ENABLE_DEBUGGER.format(
+ content = templates.XML_ENABLE_DEBUGGER.format(
ANDROID_SDK_VERSION=android_sdk_version)
common_util.file_generate(self.DEBUG_ENABLED_FILE_PATH, content)
diff --git a/aidegen/lib/eclipse_project_file_gen.py b/aidegen/lib/eclipse_project_file_gen.py
index 5f61131..5bc4b7e 100644
--- a/aidegen/lib/eclipse_project_file_gen.py
+++ b/aidegen/lib/eclipse_project_file_gen.py
@@ -165,7 +165,8 @@
Returns: A list of source folder paths.
"""
return [p for p in self.src_paths
- if not p.startswith(self.module_relpath)]
+ if not common_util.is_source_under_relative_path(
+ p, self.module_relpath)]
def _create_project_content(self):
"""Create the project file .project under the module."""
@@ -231,7 +232,8 @@
src_path_entries = []
for src in self.src_paths:
src_abspath = os.path.join(common_util.get_android_root_dir(), src)
- if src.startswith(self.module_relpath):
+ if common_util.is_source_under_relative_path(
+ src, self.module_relpath):
src = src.replace(self.module_relpath, '').strip(os.sep)
else:
src = os.path.join(constant.KEY_DEPENDENCIES, src)
diff --git a/aidegen/lib/module_info.py b/aidegen/lib/module_info.py
index 50c8567..a635d40 100644
--- a/aidegen/lib/module_info.py
+++ b/aidegen/lib/module_info.py
@@ -81,20 +81,19 @@
return False
@staticmethod
- def is_project_path_relative_module(mod_info, project_relative_path):
+ def is_project_path_relative_module(mod_info, rel_path):
"""Determine if the given project path is relative to the module.
The rules:
1. If constant.KEY_PATH not in mod_info, we can't tell if it's a
module return False.
- 2. If project_relative_path is empty, it's under Android root, return
- True.
- 3. If module's path equals or starts with project_relative_path
- return True, otherwise return False.
+ 2. If rel_path is empty, it's under Android root, return True.
+ 3. If module's path equals or starts with rel_path return True,
+ otherwise return False.
Args:
mod_info: the module-info dictionary of the checked module.
- project_relative_path: project's relative path
+ rel_path: project's relative path
Returns:
True if it's the given project path is relative to the module,
@@ -103,10 +102,9 @@
if constant.KEY_PATH not in mod_info:
return False
path = mod_info[constant.KEY_PATH][0]
- if project_relative_path == '':
+ if rel_path == '':
return True
if (constant.KEY_CLASS in mod_info
- and (path == project_relative_path
- or path.startswith(project_relative_path + os.sep))):
+ and common_util.is_source_under_relative_path(path, rel_path)):
return True
return False
diff --git a/aidegen/lib/native_module_info.py b/aidegen/lib/native_module_info.py
index 6388e91..1afeb96 100644
--- a/aidegen/lib/native_module_info.py
+++ b/aidegen/lib/native_module_info.py
@@ -96,7 +96,7 @@
continue
rel_path, _ = common_util.get_related_paths(self, target)
for path in self.path_to_module_info:
- if path.startswith(rel_path):
+ if common_util.is_source_under_relative_path(path, rel_path):
projects.extend(self.get_module_names(path))
return projects
diff --git a/aidegen/lib/native_module_info_unittest.py b/aidegen/lib/native_module_info_unittest.py
index 440bc5c..ce017c0 100644
--- a/aidegen/lib/native_module_info_unittest.py
+++ b/aidegen/lib/native_module_info_unittest.py
@@ -81,6 +81,16 @@
'/path/to/rebuild'
],
'module_name': _REBUILD_TARGET1
+ },
+ 'multiarch-eng': {
+ 'path': [
+ 'shared/path/to/be/used2-eng'
+ ],
+ 'srcs': [
+ 'shared/path/to/be/used2/multiarch-eng.cpp',
+ 'out/soong/.intermediates/shared/path/to/be/used2/gen/Iarch-eng.cpp'
+ ],
+ 'module_name': 'multiarch-eng'
}
}
_CC_MODULE_INFO = {
diff --git a/aidegen/lib/native_util.py b/aidegen/lib/native_util.py
index 6926818..874444a 100644
--- a/aidegen/lib/native_util.py
+++ b/aidegen/lib/native_util.py
@@ -79,9 +79,9 @@
"""
if not current_parent:
return abs_path
- if abs_path.startswith(current_parent):
+ if common_util.is_source_under_relative_path(abs_path, current_parent):
return current_parent
- if current_parent.startswith(abs_path):
+ if common_util.is_source_under_relative_path(current_parent, abs_path):
return abs_path
return _find_parent(
os.path.dirname(abs_path), os.path.dirname(current_parent))
@@ -218,6 +218,6 @@
True if any native project exists otherwise False.
"""
for path in path_to_module_info:
- if path.startswith(rel_path):
+ if common_util.is_source_under_relative_path(path, rel_path):
return True
return False
diff --git a/aidegen/lib/project_file_gen.py b/aidegen/lib/project_file_gen.py
index bac2521..9e05217 100644
--- a/aidegen/lib/project_file_gen.py
+++ b/aidegen/lib/project_file_gen.py
@@ -23,14 +23,16 @@
import logging
import os
-import pathlib
import shutil
from aidegen import constant
from aidegen import templates
+from aidegen.idea import iml
+from aidegen.idea import xml_gen
from aidegen.lib import common_util
from aidegen.lib import config
from aidegen.lib import project_config
+from aidegen.project import source_splitter
# FACET_SECTION is a part of iml, which defines the framework of the project.
_FACET_SECTION = '''\
@@ -39,12 +41,11 @@
</facet>'''
_SOURCE_FOLDER = (' <sourceFolder url='
'"file://%s" isTestSource="%s" />\n')
-_EXCLUDE_ITEM = ' <excludeFolder url="file://%s" />\n'
_CONTENT_URL = ' <content url="file://%s">\n'
_END_CONTENT = ' </content>\n'
_SRCJAR_URL = ('%s<content url="jar://{SRCJAR}!/">\n'
- '%s<sourceFolder url="jar://{SRCJAR}!/" isTestSource="False" />\n'
- '%s</content>') % (' ' * 8, ' ' * 12, ' ' * 8)
+ '%s<sourceFolder url="jar://{SRCJAR}!/" isTestSource="False" />'
+ '\n%s</content>') % (' ' * 8, ' ' * 12, ' ' * 8)
_ORDER_ENTRY = (' <orderEntry type="module-library" exported="">'
'<library><CLASSES><root url="jar://%s!/" /></CLASSES>'
'<JAVADOC /><SOURCES /></library></orderEntry>\n')
@@ -81,15 +82,6 @@
_IML_EXTENSION = '.iml'
_FRAMEWORK_JAR = os.sep + 'framework.jar'
_HIGH_PRIORITY_JARS = [_FRAMEWORK_JAR]
-# Temporarily exclude test-dump and src_stub folders to prevent symbols from
-# resolving failure by incorrect reference. These two folders should be removed
-# after b/136982078 is resolved.
-_EXCLUDE_FOLDERS = ['.idea', '.repo', 'art', 'bionic', 'bootable', 'build',
- 'dalvik', 'developers', 'device', 'hardware', 'kernel',
- 'libnativehelper', 'pdk', 'prebuilts', 'sdk', 'system',
- 'toolchain', 'tools', 'vendor', 'out',
- 'art/tools/ahat/src/test-dump',
- 'cts/common/device-side/device-info/src_stub']
_GIT_FOLDER_NAME = '.git'
# Support gitignore by symbolic link to aidegen/data/gitignore_template.
_GITIGNORE_FILE_NAME = '.gitignore'
@@ -108,16 +100,9 @@
class ProjectFileGenerator:
"""Project file generator.
- Class attributes:
- _USED_NAME_CACHE: A dict to cache already used iml project file names
- and prevent duplicated iml names from breaking IDEA.
-
Attributes:
project_info: A instance of ProjectInfo.
"""
- # b/121256503: Prevent duplicated iml names from breaking IDEA.
- # Use a map to cache in-using(already used) iml project file names.
- _USED_NAME_CACHE = dict()
def __init__(self, project_info):
"""ProjectFileGenerator initialize.
@@ -127,60 +112,6 @@
"""
self.project_info = project_info
- @classmethod
- def get_unique_iml_name(cls, abs_module_path):
- """Create a unique iml name if needed.
-
- If the name of last sub folder is used already, prefixing it with prior
- sub folder names as a candidate name. If finally, it's unique, storing
- in _USED_NAME_CACHE as: { abs_module_path:unique_name }. The cts case
- and UX of IDE view are the main reasons why using module path strategy
- but not name of module directly. Following is the detailed strategy:
- 1. While loop composes a sensible and shorter name, by checking unique
- to finish the loop and finally add to cache.
- Take ['cts', 'tests', 'app', 'ui'] an example, if 'ui' isn't
- occupied, use it, else try 'cts_ui', then 'cts_app_ui', the worst
- case is whole three candidate names are occupied already.
- 2. 'Else' for that while stands for no suitable name generated, so
- trying 'cts_tests_app_ui' directly. If it's still non unique, e.g.,
- module path cts/xxx/tests/app/ui occupied that name already,
- appending increasing sequence number to get a unique name.
-
- Args:
- abs_module_path: The absolute module path string.
-
- Return:
- String: A unique iml name.
- """
- if abs_module_path in cls._USED_NAME_CACHE:
- return cls._USED_NAME_CACHE[abs_module_path]
-
- uniq_name = abs_module_path.strip(os.sep).split(os.sep)[-1]
- if any(uniq_name == name for name in cls._USED_NAME_CACHE.values()):
- parent_path = os.path.relpath(abs_module_path,
- common_util.get_android_root_dir())
- sub_folders = parent_path.split(os.sep)
- zero_base_index = len(sub_folders) - 1
- # Start compose a sensible, shorter and unique name.
- while zero_base_index > 0:
- uniq_name = '_'.join(
- [sub_folders[0], '_'.join(sub_folders[zero_base_index:])])
- zero_base_index = zero_base_index - 1
- if uniq_name not in cls._USED_NAME_CACHE.values():
- break
- else:
- # b/133393638: To handle several corner cases.
- uniq_name_base = parent_path.strip(os.sep).replace(os.sep, '_')
- i = 0
- uniq_name = uniq_name_base
- while uniq_name in cls._USED_NAME_CACHE.values():
- i = i + 1
- uniq_name = '_'.join([uniq_name_base, str(i)])
- cls._USED_NAME_CACHE[abs_module_path] = uniq_name
- logging.debug('Unique name for module path of %s is %s.',
- abs_module_path, uniq_name)
- return uniq_name
-
def _generate_source_section(self, sect_name, is_test):
"""Generate specific section of the project file.
@@ -198,15 +129,12 @@
def generate_intellij_project_file(self, iml_path_list=None):
"""Generates IntelliJ project file.
+ # TODO(b/155346505): Move this method to idea folder.
+
Args:
iml_path_list: An optional list of submodule's iml paths, the
default value is None.
"""
- source_dict = self._generate_source_section('source_folder_path', False)
- source_dict.update(
- self._generate_source_section('test_folder_path', True))
- self.project_info.iml_path, _ = self._generate_iml(source_dict)
- self.project_info.git_path = self._get_project_git_path()
if self.project_info.is_main_project:
self._generate_modules_xml(iml_path_list)
self._copy_constant_project_files()
@@ -215,22 +143,21 @@
def generate_ide_project_files(cls, projects):
"""Generate IDE project files by a list of ProjectInfo instances.
- For multiple modules case, we call _generate_intellij_project_file to
- generate iml file for submodules first and pass submodules' iml file
- paths as an argument to function _generate_intellij_project_file when we
- generate main module.iml file. In this way, we can add submodules'
- dependencies iml and their own iml file paths to main module's
- module.xml.
+ It deals with the sources by ProjectSplitter to create iml files for
+ each project and generate_intellij_project_file only creates
+ the other project files under .idea/.
Args:
projects: A list of ProjectInfo instances.
"""
# Initialization
- cls._USED_NAME_CACHE.clear()
- _merge_all_shared_source_paths(projects)
- for project in projects[1:]:
- ProjectFileGenerator(project).generate_intellij_project_file()
- iml_paths = [project.iml_path for project in projects[1:]]
+ iml.IMLGenerator.USED_NAME_CACHE.clear()
+ proj_splitter = source_splitter.ProjectSplitter(projects)
+ proj_splitter.get_dependencies()
+ proj_splitter.revise_source_folders()
+ iml_paths = [proj_splitter.gen_framework_srcjars_iml()]
+ proj_splitter.gen_projects_iml()
+ iml_paths += [project.iml_path for project in projects]
ProjectFileGenerator(
projects[0]).generate_intellij_project_file(iml_paths)
_merge_project_vcs_xmls(projects)
@@ -290,7 +217,7 @@
"""
facet = ''
facet_path = self.project_info.project_absolute_path
- if os.path.isfile(os.path.join(facet_path, _ANDROID_MANIFEST)):
+ if os.path.isfile(os.path.join(facet_path, constant.ANDROID_MANIFEST)):
facet = _FACET_SECTION
return content.replace(_FACET_TOKEN, facet)
@@ -385,10 +312,12 @@
# If relative_path empty, it is Android root. When handling root
# module, we add the exclude folders to speed up indexing time.
if not relative_path:
- src_builder.extend(_get_exclude_content(root_path))
+ src_builder.extend(
+ source_splitter.get_exclude_content(root_path))
excludes = project_config.ProjectConfig.get_instance().exclude_paths
if excludes:
- src_builder.extend(_get_exclude_content(root_path, excludes))
+ src_builder.extend(
+ source_splitter.get_exclude_content(root_path, excludes))
src_builder.append(_END_CONTENT)
else:
for path, is_test_flag in sorted(source_dict.items()):
@@ -434,6 +363,9 @@
def _generate_iml(self, source_dict):
"""Generate iml file.
+ #TODO(b/155346505): Removes this method after the project files are
+ # created by ProjectSplitter.
+
Args:
source_dict: A dictionary of sources path with a flag to distinguish
the path is test or source folder in IntelliJ.
@@ -459,7 +391,7 @@
project_source_dict, True)
module_content = self._handle_srcjar_folder(module_content)
# b/121256503: Prevent duplicated iml names from breaking IDEA.
- module_name = self.get_unique_iml_name(module_path)
+ module_name = iml.IMLGenerator.get_unique_iml_name(module_path)
module_iml_path = os.path.join(module_path,
module_name + _IML_EXTENSION)
@@ -504,7 +436,7 @@
module_path = self.project_info.project_absolute_path
# b/121256503: Prevent duplicated iml names from breaking IDEA.
- module_name = self.get_unique_iml_name(module_path)
+ module_name = iml.IMLGenerator.get_unique_iml_name(module_path)
if iml_path_list is not None:
module_list = [
@@ -545,51 +477,6 @@
content = content.replace(_ENABLE_DEBUGGER_MODULE_TOKEN, '')
return content
- def _get_project_git_path(self):
- """Get the project's git path.
-
- Return:
- String: A module's git path.
- """
- module_path = self.project_info.project_absolute_path
- # When importing whole Android repo, it shouldn't add vcs.xml,
- # because IntelliJ doesn't handle repo as a version control.
- if module_path == common_util.get_android_root_dir():
- return None
- git_path = module_path
- while not os.path.isdir(os.path.join(git_path, _GIT_FOLDER_NAME)):
- git_path = str(pathlib.Path(git_path).parent)
- if git_path == os.sep:
- logging.warning('%s can\'t find its .git folder', module_path)
- return None
- return git_path
-
-
-def _get_exclude_content(root_path, excludes=None):
- """Get the exclude folder content list.
-
- It returns the exclude folders content list.
- e.g.
- ['<excludeFolder url="file://a/.idea" />',
- '<excludeFolder url="file://a/.repo" />']
-
- Args:
- root_path: Android source file path.
- excludes: A list of exclusive directories, the default value is None but
- will be assigned to _EXCLUDE_FOLDERS.
-
- Returns:
- String: exclude folder content list.
- """
- exclude_items = []
- if not excludes:
- excludes = _EXCLUDE_FOLDERS
- for folder in excludes:
- folder_path = os.path.join(root_path, folder)
- if os.path.isdir(folder_path):
- exclude_items.append(_EXCLUDE_ITEM % folder_path)
- return exclude_items
-
def _trim_same_root_source(source_list):
"""Trim the source which has the same root.
@@ -640,12 +527,10 @@
projects: A list of ProjectInfo instances.
"""
main_project_absolute_path = projects[0].project_absolute_path
- # TODO(b/154436905): Add the necessary git path to vcs.xml.
if main_project_absolute_path != common_util.get_android_root_dir():
- git_paths = [project.git_path for project in projects]
- _write_vcs_xml(main_project_absolute_path, git_paths)
- else:
- _write_vcs_xml(main_project_absolute_path, [])
+ git_paths = [common_util.find_git_root(project.project_relative_path)
+ for project in projects if project.project_relative_path]
+ xml_gen.gen_vcs_xml(main_project_absolute_path, git_paths)
def _get_all_git_path(root_path):
diff --git a/aidegen/lib/project_file_gen_unittest.py b/aidegen/lib/project_file_gen_unittest.py
index 3c53cc3..5556382 100644
--- a/aidegen/lib/project_file_gen_unittest.py
+++ b/aidegen/lib/project_file_gen_unittest.py
@@ -25,11 +25,14 @@
from aidegen import aidegen_main
from aidegen import templates
from aidegen import unittest_constants
+from aidegen.idea import iml
+from aidegen.idea import xml_gen
from aidegen.lib import common_util
from aidegen.lib import config
from aidegen.lib import project_config
from aidegen.lib import project_file_gen
from aidegen.lib import project_info
+from aidegen.project import source_splitter
from atest import module_info
@@ -52,16 +55,13 @@
'modules_only_self_module.xml')
_ENABLE_DEBUGGER_MODULE_SAMPLE = os.path.join(
_TEST_DATA_PATH, 'modules_with_enable_debugger.xml')
- _VCS_XML_SAMPLE = os.path.join(_TEST_DATA_PATH, 'vcs.xml')
_IML_PATH = os.path.join(_ANDROID_PROJECT_PATH, 'android_project.iml')
_DEPENDENCIES_IML_PATH = os.path.join(_ANDROID_PROJECT_PATH,
'dependencies.iml')
_IDEA_PATH = os.path.join(_ANDROID_PROJECT_PATH, '.idea')
_MODULE_PATH = os.path.join(_IDEA_PATH, 'modules.xml')
- _VCS_PATH = os.path.join(_IDEA_PATH, 'vcs.xml')
_SOURCE_SAMPLE = os.path.join(_TEST_DATA_PATH, 'source.iml')
_SRCJAR_SAMPLE = os.path.join(_TEST_DATA_PATH, 'srcjar.iml')
- _LOCAL_PATH_TOKEN = '@LOCAL_PATH@'
_AOSP_FOLDER = '/aosp'
_TEST_SOURCE_LIST = [
'a/b/c/d', 'a/b/c/d/e', 'a/b/c/d/e/f', 'a/b/c/d/f', 'e/f/a', 'e/f/b/c',
@@ -70,11 +70,6 @@
_ANDROID_SOURCE_RELATIVE_PATH = 'test_data/project'
_SAMPLE_CONTENT_LIST = ['a/b/c/d', 'e/f']
_SAMPLE_TRIMMED_SOURCE_LIST = ['a/b/c/d', 'e/f/a', 'e/f/b/c', 'e/f/g/h']
- _SAMPLE_EXCLUDE_FOLDERS = [
- ' <excludeFolder url="file://%s/.idea" />\n'
- % _TEST_DATA_PATH,
- ' <excludeFolder url="file://%s/out" />\n' % _TEST_DATA_PATH,
- ]
def _init_project_config(self, args):
"""Initialize project configurations."""
@@ -272,36 +267,24 @@
self._MAIN_MODULE_XML_SAMPLE)
self.assertEqual(test_module, sample_module)
- @mock.patch('os.path.isdir')
- @mock.patch('aidegen.lib.project_info.ProjectInfo')
- def test_get_project_git_path(self, mock_project, mock_isdir):
- """Test _get_project_git_path."""
- mock_project.project_absolute_path = '/a/b'
- mock_isdir.return_value = True
- expected_git_path = '/a/b'
- pfile_gen = project_file_gen.ProjectFileGenerator(mock_project)
- test_git_path = pfile_gen._get_project_git_path()
- self.assertEqual(test_git_path, expected_git_path)
-
+ @mock.patch.object(xml_gen, 'gen_vcs_xml')
@mock.patch('aidegen.lib.common_util.get_android_root_dir')
- @mock.patch('aidegen.lib.project_file_gen._get_all_git_path')
+ @mock.patch('aidegen.lib.common_util.find_git_root')
@mock.patch('aidegen.lib.project_info.ProjectInfo')
- def test_merge_project_vcs_xmls(self, mock_project, mock_get_all_git_path,
- mock_get_root):
+ def test_merge_project_vcs_xmls(self, mock_project, mock_get_git_root,
+ mock_get_root, mock_write):
"""Test _merge_project_vcs_xmls."""
- mock_project.project_absolute_path = (
- unittest_constants.ANDROID_PROJECT_PATH)
- mock_project.git_path = unittest_constants.ANDROID_PROJECT_PATH
+ mock_get_root.return_value = '/a/b'
+ mock_project.project_absolute_path = '/a/b/c'
+ mock_project.project_relative_path = 'c'
+ mock_get_git_root.return_value = '/a/b/c'
project_file_gen._merge_project_vcs_xmls([mock_project])
- test_vcs = common_util.read_file_content(self._VCS_PATH)
- sample_vcs = common_util.read_file_content(self._VCS_XML_SAMPLE)
- # The sample must base on the real path.
- sample_vcs = sample_vcs.replace(self._LOCAL_PATH_TOKEN,
- self._ANDROID_PROJECT_PATH)
- self.assertEqual(test_vcs, sample_vcs)
- mock_get_root.return_value = unittest_constants.ANDROID_PROJECT_PATH
+ mock_write.assert_called_with('/a/b/c', ['/a/b/c'])
+ mock_project.project_absolute_path = '/a/b'
+ mock_project.project_relative_path = None
+ mock_get_git_root.return_value = None
project_file_gen._merge_project_vcs_xmls([mock_project])
- self.assertFalse(mock_get_all_git_path.called)
+ mock_write.assert_called_with('/a/b', [])
def test_get_uniq_iml_name(self):
"""Test the unique name cache mechanism.
@@ -332,7 +315,7 @@
path_list.append(path)
print('{} {}.'.format('path list with length:', len(path_list)))
- names = [project_file_gen.ProjectFileGenerator.get_unique_iml_name(f)
+ names = [iml.IMLGenerator.get_unique_iml_name(f)
for f in path_list]
print('{} {}.'.format('Names list with length:', len(names)))
@@ -441,12 +424,6 @@
project_file_gen._merge_all_shared_source_paths(projects)
self.assertEqual(mock_main_project.source_path, expected_result)
- def test_get_exclude_folders(self):
- """Test _get_exclude_folders."""
- exclude_folders = project_file_gen._get_exclude_content(
- self._TEST_DATA_PATH)
- self.assertEqual(self._SAMPLE_EXCLUDE_FOLDERS, exclude_folders)
-
@mock.patch('aidegen.lib.project_config.ProjectConfig')
@mock.patch('aidegen.lib.project_info.ProjectInfo')
def test_update_enable_debugger(self, mock_project, mock_config):
@@ -477,28 +454,18 @@
sample_source = common_util.read_file_content(self._SRCJAR_SAMPLE)
self.assertEqual(source, sample_source)
- @mock.patch.object(project_file_gen.ProjectFileGenerator,
- '_get_project_git_path')
- @mock.patch.object(project_file_gen.ProjectFileGenerator,
- '_generate_iml')
- @mock.patch.object(project_file_gen.ProjectFileGenerator,
- '_generate_source_section')
+ @mock.patch.object(common_util, 'find_git_root')
@mock.patch.object(project_file_gen.ProjectFileGenerator,
'_generate_modules_xml')
@mock.patch.object(project_info, 'ProjectInfo')
def test_generate_intellij_project_file(self, mock_project,
- mock_gen_xml, mock_gen_source,
- mock_gen_iml, mock_get_git_path):
+ mock_gen_xml, mock_get_git_path):
"""Test generate_intellij_project_file."""
mock_project.project_absolute_path = self._ANDROID_PROJECT_PATH
- mock_gen_source.return_value = {'a', 'true'}
- mock_gen_iml.return_value = ('iml/path', '')
mock_get_git_path.return_value = 'git/path'
project_gen = project_file_gen.ProjectFileGenerator(mock_project)
project_gen.project_info.is_main_project = False
project_gen.generate_intellij_project_file()
- self.assertEqual(project_gen.project_info.iml_path, 'iml/path')
- self.assertEqual(project_gen.project_info.git_path, 'git/path')
self.assertFalse(mock_gen_xml.called)
project_gen.project_info.is_main_project = True
project_gen.generate_intellij_project_file()
@@ -578,6 +545,38 @@
project_file_gen.gen_enable_debugger_module('a', 'b')
self.assertTrue(mock_update_module.called)
+ @mock.patch.object(project_config.ProjectConfig, 'get_instance')
+ @mock.patch.object(project_file_gen, '_merge_project_vcs_xmls')
+ @mock.patch.object(project_file_gen.ProjectFileGenerator,
+ 'generate_intellij_project_file')
+ @mock.patch.object(source_splitter.ProjectSplitter, 'gen_projects_iml')
+ @mock.patch.object(source_splitter.ProjectSplitter,
+ 'gen_framework_srcjars_iml')
+ @mock.patch.object(source_splitter.ProjectSplitter, 'revise_source_folders')
+ @mock.patch.object(source_splitter.ProjectSplitter, 'get_dependencies')
+ @mock.patch.object(common_util, 'get_android_root_dir')
+ @mock.patch.object(project_info, 'ProjectInfo')
+ def test_generate_ide_project_files(self, mock_project, mock_get_root,
+ mock_get_dep, mock_revise_src,
+ mock_gen_framework_srcjars,
+ mock_gen_projects_iml, mock_gen_file,
+ mock_merge_vcs, mock_project_config):
+ """Test generate_ide_project_files."""
+ mock_get_root.return_value = '/aosp'
+ mock_project.project_absolute_path = '/aosp'
+ mock_project.project_relative_path = ''
+ project_cfg = mock.Mock()
+ mock_project_config.return_value = project_cfg
+ project_cfg.full_repo = True
+ gen_proj = project_file_gen.ProjectFileGenerator
+ gen_proj.generate_ide_project_files([mock_project])
+ self.assertTrue(mock_get_dep.called)
+ self.assertTrue(mock_revise_src.called)
+ self.assertTrue(mock_gen_framework_srcjars.called)
+ self.assertTrue(mock_gen_projects_iml.called)
+ self.assertTrue(mock_gen_file.called)
+ self.assertTrue(mock_merge_vcs.called)
+
if __name__ == '__main__':
unittest.main()
diff --git a/aidegen/lib/project_info.py b/aidegen/lib/project_info.py
index db2b8a1..dc23bdc 100644
--- a/aidegen/lib/project_info.py
+++ b/aidegen/lib/project_info.py
@@ -71,7 +71,6 @@
directory or it's subdirectories.
dep_modules: A dict has recursively dependent modules of
project_module_names.
- git_path: The project's git path.
iml_path: The project's iml file path.
source_path: A dictionary to keep following data:
source_folder_path: A set contains the source folder
@@ -90,6 +89,8 @@
The "!/" is a content descriptor for
compressed files in IntelliJ.
is_main_project: A boolean to verify the project is main project.
+ dependencies: A list of dependency projects' iml file names, e.g. base,
+ framework-all.
"""
modules_info = None
@@ -112,7 +113,6 @@
self.modules_info.get_module_names(rel_path))
self.project_relative_path = rel_path
self.project_absolute_path = abs_path
- self.git_path = ''
self.iml_path = ''
self._set_default_modues()
self._init_source_path()
@@ -122,6 +122,7 @@
self.dep_modules = self.get_dep_modules()
self._filter_out_modules()
self._display_convert_make_files_message()
+ self.dependencies = []
def _set_default_modues(self):
"""Append default hard-code modules, source paths and jar files.
@@ -197,7 +198,7 @@
if module_info.AidegenModuleInfo.is_target_module(data):
modules.add(name)
else:
- logging.debug(_NOT_TARGET, name, data['class'],
+ logging.debug(_NOT_TARGET, name, data.get('class', ''),
constant.TARGET_CLASSES)
return modules
diff --git a/aidegen/lib/source_locator.py b/aidegen/lib/source_locator.py
index 567c386..7a8e5a2 100644
--- a/aidegen/lib/source_locator.py
+++ b/aidegen/lib/source_locator.py
@@ -26,6 +26,7 @@
from aidegen import constant
from aidegen.lib import common_util
from aidegen.lib import module_info
+from aidegen.lib import project_config
# Parse package name from the package declaration line of a java.
# Group matches "foo.bar" of line "package foo.bar;" or "package foo.bar"
@@ -39,8 +40,6 @@
_TARGET_FILES = [_JAVA_EXT, _KOTLIN_EXT]
_JARJAR_RULES_FILE = 'jarjar-rules.txt'
_KEY_JARJAR_RULES = 'jarjar_rules'
-_KEY_JARS = 'jars'
-_KEY_TESTS = 'tests'
_NAME_AAPT2 = 'aapt2'
_TARGET_R_SRCJAR = 'R.srcjar'
_TARGET_AAPT2_SRCJAR = _NAME_AAPT2 + _SRCJAR_EXT
@@ -52,6 +51,8 @@
]
_ANDROID = 'android'
_REPACKAGES = 'repackaged'
+_FRAMEWORK_SRCJARS_PATH = os.path.join(constant.FRAMEWORK_PATH,
+ constant.FRAMEWORK_SRCJARS)
class ModuleData:
@@ -62,12 +63,13 @@
repo root.
module_path: A string of the relative path to the module.
- src_dirs: A set to keep the unique source folder relative paths.
- test_dirs: A set to keep the unique test folder relative paths.
- jar_files: A set to keep the unique jar file relative paths.
- r_java_paths: A set to keep the R folder paths to use in Eclipse.
- srcjar_paths: A set to keep the srcjar source root paths to use in
+ src_dirs: A list to keep the unique source folder relative paths.
+ test_dirs: A list to keep the unique test folder relative paths.
+ jar_files: A list to keep the unique jar file relative paths.
+ r_java_paths: A list to keep the R folder paths to use in Eclipse.
+ srcjar_paths: A list to keep the srcjar source root paths to use in
IntelliJ.
+ dep_paths: A list to keep the dependency modules' path.
referenced_by_jar: A boolean to check if the module is referenced by a
jar file.
build_targets: A set to keep the unique build target jar or srcjar file
@@ -115,6 +117,7 @@
self.jar_files = []
self.r_java_paths = []
self.srcjar_paths = []
+ self.dep_paths = []
self.referenced_by_jar = False
self.build_targets = set()
self.missing_jars = set()
@@ -243,7 +246,8 @@
def _is_android_supported_module(self):
"""Determine if this is an Android supported module."""
- return self.module_path.startswith(_ANDROID_SUPPORT_PATH_KEYWORD)
+ return common_util.is_source_under_relative_path(
+ self.module_path, _ANDROID_SUPPORT_PATH_KEYWORD)
def _check_jarjar_rules_exist(self):
"""Check if jarjar rules exist."""
@@ -252,7 +256,7 @@
def _check_jars_exist(self):
"""Check if jars exist."""
- return self._check_key(_KEY_JARS)
+ return self._check_key(constant.KEY_JARS)
def _check_classes_jar_exist(self):
"""Check if classes_jar exist."""
@@ -309,7 +313,7 @@
Returns:
True if module path is a test module path, otherwise False.
"""
- return _KEY_TESTS in src_dir.split(os.sep)
+ return constant.KEY_TESTS in src_dir.split(os.sep)
def _get_source_folder(self, java_file):
"""Parsing a java to get the package name to filter out source path.
@@ -479,8 +483,8 @@
},
Path to the jar file is prebuilts/misc/common/asm/asm-6.0.jar.
"""
- if self._check_key(_KEY_JARS):
- for jar_name in self.module_data[_KEY_JARS]:
+ if self._check_key(constant.KEY_JARS):
+ for jar_name in self.module_data[constant.KEY_JARS]:
if self._check_key(constant.KEY_INSTALLED):
self._append_jar_from_installed()
else:
@@ -566,6 +570,21 @@
if self.referenced_by_jar and self.missing_jars:
self.build_targets |= self.missing_jars
+ def _collect_dep_paths(self):
+ """Collects the path of dependency modules."""
+ config = project_config.ProjectConfig.get_instance()
+ modules_info = config.atest_module_info
+ self.dep_paths = []
+ if self.module_path != constant.FRAMEWORK_PATH:
+ self.dep_paths.append(constant.FRAMEWORK_PATH)
+ self.dep_paths.append(_FRAMEWORK_SRCJARS_PATH)
+ if self.module_path != constant.LIBCORE_PATH:
+ self.dep_paths.append(constant.LIBCORE_PATH)
+ for module in self.module_data.get(constant.KEY_DEPENDENCIES, []):
+ for path in modules_info.get_paths(module):
+ if path not in self.dep_paths and path != self.module_path:
+ self.dep_paths.append(path)
+
def locate_sources_path(self):
"""Locate source folders' paths or jar files."""
# Check if users need to reference source according to source depth.
diff --git a/aidegen/lib/source_locator_unittest.py b/aidegen/lib/source_locator_unittest.py
index ff2865c..b8636a4 100644
--- a/aidegen/lib/source_locator_unittest.py
+++ b/aidegen/lib/source_locator_unittest.py
@@ -24,7 +24,9 @@
from aidegen.lib import common_util
from aidegen.lib import module_info
+from aidegen.lib import project_config
from aidegen.lib import source_locator
+from atest import module_info as amodule_info
# pylint: disable=too-many-arguments
@@ -503,6 +505,52 @@
mod_data._append_classes_jar()
self.assertEqual(mod_data.jar_files, [])
+ @mock.patch.object(amodule_info, 'ModuleInfo')
+ @mock.patch.object(amodule_info.ModuleInfo, 'get_paths')
+ @mock.patch.object(project_config.ProjectConfig, 'get_instance')
+ def test_collect_dep_paths(self, mock_config, mock_get_paths,
+ mock_atest_module_info):
+ """Test _collect_dep_paths."""
+ mod_name = 'test'
+ mod_info = {
+ 'name': 'test',
+ 'path': ['frameworks/base'],
+ 'dependencies': ['test_module']
+ }
+ mod_data = source_locator.ModuleData(mod_name, mod_info, 0)
+ mock_instance = mock_config.return_value
+ mock_instance.atest_module_info = mock_atest_module_info
+ mock_instance.atest_module_info.get_paths = mock_get_paths
+ mock_get_paths.return_value = []
+ expected = [
+ 'frameworks/base/framework_srcjars',
+ 'libcore',
+ ]
+ mod_data._collect_dep_paths()
+ self.assertEqual(mod_data.dep_paths, expected)
+ mod_info['path'] = ['libcore']
+ mod_data = source_locator.ModuleData(mod_name, mod_info, 0)
+ expected = [
+ 'frameworks/base',
+ 'frameworks/base/framework_srcjars',
+ ]
+ mod_data._collect_dep_paths()
+ self.assertEqual(mod_data.dep_paths, expected)
+ mock_get_paths.return_value = ['test']
+ mod_info['path'] = ['test']
+ mod_data = source_locator.ModuleData(mod_name, mod_info, 0)
+ expected = [
+ 'frameworks/base',
+ 'frameworks/base/framework_srcjars',
+ 'libcore',
+ ]
+ mod_data._collect_dep_paths()
+ self.assertEqual(mod_data.dep_paths, expected)
+ mock_get_paths.return_value = ['dep/path']
+ expected.append('dep/path')
+ mod_data._collect_dep_paths()
+ self.assertEqual(mod_data.dep_paths, expected)
+
class EclipseModuleDataUnittests(unittest.TestCase):
"""Unit tests for the EclipseModuleData in module_data.py"""
diff --git a/aidegen/project/__init__.py b/aidegen/project/__init__.py
new file mode 100755
index 0000000..e69de29
--- /dev/null
+++ b/aidegen/project/__init__.py
diff --git a/aidegen/project/source_splitter.py b/aidegen/project/source_splitter.py
new file mode 100644
index 0000000..17ca12c
--- /dev/null
+++ b/aidegen/project/source_splitter.py
@@ -0,0 +1,292 @@
+#!/usr/bin/env python3
+#
+# Copyright 2020 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Separate the sources from multiple projects."""
+
+import os
+
+from aidegen import constant
+from aidegen.idea import iml
+from aidegen.lib import common_util
+from aidegen.lib import project_config
+
+_KEY_SOURCE_PATH = 'source_folder_path'
+_KEY_TEST_PATH = 'test_folder_path'
+_SOURCE_FOLDERS = [_KEY_SOURCE_PATH, _KEY_TEST_PATH]
+_KEY_SRCJAR_PATH = 'srcjar_path'
+_KEY_R_PATH = 'r_java_path'
+_KEY_JAR_PATH = 'jar_path'
+_EXCLUDE_ITEM = '\n <excludeFolder url="file://%s" />'
+# Temporarily exclude test-dump and src_stub folders to prevent symbols from
+# resolving failure by incorrect reference. These two folders should be removed
+# after b/136982078 is resolved.
+_EXCLUDE_FOLDERS = ['.idea', '.repo', 'art', 'bionic', 'bootable', 'build',
+ 'dalvik', 'developers', 'device', 'hardware', 'kernel',
+ 'libnativehelper', 'pdk', 'prebuilts', 'sdk', 'system',
+ 'toolchain', 'tools', 'vendor', 'out',
+ 'art/tools/ahat/src/test-dump',
+ 'cts/common/device-side/device-info/src_stub']
+
+
+class ProjectSplitter:
+ """Splits the sources from multiple projects.
+
+ It's a specific solution to deal with the source folders in multiple
+ project case. Since the IntelliJ does not allow duplicate source folders,
+ AIDEGen needs to separate the source folders for each project. The single
+    project case is no different from the current structure.
+
+ Usage:
+ project_splitter = ProjectSplitter(projects)
+
+ # Find the dependencies between the projects.
+ project_splitter.get_dependencies()
+
+ # Clear the source folders for each project.
+ project_splitter.revise_source_folders()
+
+ Attributes:
+ _projects: A list of ProjectInfo.
+ _all_srcs: A dictionary contains all sources of multiple projects.
+ e.g.
+ {
+ 'module_name': 'test',
+ 'path': ['path/to/module'],
+ 'srcs': ['src_folder1', 'src_folder2'],
+ 'tests': ['test_folder1', 'test_folder2']
+ 'jars': ['jar1.jar'],
+ 'srcjars': ['1.srcjar', '2.srcjar'],
+ 'dependencies': ['framework_srcjars', 'base'],
+ 'iml_name': '/abs/path/to/iml.iml'
+ }
+ _framework_exist: A boolean, True if framework is one of the projects.
+ _framework_iml: A string, the name of the framework's iml.
+ _full_repo: A boolean, True if loading with full Android sources.
+ _full_repo_iml: A string, the name of the Android folder's iml.
+ """
+ def __init__(self, projects):
+ """ProjectSplitter initialize.
+
+ Args:
+ projects: A list of ProjectInfo object.
+ """
+ self._projects = projects
+ self._all_srcs = dict(projects[0].source_path)
+ self._framework_iml = None
+ self._framework_exist = any(
+ {p.project_relative_path == constant.FRAMEWORK_PATH
+ for p in self._projects})
+ if self._framework_exist:
+ self._framework_iml = iml.IMLGenerator.get_unique_iml_name(
+ os.path.join(common_util.get_android_root_dir(),
+ constant.FRAMEWORK_PATH))
+ self._full_repo = project_config.ProjectConfig.get_instance().full_repo
+ if self._full_repo:
+ self._full_repo_iml = os.path.basename(
+ common_util.get_android_root_dir())
+
+ def revise_source_folders(self):
+ """Resets the source folders of each project.
+
+ There should be no duplicate source root path in IntelliJ. The issue
+ doesn't happen in single project case. Once users choose multiple
+ projects, there could be several same source paths of different
+ projects. In order to prevent that, we should remove the source paths
+ in dependencies.iml which are duplicate with the paths in [module].iml
+ files.
+
+ Steps to prevent the duplicate source root path in IntelliJ:
+ 1. Copy all sources from sub-projects to main project.
+ 2. Delete the source and test folders which are not under the
+ sub-projects.
+ 3. Delete the sub-projects' source and test paths from the main project.
+ """
+ self._collect_all_srcs()
+ self._keep_local_sources()
+ self._remove_duplicate_sources()
+
+ def _collect_all_srcs(self):
+ """Copies all projects' sources to a dictionary."""
+ for project in self._projects[1:]:
+ for key, value in project.source_path.items():
+ self._all_srcs[key].update(value)
+
+ def _keep_local_sources(self):
+ """Removes source folders which are not under the project's path.
+
+ 1. Remove the source folders which are not under the project.
+ 2. Remove the duplicate project's source folders from the _all_srcs.
+ """
+ for project in self._projects:
+ srcs = project.source_path
+ relpath = project.project_relative_path
+ is_root = not relpath
+ for key in _SOURCE_FOLDERS:
+ srcs[key] = {s for s in srcs[key]
+ if common_util.is_source_under_relative_path(
+ s, relpath) or is_root}
+ self._all_srcs[key] -= srcs[key]
+
+ def _remove_duplicate_sources(self):
+ """Removes the duplicate source folders from each sub project.
+
+ Priority processing with the longest path length, e.g.
+ frameworks/base/packages/SettingsLib must have priority over
+ frameworks/base.
+ """
+ for child in sorted(self._projects, key=lambda k: len(
+ k.project_relative_path), reverse=True):
+ for parent in self._projects:
+ is_root = not parent.project_relative_path
+ if parent is child:
+ continue
+ if (common_util.is_source_under_relative_path(
+ child.project_relative_path,
+ parent.project_relative_path) or is_root):
+ for key in _SOURCE_FOLDERS:
+ parent.source_path[key] -= child.source_path[key]
+
+ def get_dependencies(self):
+ """Gets the dependencies between the projects.
+
+ Check if the current project's source folder exists in other projects.
+        If so, the current project is a dependency module to the other.
+ """
+ for project in sorted(self._projects, key=lambda k: len(
+ k.project_relative_path)):
+ proj_path = project.project_relative_path
+ project.dependencies = [constant.FRAMEWORK_SRCJARS]
+ if self._framework_exist and proj_path != constant.FRAMEWORK_PATH:
+ project.dependencies.append(self._framework_iml)
+ if self._full_repo and proj_path:
+ project.dependencies.append(self._full_repo_iml)
+ srcs = (project.source_path[_KEY_SOURCE_PATH]
+ | project.source_path[_KEY_TEST_PATH])
+ for dep_proj in sorted(self._projects, key=lambda k: len(
+ k.project_relative_path)):
+ dep_path = dep_proj.project_relative_path
+ is_root = not dep_path
+ is_child = common_util.is_source_under_relative_path(dep_path,
+ proj_path)
+ is_dep = any({s for s in srcs
+ if common_util.is_source_under_relative_path(
+ s, dep_path) or is_root})
+ if dep_proj is project or is_child or not is_dep:
+ continue
+ dep = iml.IMLGenerator.get_unique_iml_name(os.path.join(
+ common_util.get_android_root_dir(), dep_path))
+ if dep not in project.dependencies:
+ project.dependencies.append(dep)
+ project.dependencies.append(constant.KEY_DEPENDENCIES)
+
+ def gen_framework_srcjars_iml(self):
+ """Generates the framework-srcjars.iml.
+
+ Create the iml file with only the srcjars of module framework-all. These
+ srcjars will be separated from the modules under frameworks/base.
+
+ Returns:
+ A string of the framework_srcjars.iml's absolute path.
+ """
+ mod = dict(self._projects[0].dep_modules[constant.FRAMEWORK_ALL])
+ mod[constant.KEY_DEPENDENCIES] = []
+ mod[constant.KEY_IML_NAME] = constant.FRAMEWORK_SRCJARS
+ if self._framework_exist:
+ mod[constant.KEY_DEPENDENCIES].append(self._framework_iml)
+ if self._full_repo:
+ mod[constant.KEY_DEPENDENCIES].append(self._full_repo_iml)
+ mod[constant.KEY_DEPENDENCIES].append(constant.KEY_DEPENDENCIES)
+ framework_srcjars_iml = iml.IMLGenerator(mod)
+ framework_srcjars_iml.create({constant.KEY_SRCJARS: True,
+ constant.KEY_DEPENDENCIES: True})
+ self._all_srcs[_KEY_SRCJAR_PATH] -= set(mod[constant.KEY_SRCJARS])
+ return framework_srcjars_iml.iml_path
+
+ def _gen_dependencies_iml(self):
+ """Generates the dependencies.iml."""
+ mod = {
+ constant.KEY_SRCS: self._all_srcs[_KEY_SOURCE_PATH],
+ constant.KEY_TESTS: self._all_srcs[_KEY_TEST_PATH],
+ constant.KEY_JARS: self._all_srcs[_KEY_JAR_PATH],
+ constant.KEY_SRCJARS: (self._all_srcs[_KEY_R_PATH]
+ | self._all_srcs[_KEY_SRCJAR_PATH]),
+ constant.KEY_DEPENDENCIES: [constant.FRAMEWORK_SRCJARS],
+ constant.KEY_PATH: [self._projects[0].project_relative_path],
+ constant.KEY_MODULE_NAME: constant.KEY_DEPENDENCIES,
+ constant.KEY_IML_NAME: constant.KEY_DEPENDENCIES
+ }
+ if self._framework_exist:
+ mod[constant.KEY_DEPENDENCIES].append(self._framework_iml)
+ if self._full_repo:
+ mod[constant.KEY_DEPENDENCIES].append(self._full_repo_iml)
+ dep_iml = iml.IMLGenerator(mod)
+ dep_iml.create({constant.KEY_DEP_SRCS: True,
+ constant.KEY_SRCJARS: True,
+ constant.KEY_JARS: True,
+ constant.KEY_DEPENDENCIES: True})
+
+ def gen_projects_iml(self):
+ """Generates the projects' iml file."""
+ root_path = common_util.get_android_root_dir()
+ excludes = project_config.ProjectConfig.get_instance().exclude_paths
+ for project in self._projects:
+ relpath = project.project_relative_path
+ exclude_folders = []
+ if not relpath:
+ exclude_folders.extend(get_exclude_content(root_path))
+ if excludes:
+ exclude_folders.extend(get_exclude_content(root_path, excludes))
+ mod_info = {
+ constant.KEY_EXCLUDES: ''.join(exclude_folders),
+ constant.KEY_SRCS: project.source_path[_KEY_SOURCE_PATH],
+ constant.KEY_TESTS: project.source_path[_KEY_TEST_PATH],
+ constant.KEY_DEPENDENCIES: project.dependencies,
+ constant.KEY_PATH: [relpath],
+ constant.KEY_MODULE_NAME: project.module_name,
+ constant.KEY_IML_NAME: iml.IMLGenerator.get_unique_iml_name(
+ os.path.join(root_path, relpath))
+ }
+ dep_iml = iml.IMLGenerator(mod_info)
+ dep_iml.create({constant.KEY_SRCS: True,
+ constant.KEY_DEPENDENCIES: True})
+ project.iml_path = dep_iml.iml_path
+ self._gen_dependencies_iml()
+
+
+def get_exclude_content(root_path, excludes=None):
+ """Get the exclude folder content list.
+
+ It returns the exclude folders content list.
+ e.g.
+ ['<excludeFolder url="file://a/.idea" />',
+ '<excludeFolder url="file://a/.repo" />']
+
+ Args:
+ root_path: Android source file path.
+        excludes: A list of excluded directories, the default value is None but
+ will be assigned to _EXCLUDE_FOLDERS.
+
+ Returns:
+        A list of strings, the exclude folder content items.
+ """
+ exclude_items = []
+ if not excludes:
+ excludes = _EXCLUDE_FOLDERS
+ for folder in excludes:
+ folder_path = os.path.join(root_path, folder)
+ if os.path.isdir(folder_path):
+ exclude_items.append(_EXCLUDE_ITEM % folder_path)
+ return exclude_items
diff --git a/aidegen/project/source_splitter_unittest.py b/aidegen/project/source_splitter_unittest.py
new file mode 100644
index 0000000..9908ddb
--- /dev/null
+++ b/aidegen/project/source_splitter_unittest.py
@@ -0,0 +1,254 @@
+#!/usr/bin/env python3
+#
+# Copyright 2020 - The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Unittests for source_splitter."""
+
+import os
+import shutil
+import tempfile
+import unittest
+from unittest import mock
+
+from aidegen import unittest_constants
+from aidegen.idea import iml
+from aidegen.lib import common_util
+from aidegen.lib import project_config
+from aidegen.lib import project_info
+from aidegen.project import source_splitter
+
+
+# pylint: disable=protected-access
+class ProjectSplitterUnittest(unittest.TestCase):
+ """Unit tests for ProjectSplitter class."""
+
+ _TEST_DIR = None
+ _TEST_PATH = unittest_constants.TEST_DATA_PATH
+ _SAMPLE_EXCLUDE_FOLDERS = [
+ '\n <excludeFolder url="file://%s/.idea" />' % _TEST_PATH,
+ '\n <excludeFolder url="file://%s/out" />' % _TEST_PATH,
+ ]
+
+ def setUp(self):
+ """Prepare the testdata related data."""
+ projects = []
+ targets = ['a', 'b', 'c', 'framework']
+ ProjectSplitterUnittest._TEST_DIR = tempfile.mkdtemp()
+ for i, target in enumerate(targets):
+ with mock.patch.object(project_info, 'ProjectInfo') as proj_info:
+ projects.append(proj_info(target, i == 0))
+ projects[0].project_relative_path = 'src1'
+ projects[0].source_path = {
+ 'source_folder_path': {'src1', 'src2', 'other1'},
+ 'test_folder_path': {'src1/tests'},
+ 'jar_path': {'jar1.jar'},
+ 'jar_module_path': dict(),
+ 'r_java_path': set(),
+ 'srcjar_path': {'srcjar1.srcjar'}
+ }
+ projects[1].project_relative_path = 'src2'
+ projects[1].source_path = {
+ 'source_folder_path': {'src2', 'src2/src3', 'src2/lib', 'other2'},
+ 'test_folder_path': {'src2/tests'},
+ 'jar_path': set(),
+ 'jar_module_path': dict(),
+ 'r_java_path': set(),
+ 'srcjar_path': {'srcjar2.srcjar'}
+ }
+ projects[2].project_relative_path = 'src2/src3'
+ projects[2].source_path = {
+ 'source_folder_path': {'src2/src3', 'src2/lib'},
+ 'test_folder_path': {'src2/src3/tests'},
+ 'jar_path': {'jar3.jar'},
+ 'jar_module_path': dict(),
+ 'r_java_path': set(),
+ 'srcjar_path': {'srcjar3.srcjar'}
+ }
+ projects[3].project_relative_path = 'frameworks/base'
+ projects[3].source_path = {
+ 'source_folder_path': set(),
+ 'test_folder_path': set(),
+ 'jar_path': set(),
+ 'jar_module_path': dict(),
+ 'r_java_path': set(),
+ 'srcjar_path': {'framework.srcjar', 'other.srcjar'}
+ }
+ with mock.patch.object(project_config.ProjectConfig,
+ 'get_instance') as proj_cfg:
+ config = mock.Mock()
+ config.full_repo = False
+ proj_cfg.return_value = config
+ self.split_projs = source_splitter.ProjectSplitter(projects)
+
+ def tearDown(self):
+ """Clear the testdata related path."""
+ self.split_projs = None
+ shutil.rmtree(ProjectSplitterUnittest._TEST_DIR)
+ iml.IMLGenerator.USED_NAME_CACHE.clear()
+
+ @mock.patch.object(common_util, 'get_android_root_dir')
+ @mock.patch.object(project_config.ProjectConfig, 'get_instance')
+ @mock.patch('builtins.any')
+ def test_init(self, mock_any, mock_project, mock_root):
+ """Test initialize the attributes."""
+ self.assertEqual(len(self.split_projs._projects), 4)
+ mock_any.return_value = False
+ mock_root.return_value = ProjectSplitterUnittest._TEST_DIR
+ with mock.patch.object(project_info, 'ProjectInfo') as proj_info:
+ config = mock.Mock()
+ config.full_repo = False
+ mock_project.return_value = config
+ project = source_splitter.ProjectSplitter(proj_info(['a'], True))
+ self.assertFalse(project._framework_exist)
+ config.full_repo = True
+ project = source_splitter.ProjectSplitter(proj_info(['a'], True))
+ self.assertEqual(project._full_repo_iml,
+ os.path.basename(
+ ProjectSplitterUnittest._TEST_DIR))
+
+ @mock.patch.object(source_splitter.ProjectSplitter,
+ '_remove_duplicate_sources')
+ @mock.patch.object(source_splitter.ProjectSplitter,
+ '_keep_local_sources')
+ @mock.patch.object(source_splitter.ProjectSplitter,
+ '_collect_all_srcs')
+ def test_revise_source_folders(self, mock_copy_srcs, mock_keep_srcs,
+ mock_remove_srcs):
+ """Test revise_source_folders."""
+ self.split_projs.revise_source_folders()
+ self.assertTrue(mock_copy_srcs.called)
+ self.assertTrue(mock_keep_srcs.called)
+ self.assertTrue(mock_remove_srcs.called)
+
+ def test_collect_all_srcs(self):
+ """Test _collect_all_srcs."""
+ self.split_projs._collect_all_srcs()
+ sources = self.split_projs._all_srcs
+ expected_srcs = {'src1', 'src2', 'src2/src3', 'src2/lib', 'other1',
+ 'other2'}
+ self.assertEqual(sources['source_folder_path'], expected_srcs)
+ expected_tests = {'src1/tests', 'src2/tests', 'src2/src3/tests'}
+ self.assertEqual(sources['test_folder_path'], expected_tests)
+
+ def test_keep_local_sources(self):
+ """Test _keep_local_sources."""
+ self.split_projs._collect_all_srcs()
+ self.split_projs._keep_local_sources()
+ srcs1 = self.split_projs._projects[0].source_path
+ srcs2 = self.split_projs._projects[1].source_path
+ srcs3 = self.split_projs._projects[2].source_path
+ all_srcs = self.split_projs._all_srcs
+ expected_srcs1 = {'src1'}
+ expected_srcs2 = {'src2', 'src2/src3', 'src2/lib'}
+ expected_srcs3 = {'src2/src3'}
+ expected_all_srcs = {'other1', 'other2'}
+ expected_all_tests = set()
+ self.assertEqual(srcs1['source_folder_path'], expected_srcs1)
+ self.assertEqual(srcs2['source_folder_path'], expected_srcs2)
+ self.assertEqual(srcs3['source_folder_path'], expected_srcs3)
+ self.assertEqual(all_srcs['source_folder_path'], expected_all_srcs)
+ self.assertEqual(all_srcs['test_folder_path'], expected_all_tests)
+
+ def test_remove_duplicate_sources(self):
+ """Test _remove_duplicate_sources."""
+ self.split_projs._collect_all_srcs()
+ self.split_projs._keep_local_sources()
+ self.split_projs._remove_duplicate_sources()
+ srcs2 = self.split_projs._projects[1].source_path
+ srcs3 = self.split_projs._projects[2].source_path
+ expected_srcs2 = {'src2', 'src2/lib'}
+ expected_srcs3 = {'src2/src3'}
+ self.assertEqual(srcs2['source_folder_path'], expected_srcs2)
+ self.assertEqual(srcs3['source_folder_path'], expected_srcs3)
+
+ def test_get_dependencies(self):
+ """Test get_dependencies."""
+ iml.IMLGenerator.USED_NAME_CACHE.clear()
+ self.split_projs.get_dependencies()
+ dep1 = ['framework_srcjars', 'base', 'src2', 'dependencies']
+ dep2 = ['framework_srcjars', 'base', 'dependencies']
+ dep3 = ['framework_srcjars', 'base', 'src2', 'dependencies']
+ self.assertEqual(self.split_projs._projects[0].dependencies, dep1)
+ self.assertEqual(self.split_projs._projects[1].dependencies, dep2)
+ self.assertEqual(self.split_projs._projects[2].dependencies, dep3)
+
+ @mock.patch.object(common_util, 'get_android_root_dir')
+ def test_gen_framework_srcjars_iml(self, mock_root):
+ """Test gen_framework_srcjars_iml."""
+ mock_root.return_value = self._TEST_DIR
+ self.split_projs._projects[0].dep_modules = {
+ 'framework-all': {
+ 'module_name': 'framework-all',
+ 'path': ['frameworks/base'],
+ 'srcjars': ['framework.srcjar'],
+ 'iml_name': 'framework_srcjars'
+ }
+ }
+ self.split_projs._framework_exist = False
+ self.split_projs.gen_framework_srcjars_iml()
+ expected_srcjars = [
+ 'other.srcjar',
+ 'srcjar1.srcjar',
+ 'srcjar2.srcjar',
+ 'srcjar3.srcjar',
+ ]
+ expected_path = os.path.join(self._TEST_DIR,
+ 'frameworks/base/framework_srcjars.iml')
+ self.split_projs._framework_exist = True
+ self.split_projs.revise_source_folders()
+ iml_path = self.split_projs.gen_framework_srcjars_iml()
+ srcjars = self.split_projs._all_srcs['srcjar_path']
+ self.assertEqual(sorted(list(srcjars)), expected_srcjars)
+ self.assertEqual(iml_path, expected_path)
+
+ @mock.patch.object(iml.IMLGenerator, 'create')
+ @mock.patch.object(common_util, 'get_android_root_dir')
+ def test_gen_dependencies_iml(self, mock_root, mock_create_iml):
+ """Test _gen_dependencies_iml."""
+ mock_root.return_value = self._TEST_DIR
+ self.split_projs.revise_source_folders()
+ self.split_projs._framework_exist = False
+ self.split_projs._gen_dependencies_iml()
+ self.split_projs._framework_exist = True
+ self.split_projs._gen_dependencies_iml()
+ self.assertTrue(mock_create_iml.called)
+
+ @mock.patch.object(source_splitter, 'get_exclude_content')
+ @mock.patch.object(project_config.ProjectConfig, 'get_instance')
+ @mock.patch.object(iml.IMLGenerator, 'create')
+ @mock.patch.object(common_util, 'get_android_root_dir')
+ def test_gen_projects_iml(self, mock_root, mock_create_iml, mock_project,
+ mock_get_excludes):
+ """Test gen_projects_iml."""
+ mock_root.return_value = self._TEST_DIR
+ config = mock.Mock()
+ mock_project.return_value = config
+ config.exclude_paths = []
+ self.split_projs.revise_source_folders()
+ self.split_projs.gen_projects_iml()
+ self.assertTrue(mock_create_iml.called)
+ self.assertFalse(mock_get_excludes.called)
+ config.exclude_paths = ['a']
+ self.split_projs.gen_projects_iml()
+ self.assertTrue(mock_get_excludes.called)
+
+ def test_get_exclude_content(self):
+ """Test get_exclude_content."""
+ exclude_folders = source_splitter.get_exclude_content(self._TEST_PATH)
+ self.assertEqual(self._SAMPLE_EXCLUDE_FOLDERS, exclude_folders)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/aidegen/sdk/jdk_table.py b/aidegen/sdk/jdk_table.py
index 583cbb0..84a7102 100644
--- a/aidegen/sdk/jdk_table.py
+++ b/aidegen/sdk/jdk_table.py
@@ -104,9 +104,11 @@
self._jdk_path = jdk_path
self._default_android_sdk_path = default_android_sdk_path
self._xml = None
- xml_file = self._DEFAULT_JDK_TABLE_XML
if os.path.exists(config_file):
xml_file = config_file
+ else:
+ xml_file = self._DEFAULT_JDK_TABLE_XML
+ common_util.file_generate(xml_file, templates.JDK_TABLE_XML)
self._xml = xml_util.parse_xml(xml_file)
self._platform_version = None
self._android_sdk_version = None
diff --git a/aidegen/sdk/jdk_table_unittest.py b/aidegen/sdk/jdk_table_unittest.py
index 937be41..7af9179 100644
--- a/aidegen/sdk/jdk_table_unittest.py
+++ b/aidegen/sdk/jdk_table_unittest.py
@@ -53,9 +53,10 @@
self.jdk_table_xml = None
shutil.rmtree(JDKTableXMLUnittests._TEST_DIR)
+ @mock.patch.object(common_util, 'file_generate')
@mock.patch('os.path.exists')
@mock.patch.object(ElementTree, 'parse')
- def test_init(self, mock_parse, mock_exists):
+ def test_init(self, mock_parse, mock_exists, mock_gen_file):
"""Test initialize the attributes."""
self.assertEqual(self.jdk_table_xml._platform_version, None)
self.assertEqual(self.jdk_table_xml._android_sdk_version, None)
@@ -67,6 +68,7 @@
mock_exists.return_value = False
jdk_table.JDKTableXML(None, None, None, None)
self.assertTrue(mock_parse.called)
+ self.assertTrue(mock_gen_file.called)
def test_android_sdk_version(self):
"""Test android_sdk_version."""
diff --git a/aidegen/templates.py b/aidegen/templates.py
index ae7dd62..7f37638 100644
--- a/aidegen/templates.py
+++ b/aidegen/templates.py
@@ -31,19 +31,43 @@
"""
# TODO(b/153704028): Refactor to create iml file.
IML = """<?xml version="1.0" encoding="UTF-8"?>
-<module type="JAVA_MODULE" version="4">
+<module type="JAVA_MODULE" version="4">{FACET}
<component name="NewModuleRootManager" inherit-compiler-output="true">
- <exclude-output />{SRCJARS}
- <orderEntry type="sourceFolder" forTests="false" />
+ <exclude-output />{SOURCES}
+ <orderEntry type="sourceFolder" forTests="false" />{SRCJARS}{DEPENDENCIES}{JARS}
<orderEntry type="inheritedJdk" />
</component>
</module>
"""
-
+FACET = """
+ <facet type="android" name="Android">
+ <configuration />
+ </facet>"""
+CONTENT = """
+ <content url="file://{MODULE_PATH}">{EXCLUDES}{SOURCES}
+ </content>"""
+SOURCE = """
+ <sourceFolder url="file://{SRC}" isTestSource="{IS_TEST}" />"""
+OTHER_SOURCE = """
+ <content url="file://{SRC}">
+ <sourceFolder url="file://{SRC}" isTestSource="{IS_TEST}" />
+ </content>"""
SRCJAR = """
<content url="jar://{SRCJAR}!/">
<sourceFolder url="jar://{SRCJAR}!/" isTestSource="False" />
</content>"""
+JAR = """
+ <orderEntry type="module-library" exported="">
+ <library>
+ <CLASSES>
+ <root url="jar://{JAR}!/" />
+ </CLASSES>
+ <JAVADOC />
+ <SOURCES />
+ </library>
+ </orderEntry>"""
+DEPENDENCIES = """
+ <orderEntry type="module" module-name="{MODULE}" />"""
# The template content of modules.xml.
XML_MODULES = """<?xml version="1.0" encoding="UTF-8"?>
@@ -61,7 +85,7 @@
XML_VCS = """<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
-@VCS@
+{GIT_MAPPINGS}
</component>
</project>
"""
@@ -368,4 +392,41 @@
{LINKEDRESOURCES}
</linkedResources>
</projectDescription>
-"""
\ No newline at end of file
+"""
+
+# The template of default AndroidManifest.xml.
+ANDROID_MANIFEST_CONTENT = """<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ android:versionCode="1"
+ android:versionName="1.0" >
+</manifest>
+"""
+
+# The xml template for enabling debugger.
+XML_ENABLE_DEBUGGER = """<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="FacetManager">
+ <facet type="android" name="Android">
+ <configuration>
+ <proGuardCfgFiles />
+ </configuration>
+ </facet>
+ </component>
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$">
+ <sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
+ <sourceFolder url="file://$MODULE_DIR$/gen" isTestSource="false" generated="true" />
+ </content>
+ <orderEntry type="jdk" jdkName="{ANDROID_SDK_VERSION}" jdkType="Android SDK" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ </component>
+</module>
+"""
+
+# The default empty template of the jdk.table.xml.
+JDK_TABLE_XML = """<application>
+ <component name="ProjectJdkTable">
+ </component>
+</application>
+"""
diff --git a/aidegen/test_data/vcs.xml b/aidegen/test_data/vcs.xml
deleted file mode 100644
index 81a97f3..0000000
--- a/aidegen/test_data/vcs.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
- <component name="VcsDirectoryMappings">
- <mapping directory="@LOCAL_PATH@" vcs="Git" />
- </component>
-</project>
diff --git a/aidegen/vscode/__init__.py b/aidegen/vscode/__init__.py
new file mode 100755
index 0000000..e69de29
--- /dev/null
+++ b/aidegen/vscode/__init__.py
diff --git a/aidegen_functional_test/aidegen_functional_test_main.py b/aidegen_functional_test/aidegen_functional_test_main.py
index 7214d03..8615943 100644
--- a/aidegen_functional_test/aidegen_functional_test_main.py
+++ b/aidegen_functional_test/aidegen_functional_test_main.py
@@ -741,6 +741,7 @@
"""
args = _parse_args(argv)
common_util.configure_logging(args.verbose)
+ os.environ[constant.AIDEGEN_TEST_MODE] = 'true'
if args.create_sample:
_create_some_sample_json_file(args.targets)
elif args.use_cases_verified:
@@ -756,6 +757,7 @@
_test_some_sample_iml()
else:
_test_some_sample_iml(args.targets)
+ del os.environ[constant.AIDEGEN_TEST_MODE]
if __name__ == '__main__':
diff --git a/aidegen_functional_test/test_data/verify_binary_upload.json b/aidegen_functional_test/test_data/verify_binary_upload.json
index b3fc26d..ccb8af3 100644
--- a/aidegen_functional_test/test_data/verify_binary_upload.json
+++ b/aidegen_functional_test/test_data/verify_binary_upload.json
@@ -10,5 +10,9 @@
"test Settings framework": ["aidegen Settings framework -n -s"],
"test framework launch Android Studio": ["aidegen framework -i s -n -s"],
"test framework launch Eclipse": ["aidegen framework -i e -n -s"],
+ "test frameworks/native launch CLion": ["aidegen frameworks/native -i c -n -s"],
+ "test framework launch VSCode": ["aidegen framework -i v -n -s"],
+ "test Settings framework launch VSCode": ["aidegen Settings framework -i v -n -s"],
+ "test framework exclude paths": ["aidegen framework -e frameworks/base/test-mock frameworks/base/test-runner -n -s"],
"test help": ["aidegen -h"]
}
diff --git a/aidegen_functional_test/test_data/verify_commands.json b/aidegen_functional_test/test_data/verify_commands.json
index 88da032..47ed6dd 100644
--- a/aidegen_functional_test/test_data/verify_commands.json
+++ b/aidegen_functional_test/test_data/verify_commands.json
@@ -62,6 +62,18 @@
"aidegen_main.main(['-n', '-v', '-a'])",
"os.chdir('../..')"
],
+ "aidegen frameworks/native -i c": [
+ "aidegen_main.main(['frameworks/native', '-i', 'c', '-n'])"
+ ],
+ "aidegen framework -i v": [
+ "aidegen_main.main(['framework', '-i', 'v', '-n'])"
+ ],
+ "aidegen Settings framework -i v": [
+ "aidegen_main.main(['Settings', 'framework', '-i', 'v', '-n'])"
+ ],
+ "aidegen framework exclude paths": [
+ "aidegen_main.main(['framework', '-e', 'frameworks/base/test-mock', 'frameworks/base/test-runner', '-n'])"
+ ],
"aidegen -h": [
"aidegen_main.main(['-h'])"
]
diff --git a/asuite.sh b/asuite.sh
index 3b78958..5f1cef8 100755
--- a/asuite.sh
+++ b/asuite.sh
@@ -15,7 +15,7 @@
# Main function.
function _asuite_main() {
local T="$(gettop)/tools"
- src_atest="$T/tradefederation/core/atest/atest_completion.sh"
+ src_atest="$T/asuite/atest/atest_completion.sh"
src_acloud="$T/acloud/acloud_completion.sh"
src_aidegen="$T/asuite/aidegen/aidegen_completion.sh"
declare -a asuite_srcs=($src_atest $src_acloud $src_aidegen)
diff --git a/asuite_plugin/src/java/com/android/atest/commandAdapter/CommandRunner.java b/asuite_plugin/src/java/com/android/atest/commandAdapter/CommandRunner.java
index caab2b4..e57a6c2 100644
--- a/asuite_plugin/src/java/com/android/atest/commandAdapter/CommandRunner.java
+++ b/asuite_plugin/src/java/com/android/atest/commandAdapter/CommandRunner.java
@@ -42,7 +42,8 @@
public class CommandRunner {
private static final Logger LOG = Logger.getInstance(CommandRunner.class);
- private static final String ATEST_COMMAND_PREFIX = "source build/envsetup.sh && lunch ";
+ private static final String ATEST_COMMAND_PREFIX =
+ "export USER_FROM_TOOL=\"IntelliJ_Atest_plugin\" && source build/envsetup.sh && lunch ";
private static KillableColoredProcessHandler sProcessHandler;
private PtyCommandLine mCommand;
private ProcessListener mProcessListener;
diff --git a/asuite_plugin/src/test/unittests/CommandRunnerTest.java b/asuite_plugin/src/test/unittests/CommandRunnerTest.java
index 63b9835..cca86dd 100644
--- a/asuite_plugin/src/test/unittests/CommandRunnerTest.java
+++ b/asuite_plugin/src/test/unittests/CommandRunnerTest.java
@@ -58,6 +58,7 @@
Assert.assertSame(commandLine.getCharset(), StandardCharsets.UTF_8);
Assert.assertEquals(
commandLine.getCommandLineString(),
- "/bin/bash -c \"source build/envsetup.sh && lunch a && atest b\"");
+ "/bin/bash -c \"export USER_FROM_TOOL=\\\"IntelliJ_Atest_plugin\\\" "
+ + "&& source build/envsetup.sh && lunch a && atest b\"");
}
}
diff --git a/atest/atest.py b/atest/atest.py
index 5b465b0..d90cd67 100755
--- a/atest/atest.py
+++ b/atest/atest.py
@@ -196,6 +196,7 @@
'retry_any_failure': constants.RETRY_ANY_FAILURE,
'serial': constants.SERIAL,
'sharding': constants.SHARDING,
+ 'tf_debug': constants.TF_DEBUG,
'tf_template': constants.TF_TEMPLATE,
'user_type': constants.USER_TYPE}
not_match = [k for k in arg_maps if k not in vars(args)]
@@ -517,7 +518,7 @@
# List failed tests at the end as a reminder.
if failed_tests:
atest_utils.colorful_print(
- '\n==============================', constants.YELLOW)
+ atest_utils.delimiter('=', 30, prenl=1), constants.YELLOW)
atest_utils.colorful_print(
'\nFollowing tests failed:', constants.MAGENTA)
for failure in failed_tests:
@@ -557,9 +558,10 @@
testable_modules = mod_info.get_testable_modules(suite)
print('\n%s' % atest_utils.colorize('%s Testable %s modules' % (
len(testable_modules), suite), constants.CYAN))
- print('-------')
+ print(atest_utils.delimiter('-'))
for module in sorted(testable_modules):
print('\t%s' % module)
+
def _is_inside_android_root():
"""Identify whether the cwd is inside of Android source tree.
@@ -569,6 +571,75 @@
build_top = os.getenv(constants.ANDROID_BUILD_TOP, ' ')
return build_top in os.getcwd()
+def _non_action_validator(args):
+ """Method for non-action arguments such as --version, --help, --history,
+ --latest_result, etc.
+
+ Args:
+ args: An argspace.Namespace class instance holding parsed args.
+ """
+ if not _is_inside_android_root():
+ atest_utils.colorful_print(
+ "\nAtest must always work under ${}!".format(
+ constants.ANDROID_BUILD_TOP), constants.RED)
+ sys.exit(constants.EXIT_CODE_OUTSIDE_ROOT)
+ if args.version:
+ if os.path.isfile(constants.VERSION_FILE):
+ with open(constants.VERSION_FILE) as version_file:
+ print(version_file.read())
+ sys.exit(constants.EXIT_CODE_SUCCESS)
+ if args.help:
+ atest_arg_parser.print_epilog_text()
+ sys.exit(constants.EXIT_CODE_SUCCESS)
+ if args.history:
+ atest_execution_info.print_test_result(constants.ATEST_RESULT_ROOT,
+ args.history)
+ sys.exit(constants.EXIT_CODE_SUCCESS)
+ if args.latest_result:
+ atest_execution_info.print_test_result_by_path(
+ constants.LATEST_RESULT_FILE)
+ sys.exit(constants.EXIT_CODE_SUCCESS)
+ # TODO(b/131879842): remove below statement after they are fully removed.
+ if any((args.detect_regression,
+ args.generate_baseline,
+ args.generate_new_metrics)):
+ stop_msg = ('Please STOP using arguments below -- they are obsolete and '
+ 'will be removed in a very near future:\n'
+ '\t--detect-regression\n'
+ '\t--generate-baseline\n'
+ '\t--generate-new-metrics\n')
+ msg = ('Please use below arguments instead:\n'
+ '\t--iterations\n'
+ '\t--rerun-until-failure\n'
+ '\t--retry-any-failure\n')
+ atest_utils.colorful_print(stop_msg, constants.RED)
+ atest_utils.colorful_print(msg, constants.CYAN)
+
+def _dry_run_validator(args, results_dir, extra_args, test_infos):
+ """Method which process --dry-run argument.
+
+ Args:
+ args: An argspace.Namespace class instance holding parsed args.
+        results_dir: A string path of the results dir.
+ extra_args: A dict of extra args for test runners to utilize.
+ test_infos: A list of test_info.
+ """
+ args.tests.sort()
+ dry_run_cmds = _dry_run(results_dir, extra_args, test_infos)
+ if args.verify_cmd_mapping:
+ try:
+ atest_utils.handle_test_runner_cmd(' '.join(args.tests),
+ dry_run_cmds,
+ do_verification=True)
+ except atest_error.DryRunVerificationError as e:
+ atest_utils.colorful_print(str(e), constants.RED)
+ return constants.EXIT_CODE_VERIFY_FAILURE
+ if args.update_cmd_mapping:
+ atest_utils.handle_test_runner_cmd(' '.join(args.tests),
+ dry_run_cmds)
+ sys.exit(constants.EXIT_CODE_SUCCESS)
+
+
# pylint: disable=too-many-statements
# pylint: disable=too-many-branches
# pylint: disable=too-many-return-statements
@@ -586,28 +657,13 @@
_configure_logging(args.verbose)
_validate_args(args)
metrics_utils.get_start_time()
+ os_pyver = '{}:{}'.format(platform.platform(), platform.python_version())
metrics.AtestStartEvent(
command_line=' '.join(argv),
test_references=args.tests,
cwd=os.getcwd(),
- os=platform.platform())
- if args.version:
- if os.path.isfile(constants.VERSION_FILE):
- with open(constants.VERSION_FILE) as version_file:
- print(version_file.read())
- return constants.EXIT_CODE_SUCCESS
- if not _is_inside_android_root():
- atest_utils.colorful_print(
- "\nAtest must always work under ${}!".format(
- constants.ANDROID_BUILD_TOP), constants.RED)
- return constants.EXIT_CODE_OUTSIDE_ROOT
- if args.help:
- atest_arg_parser.print_epilog_text()
- return constants.EXIT_CODE_SUCCESS
- if args.history:
- atest_execution_info.print_test_result(constants.ATEST_RESULT_ROOT,
- args.history)
- return constants.EXIT_CODE_SUCCESS
+ os=os_pyver)
+ _non_action_validator(args)
mod_info = module_info.ModuleInfo(force_build=args.rebuild_module_info)
if args.rebuild_module_info:
_run_extra_tasks(join=True)
@@ -616,11 +672,11 @@
if args.list_modules:
_print_testable_modules(mod_info, args.list_modules)
return constants.EXIT_CODE_SUCCESS
- build_targets = set()
- test_infos = set()
# Clear cache if user pass -c option
if args.clear_cache:
atest_utils.clean_test_info_caches(args.tests)
+ build_targets = set()
+ test_infos = set()
if _will_run_tests(args):
build_targets, test_infos = translator.translate(args)
if not test_infos:
@@ -634,23 +690,8 @@
build_targets |= test_runner_handler.get_test_runner_reqs(mod_info,
test_infos)
extra_args = get_extra_args(args)
- if args.update_cmd_mapping or args.verify_cmd_mapping:
- args.dry_run = True
- if args.dry_run:
- args.tests.sort()
- dry_run_cmds = _dry_run(results_dir, extra_args, test_infos)
- if args.verify_cmd_mapping:
- try:
- atest_utils.handle_test_runner_cmd(' '.join(args.tests),
- dry_run_cmds,
- do_verification=True)
- except atest_error.DryRunVerificationError as e:
- atest_utils.colorful_print(str(e), constants.RED)
- return constants.EXIT_CODE_VERIFY_FAILURE
- if args.update_cmd_mapping:
- atest_utils.handle_test_runner_cmd(' '.join(args.tests),
- dry_run_cmds)
- return constants.EXIT_CODE_SUCCESS
+ if any((args.update_cmd_mapping, args.verify_cmd_mapping, args.dry_run)):
+ _dry_run_validator(args, results_dir, extra_args, test_infos)
if args.detect_regression:
build_targets |= (regression_test_runner.RegressionTestRunner('')
.get_test_runner_build_reqs())
@@ -664,15 +705,8 @@
# Add module-info.json target to the list of build targets to keep the
# file up to date.
build_targets.add(mod_info.module_info_target)
- # Build the deps-license to generate dependencies data in
- # module-info.json.
- build_targets.add(constants.DEPS_LICENSE)
- # The environment variables PROJ_PATH and DEP_PATH are necessary for the
- # deps-license.
- build_env = dict(constants.DEPS_LICENSE_ENV)
build_start = time.time()
- success = atest_utils.build(build_targets, verbose=args.verbose,
- env_vars=build_env)
+ success = atest_utils.build(build_targets, verbose=args.verbose)
metrics.BuildFinishEvent(
duration=metrics_utils.convert_duration(time.time() - build_start),
success=success,
@@ -721,7 +755,14 @@
with atest_execution_info.AtestExecutionInfo(sys.argv[1:],
RESULTS_DIR,
ARGS) as result_file:
- metrics_base.MetricsBase.tool_name = constants.TOOL_NAME
+ if not ARGS.no_metrics:
+ atest_utils.print_data_collection_notice()
+ USER_FROM_TOOL = os.getenv(constants.USER_FROM_TOOL, '')
+ if USER_FROM_TOOL == '':
+ metrics_base.MetricsBase.tool_name = constants.TOOL_NAME
+ else:
+ metrics_base.MetricsBase.tool_name = USER_FROM_TOOL
+
EXIT_CODE = main(sys.argv[1:], RESULTS_DIR, ARGS)
DETECTOR = bug_detector.BugDetector(sys.argv[1:], EXIT_CODE)
metrics.LocalDetectEvent(
diff --git a/atest/atest_arg_parser.py b/atest/atest_arg_parser.py
index 8bc146c..178936e 100644
--- a/atest/atest_arg_parser.py
+++ b/atest/atest_arg_parser.py
@@ -23,7 +23,6 @@
import argparse
import pydoc
-import atest_utils
import constants
# Constants used for AtestArgParser and EPILOG_TEMPLATE
@@ -53,7 +52,9 @@
'Note: Nothing\'s going to run if it\'s not an Instant App test and '
'"--instant" is passed.')
ITERATION = 'Loop-run tests until the max iteration is reached. (10 by default)'
+LATEST_RESULT = 'Print latest test result.'
LIST_MODULES = 'List testable modules for the given suite.'
+NO_METRICS = 'Do not send metrics.'
REBUILD_MODULE_INFO = ('Forces a rebuild of the module-info.json file. '
'This may be necessary following a repo sync or '
'when writing a new test.')
@@ -69,6 +70,7 @@
TEST_MAPPING = 'Run tests defined in TEST_MAPPING files.'
TF_TEMPLATE = ('Add extra tradefed template for ATest suite, '
'e.g. atest <test> --tf-template <template_key>=<template_path>')
+TF_DEBUG = 'Enable tradefed debug mode with a specify port. Default value is 10888.'
SHARDING = 'Option to specify sharding count. The default value is 2'
UPDATE_CMD_MAPPING = ('Update the test command of input tests. Warning: result '
'will be saved under tools/tradefederation/core/atest/test_data.')
@@ -103,7 +105,6 @@
def __init__(self):
"""Initialise an ArgumentParser instance."""
- atest_utils.print_data_collection_notice()
super(AtestArgParser, self).__init__(
description=HELP_DESC, add_help=False)
@@ -181,7 +182,10 @@
help=UPDATE_CMD_MAPPING)
self.add_argument('-y', '--verify-cmd-mapping', action='store_true',
help=VERIFY_CMD_MAPPING)
-
+ # Options for Tradefed debug mode.
+ self.add_argument('-D', '--tf-debug', nargs='?', const=10888,
+ type=_positive_int, default=0,
+ help=TF_DEBUG)
# Options for Tradefed customization related.
self.add_argument('--tf-template', action='append',
help=TF_TEMPLATE)
@@ -200,10 +204,18 @@
type=_positive_int, const=10, default=0,
metavar='MAX_ITERATIONS', help=RETRY_ANY_FAILURE)
- # Option for test result history.
- group.add_argument('--history', nargs='?',
- type=_positive_int, const=1000, default=0,
- help=HISTORY)
+ # A group of options for history. They are mutually exclusive
+ # in a command line.
+ history_group = self.add_mutually_exclusive_group()
+ # History related options.
+ history_group.add_argument('--latest-result', action='store_true',
+ help=LATEST_RESULT)
+ history_group.add_argument('--history', nargs='?', const='99999',
+ help=HISTORY)
+
+ # Options for disabling collecting data for metrics.
+ self.add_argument(constants.NO_METRICS_ARG, action='store_true',
+ help=NO_METRICS)
# This arg actually doesn't consume anything, it's primarily used for
# the help description and creating custom_args in the NameSpace object.
@@ -247,7 +259,9 @@
INSTALL=INSTALL,
INSTANT=INSTANT,
ITERATION=ITERATION,
+ LATEST_RESULT=LATEST_RESULT,
LIST_MODULES=LIST_MODULES,
+ NO_METRICS=NO_METRICS,
REBUILD_MODULE_INFO=REBUILD_MODULE_INFO,
RERUN_UNTIL_FAILURE=RERUN_UNTIL_FAILURE,
RETRY_ANY_FAILURE=RETRY_ANY_FAILURE,
@@ -255,6 +269,7 @@
SHARDING=SHARDING,
TEST=TEST,
TEST_MAPPING=TEST_MAPPING,
+ TF_DEBUG=TF_DEBUG,
TF_TEMPLATE=TF_TEMPLATE,
USER_TYPE=USER_TYPE,
UPDATE_CMD_MAPPING=UPDATE_CMD_MAPPING,
@@ -288,6 +303,9 @@
-d, --disable-teardown
{DISABLE_TEARDOWN}
+ -D --tf-debug
+ {TF_DEBUG}
+
--history
{HISTORY}
@@ -337,6 +355,9 @@
-L, --list-modules
{LIST_MODULES}
+ --latest-result
+ {LATEST_RESULT}
+
-v, --verbose
{VERBOSE}
@@ -376,6 +397,10 @@
--retry-any-failure
{RETRY_ANY_FAILURE}
+ [ Metrics ]
+ --no-metrics
+ {NO_METRICS}
+
EXAMPLES
- - - - - - - - -
diff --git a/atest/atest_completion.sh b/atest/atest_completion.sh
index 37b48c7..1c38ce7 100644
--- a/atest/atest_completion.sh
+++ b/atest/atest_completion.sh
@@ -24,9 +24,11 @@
# [[ -r "$completion_file" ]] && source "$completion_file"
# Open a new terminal, source/lunch and try again.
reqs=(compopt _get_comp_words_by_ref __ltrim_colon_completions)
- if ! type "${reqs[@]}" >/dev/null 2>&1; then
- return 0
- fi
+ for _cmd in "${reqs[@]}"; do
+ if ! type "$_cmd" >/dev/null 2>&1; then
+ return 1
+ fi
+ done
}
_fetch_testable_modules() {
@@ -160,7 +162,7 @@
# BASH version <= 4.3 doesn't have nosort option.
# Note that nosort has no effect for zsh.
local _atest_comp_options="-o default -o nosort"
- local _atest_executables=(atest-dev atest-src)
+ local _atest_executables=(atest atest-dev atest-src atest-py3)
for exec in "${_atest_executables[*]}"; do
complete -F _atest $_atest_comp_options $exec 2>/dev/null || \
complete -F _atest -o default $exec
diff --git a/atest/atest_execution_info.py b/atest/atest_execution_info.py
index dbad489..013f308 100644
--- a/atest/atest_execution_info.py
+++ b/atest/atest_execution_info.py
@@ -24,6 +24,7 @@
import os
import sys
+import atest_utils as au
import constants
from metrics import metrics_utils
@@ -41,6 +42,10 @@
_TEST_RESULT_NAME = 'test_result'
_EXIT_CODE_ATTR = 'EXIT_CODE'
_MAIN_MODULE_KEY = '__main__'
+_UUID_LEN = 30
+_RESULT_LEN = 35
+_COMMAND_LEN = 50
+_LOGCAT_FMT = '{}/log/invocation_*/{}*logcat-on-failure*'
_SUMMARY_MAP_TEMPLATE = {_STATUS_PASSED_KEY : 0,
_STATUS_FAILED_KEY : 0,
@@ -73,18 +78,30 @@
os.symlink(test_result_dir, symlink)
-def print_test_result(root, num):
+def print_test_result(root, history_arg):
"""Make a list of latest n test result.
Args:
root: A string of the test result root path.
- num: An integer, the number of latest results.
+        history_arg: A string of an integer or uuid. If it's an integer
+                     string, that number of test results will be listed;
+                     otherwise it is treated as a uuid and its test result
+                     is printed in detail.
"""
+ if not history_arg.isdigit():
+ path = os.path.join(constants.ATEST_RESULT_ROOT, history_arg,
+ 'test_result')
+ print_test_result_by_path(path)
+ return
target = '%s/20*_*_*' % root
paths = glob.glob(target)
paths.sort(reverse=True)
- print('{:-^22} {:-^35} {:-^50}'.format('uuid', 'result', 'command'))
- for path in paths[0: num+1]:
+ print('{:-^{uuid_len}} {:-^{result_len}} {:-^{command_len}}'
+ .format('uuid', 'result', 'command',
+ uuid_len=_UUID_LEN,
+ result_len=_RESULT_LEN,
+ command_len=_COMMAND_LEN))
+ for path in paths[0: int(history_arg)+1]:
result_path = os.path.join(path, 'test_result')
if os.path.isfile(result_path):
try:
@@ -93,14 +110,58 @@
total_summary = result.get(_TOTAL_SUMMARY_KEY, {})
summary_str = ', '.join([k+':'+str(v)
for k, v in total_summary.items()])
- print('{:<22} {:<35} {:<50}'
+ print('{:<{uuid_len}} {:<{result_len}} atest {:<{command_len}}'
.format(os.path.basename(path),
summary_str,
- 'atest '+result.get(_ARGS_KEY, '')))
- except json.JSONDecodeError:
+ result.get(_ARGS_KEY, ''),
+ uuid_len=_UUID_LEN,
+ result_len=_RESULT_LEN,
+ command_len=_COMMAND_LEN))
+ except ValueError:
pass
+def print_test_result_by_path(path):
+ """Print latest test result.
+
+ Args:
+ path: A string of test result path.
+ """
+ if os.path.isfile(path):
+ with open(path) as json_file:
+ result = json.load(json_file)
+ print("\natest {}".format(result.get(_ARGS_KEY, '')))
+ print('\nTotal Summary:\n{}'.format(au.delimiter('-')))
+ total_summary = result.get(_TOTAL_SUMMARY_KEY, {})
+ print(', '.join([(k+':'+str(v))
+ for k, v in total_summary.items()]))
+ fail_num = total_summary.get(_STATUS_FAILED_KEY)
+ if fail_num > 0:
+ message = '%d test failed' % fail_num
+ print('\n')
+ print(au.colorize(message, constants.RED))
+ print('-' * len(message))
+ test_runner = result.get(_TEST_RUNNER_KEY, {})
+ for runner_name in test_runner.keys():
+ test_dict = test_runner.get(runner_name, {})
+ for test_name in test_dict:
+ test_details = test_dict.get(test_name, {})
+ for fail in test_details.get(_STATUS_FAILED_KEY):
+ print(au.colorize('{}'.format(
+ fail.get(_TEST_NAME_KEY)), constants.RED))
+ failure_files = glob.glob(_LOGCAT_FMT.format(
+ os.path.dirname(path), fail.get(_TEST_NAME_KEY)
+ ))
+ if failure_files:
+ print('{} {}'.format(
+ au.colorize('LOGCAT-ON-FAILURES:',
+ constants.CYAN),
+ failure_files[0]))
+ print('{} {}'.format(
+ au.colorize('STACKTRACE:\n', constants.CYAN),
+ fail.get(_TEST_DETAILS_KEY)))
+
+
def has_non_test_options(args):
"""
check whether non-test option in the args.
@@ -117,7 +178,8 @@
or args.help
or args.history
or args.info
- or args.version)
+ or args.version
+ or args.latest_result)
class AtestExecutionInfo:
diff --git a/atest/atest_utils.py b/atest/atest_utils.py
index 8ba6c21..d145a14 100644
--- a/atest/atest_utils.py
+++ b/atest/atest_utils.py
@@ -21,7 +21,6 @@
from __future__ import print_function
-import curses
import hashlib
import itertools
import json
@@ -131,15 +130,15 @@
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, env=env_vars)
sys.stdout.write('\n')
- term_width, _ = _get_terminal_size()
+ term_width, _ = get_terminal_size()
white_space = " " * int(term_width)
full_output = []
while proc.poll() is None:
- line = proc.stdout.readline()
+ line = proc.stdout.readline().decode('utf-8')
# Readline will often return empty strings.
if not line:
continue
- full_output.append(line.decode('utf-8'))
+ full_output.append(line)
# Trim the line to the width of the terminal.
# Note: Does not handle terminal resizing, which is probably not worth
# checking the width every loop.
@@ -289,6 +288,7 @@
cached_has_colors[stream] = False
return False
try:
+ import curses
curses.setupterm()
cached_has_colors[stream] = curses.tigetnum("colors") > 2
# pylint: disable=broad-except
@@ -341,7 +341,7 @@
print(output, end="")
-def _get_terminal_size():
+def get_terminal_size():
"""Get terminal size and return a tuple.
Returns:
@@ -350,7 +350,8 @@
# Determine the width of the terminal. We'll need to clear this many
# characters when carriage returning. Set default value as 80.
columns, rows = shutil.get_terminal_size(
- fallback=(_DEFAULT_TERMINAL_WIDTH, _DEFAULT_TERMINAL_HEIGHT))
+ fallback=(_DEFAULT_TERMINAL_WIDTH,
+ _DEFAULT_TERMINAL_HEIGHT))
return columns, rows
@@ -386,10 +387,10 @@
constants.PRIVACY_POLICY_URL,
constants.TERMS_SERVICE_URL
)
- print('\n==================')
+ print(delimiter('=', 18, prenl=1))
colorful_print("Notice:", constants.RED)
colorful_print("%s" % notice, constants.GREEN)
- print('==================\n')
+ print(delimiter('=', 18, postnl=1))
def handle_test_runner_cmd(input_test, test_cmds, do_verification=False,
@@ -624,3 +625,17 @@
except (OSError, subprocess.CalledProcessError) as err:
logging.debug('Exception raised: %s', err)
return modified_files
+
+def delimiter(char, length=_DEFAULT_TERMINAL_WIDTH, prenl=0, postnl=0):
+ """A handy delimiter printer.
+
+ Args:
+ char: A string used for delimiter.
+ length: An integer for the replication.
+      prenl: An integer that inserts '\n' before the delimiter.
+      postnl: An integer that inserts '\n' after the delimiter.
+
+ Returns:
+ A string of delimiter.
+ """
+ return prenl * '\n' + char * length + postnl * '\n'
diff --git a/atest/atest_utils_unittest.py b/atest/atest_utils_unittest.py
index 516e5bb..cfbcad6 100755
--- a/atest/atest_utils_unittest.py
+++ b/atest/atest_utils_unittest.py
@@ -407,5 +407,9 @@
self.assertEqual({'/a/b/test_fp4', '/a/b/test_fp3.java'},
atest_utils.get_modified_files(''))
+ def test_delimiter(self):
+ """Test method delimiter"""
+ self.assertEqual('\n===\n\n', atest_utils.delimiter('=', 3, 1, 2))
+
if __name__ == "__main__":
unittest.main()
diff --git a/atest/constants_default.py b/atest/constants_default.py
index 775eaa6..90f3d38 100644
--- a/atest/constants_default.py
+++ b/atest/constants_default.py
@@ -53,6 +53,7 @@
ITERATIONS = 'ITERATIONS'
RERUN_UNTIL_FAILURE = 'RERUN_UNTIL_FAILURE'
RETRY_ANY_FAILURE = 'RETRY_ANY_FAILURE'
+TF_DEBUG = 'TF_DEBUG'
COLLECT_TESTS_ONLY = 'COLLECT_TESTS_ONLY'
TF_TEMPLATE = 'TF_TEMPLATE'
@@ -152,6 +153,7 @@
BOTH_TEST = 'both'
# Metrics
+NO_METRICS_ARG = '--no-metrics'
METRICS_URL = 'http://asuite-218222.appspot.com/atest/metrics'
EXTERNAL = 'EXTERNAL_RUN'
INTERNAL = 'INTERNAL_RUN'
@@ -165,6 +167,7 @@
PRIVACY_POLICY_URL = 'https://policies.google.com/privacy'
TERMS_SERVICE_URL = 'https://policies.google.com/terms'
TOOL_NAME = 'atest'
+USER_FROM_TOOL = 'USER_FROM_TOOL'
TF_PREPARATION = 'tf-preparation'
# Detect type for local_detect_event.
@@ -195,15 +198,11 @@
ATEST_TF_MODULE = 'atest-tradefed'
# Build environment variable for each build on ATest
+# With RECORD_ALL_DEPS enabled, ${ANDROID_PRODUCT_OUT}/module-info.json will
+# contain modules' dependency info when running make.
# With SOONG_COLLECT_JAVA_DEPS enabled, out/soong/module_bp_java_deps.json will
# be generated when make.
-ATEST_BUILD_ENV = {'SOONG_COLLECT_JAVA_DEPS':'true'}
-
-# For generating dependencies in module-info.json, appending deps-license in the
-# make command is a must. Also the environment variables PROJ_PATH and DEP_PATH
-# are necessary.
-DEPS_LICENSE = 'deps-license'
-DEPS_LICENSE_ENV = {'PROJ_PATH': '.', 'DEP_PATH': '.'}
+ATEST_BUILD_ENV = {'RECORD_ALL_DEPS':'true', 'SOONG_COLLECT_JAVA_DEPS':'true'}
# Atest index path and relative dirs/caches.
INDEX_DIR = os.path.join(os.getenv(ANDROID_HOST_OUT, ''), 'indexes')
@@ -233,3 +232,14 @@
r'(?P<package>[^(;|\s)]+)\s*')
ATEST_RESULT_ROOT = '/tmp/atest_result'
+LATEST_RESULT_FILE = os.path.join(ATEST_RESULT_ROOT, 'LATEST', 'test_result')
+
+# Tests list which need vts_kernel_tests as test dependency
+REQUIRED_KERNEL_TEST_MODULES = [
+ 'vts_ltp_test_arm',
+ 'vts_ltp_test_arm_64',
+ 'vts_linux_kselftest_arm_32',
+ 'vts_linux_kselftest_arm_64',
+ 'vts_linux_kselftest_x86_32',
+ 'vts_linux_kselftest_x86_64'
+]
diff --git a/atest/module_info.py b/atest/module_info.py
index 3e9f28f..0be78ed 100644
--- a/atest/module_info.py
+++ b/atest/module_info.py
@@ -82,9 +82,7 @@
logging.debug('Generating %s - this is required for '
'initial runs.', _MODULE_INFO)
build_env = dict(constants.ATEST_BUILD_ENV)
- build_env.update(constants.DEPS_LICENSE_ENV)
- # Also build the deps-license module to generate dependencies data.
- atest_utils.build([module_info_target, constants.DEPS_LICENSE],
+ atest_utils.build([module_info_target],
verbose=logging.getLogger().isEnabledFor(
logging.DEBUG), env_vars=build_env)
return module_info_target, module_file_path
@@ -161,7 +159,16 @@
def get_module_info(self, mod_name):
"""Return dict of info for given module name, None if non-existent."""
- return self.name_to_module_info.get(mod_name)
+ module_info = self.name_to_module_info.get(mod_name)
+    # Android's build system automatically adds a 2nd arch bitness
+    # string at the end of the module name, which makes atest unable to
+    # find the matched module. Rescan the module-info for a matching
+    # module name without the bitness suffix.
+ if not module_info:
+ for _, module_info in self.name_to_module_info.items():
+ if mod_name == module_info.get(constants.MODULE_NAME, ''):
+ break
+ return module_info
def is_suite_in_compatibility_suites(self, suite, mod_info):
"""Check if suite exists in the compatibility_suites of module-info.
diff --git a/atest/result_reporter.py b/atest/result_reporter.py
index 8923e65..9d968ff 100644
--- a/atest/result_reporter.py
+++ b/atest/result_reporter.py
@@ -73,6 +73,120 @@
UNSUPPORTED_FLAG = 'UNSUPPORTED_RUNNER'
FAILURE_FLAG = 'RUNNER_FAILURE'
+BENCHMARK_ESSENTIAL_KEYS = {'repetition_index', 'cpu_time', 'name', 'repetitions',
+ 'run_type', 'threads', 'time_unit', 'iterations',
+ 'run_name', 'real_time'}
+# TODO(b/146875480): handle the optional benchmark events
+BENCHMARK_OPTIONAL_KEYS = {'bytes_per_second', 'label'}
+BENCHMARK_EVENT_KEYS = BENCHMARK_ESSENTIAL_KEYS.union(BENCHMARK_OPTIONAL_KEYS)
+INT_KEYS = {'cpu_time', 'real_time'}
+
+class PerfInfo():
+ """Class for storing performance test of a test run."""
+
+ def __init__(self):
+ """Initialize a new instance of PerfInfo class."""
+ # perf_info: A list of benchmark_info(dict).
+ self.perf_info = []
+
+ def update_perf_info(self, test):
+ """Update perf_info with the given result of a single test.
+
+ Args:
+ test: A TestResult namedtuple.
+ """
+ all_additional_keys = set(test.additional_info.keys())
+ # Ensure every key is in all_additional_keys.
+ if not BENCHMARK_ESSENTIAL_KEYS.issubset(all_additional_keys):
+ return
+ benchmark_info = {}
+ benchmark_info['test_name'] = test.test_name
+ for key, data in test.additional_info.items():
+ if key in INT_KEYS:
+ data_to_int = data.split('.')[0]
+ benchmark_info[key] = data_to_int
+ elif key in BENCHMARK_EVENT_KEYS:
+ benchmark_info[key] = data
+ if benchmark_info:
+ self.perf_info.append(benchmark_info)
+
+ def print_perf_info(self):
+ """Print summary of a perf_info."""
+ if not self.perf_info:
+ return
+ classify_perf_info, max_len = self._classify_perf_info()
+ separator = '-' * au.get_terminal_size()[0]
+ print(separator)
+ print("{:{name}} {:^{real_time}} {:^{cpu_time}} "
+ "{:>{iterations}}".format(
+ 'Benchmark', 'Time', 'CPU', 'Iteration',
+ name=max_len['name']+3,
+ real_time=max_len['real_time']+max_len['time_unit']+1,
+ cpu_time=max_len['cpu_time']+max_len['time_unit']+1,
+ iterations=max_len['iterations']))
+ print(separator)
+ for module_name, module_perf_info in classify_perf_info.items():
+ print("{}:".format(module_name))
+ for benchmark_info in module_perf_info:
+ # BpfBenchMark/MapWriteNewEntry/1 1530 ns 1522 ns 460517
+ print(" #{:{name}} {:>{real_time}} {:{time_unit}} "
+ "{:>{cpu_time}} {:{time_unit}} "
+ "{:>{iterations}}".format(benchmark_info['name'],
+ benchmark_info['real_time'],
+ benchmark_info['time_unit'],
+ benchmark_info['cpu_time'],
+ benchmark_info['time_unit'],
+ benchmark_info['iterations'],
+ name=max_len['name'],
+ real_time=max_len['real_time'],
+ time_unit=max_len['time_unit'],
+ cpu_time=max_len['cpu_time'],
+ iterations=max_len['iterations']))
+
+ def _classify_perf_info(self):
+ """Classify the perf_info by test module name.
+
+ Returns:
+ A tuple of (classified_perf_info, max_len), where
+                classified_perf_info: A dict of perf_info in which each
+                                      perf_info belongs to a different module.
+ e.g.
+ { module_name_01: [perf_info of module_1],
+ module_name_02: [perf_info of module_2], ...}
+ max_len: A dict which stores the max length of each event.
+ It contains the max string length of 'name', real_time',
+ 'time_unit', 'cpu_time', 'iterations'.
+ e.g.
+ {name: 56, real_time: 9, time_unit: 2, cpu_time: 8,
+ iterations: 12}
+ """
+ module_categories = set()
+ max_len = {}
+ all_name = []
+ all_real_time = []
+ all_time_unit = []
+ all_cpu_time = []
+ all_iterations = ['Iteration']
+ for benchmark_info in self.perf_info:
+ module_categories.add(benchmark_info['test_name'].split('#')[0])
+ all_name.append(benchmark_info['name'])
+ all_real_time.append(benchmark_info['real_time'])
+ all_time_unit.append(benchmark_info['time_unit'])
+ all_cpu_time.append(benchmark_info['cpu_time'])
+ all_iterations.append(benchmark_info['iterations'])
+ classified_perf_info = {}
+ for module_name in module_categories:
+ module_perf_info = []
+ for benchmark_info in self.perf_info:
+ if benchmark_info['test_name'].split('#')[0] == module_name:
+ module_perf_info.append(benchmark_info)
+ classified_perf_info[module_name] = module_perf_info
+ max_len = {'name': len(max(all_name, key=len)),
+ 'real_time': len(max(all_real_time, key=len)),
+ 'time_unit': len(max(all_time_unit, key=len)),
+ 'cpu_time': len(max(all_cpu_time, key=len)),
+ 'iterations': len(max(all_iterations, key=len))}
+ return classified_perf_info, max_len
class RunStat:
@@ -95,6 +209,7 @@
self.failed = failed
self.ignored = ignored
self.assumption_failed = assumption_failed
+ self.perf_info = PerfInfo()
# Run errors are not for particular tests, they are runner errors.
self.run_errors = run_errors
@@ -229,7 +344,7 @@
if not self.runners:
return tests_ret
print('\n%s' % au.colorize('Summary', constants.CYAN))
- print('-------')
+ print(au.delimiter('-', 7))
if self.rerun_options:
print(self.rerun_options)
failed_sum = len(self.failed_tests)
@@ -251,6 +366,7 @@
tests_ret = constants.EXIT_CODE_TEST_FAILURE
failed_sum += 1 if not stats.failed else 0
print(summary)
+ self.run_stats.perf_info.print_perf_info()
print()
if tests_ret == constants.EXIT_CODE_SUCCESS:
print(au.colorize('All tests passed!', constants.GREEN))
@@ -337,6 +453,7 @@
elif test.status == test_runner_base.ERROR_STATUS:
self.run_stats.run_errors = True
group.run_errors = True
+ self.run_stats.perf_info.update_perf_info(test)
def _print_group_title(self, test):
"""Print the title line for a test group.
@@ -386,7 +503,8 @@
constants.GREEN),
test.test_time))
for key, data in test.additional_info.items():
- print('\t%s: %s' % (au.colorize(key, constants.BLUE), data))
+ if key not in BENCHMARK_EVENT_KEYS:
+ print('\t%s: %s' % (au.colorize(key, constants.BLUE), data))
elif test.status == test_runner_base.IGNORED_STATUS:
# Example: [33/92] test_name: IGNORED (12ms)
print('[%s/%s] %s: %s %s' % (test.test_count, test.group_total,
diff --git a/atest/result_reporter_unittest.py b/atest/result_reporter_unittest.py
index f484678..71fc91e 100755
--- a/atest/result_reporter_unittest.py
+++ b/atest/result_reporter_unittest.py
@@ -141,6 +141,89 @@
test_run_name='com.android.UnitTests'
)
+ADDITIONAL_INFO_PERF01_TEST01 = {u'repetition_index': u'0',
+ u'cpu_time': u'10001.10001',
+ u'name': u'perfName01',
+ u'repetitions': u'0', u'run_type': u'iteration',
+ u'label': u'2123', u'threads': u'1',
+ u'time_unit': u'ns', u'iterations': u'1001',
+ u'run_name': u'perfName01',
+ u'real_time': u'11001.11001'}
+
+RESULT_PERF01_TEST01 = test_runner_base.TestResult(
+ runner_name='someTestRunner',
+ group_name='someTestModule',
+ test_name='somePerfClass01#perfName01',
+ status=test_runner_base.PASSED_STATUS,
+ details=None,
+ test_count=1,
+ test_time='(10ms)',
+ runner_total=None,
+ group_total=2,
+ additional_info=ADDITIONAL_INFO_PERF01_TEST01,
+ test_run_name='com.android.UnitTests'
+)
+
+RESULT_PERF01_TEST02 = test_runner_base.TestResult(
+ runner_name='someTestRunner',
+ group_name='someTestModule',
+ test_name='somePerfClass01#perfName02',
+ status=test_runner_base.PASSED_STATUS,
+ details=None,
+ test_count=1,
+ test_time='(10ms)',
+ runner_total=None,
+ group_total=2,
+ additional_info={u'repetition_index': u'0', u'cpu_time': u'10002.10002',
+ u'name': u'perfName02',
+ u'repetitions': u'0', u'run_type': u'iteration',
+ u'label': u'2123', u'threads': u'1',
+ u'time_unit': u'ns', u'iterations': u'1002',
+ u'run_name': u'perfName02',
+ u'real_time': u'11002.11002'},
+ test_run_name='com.android.UnitTests'
+)
+
+RESULT_PERF01_TEST03_NO_CPU_TIME = test_runner_base.TestResult(
+ runner_name='someTestRunner',
+ group_name='someTestModule',
+ test_name='somePerfClass01#perfName03',
+ status=test_runner_base.PASSED_STATUS,
+ details=None,
+ test_count=1,
+ test_time='(10ms)',
+ runner_total=None,
+ group_total=2,
+ additional_info={u'repetition_index': u'0',
+ u'name': u'perfName03',
+ u'repetitions': u'0', u'run_type': u'iteration',
+ u'label': u'2123', u'threads': u'1',
+ u'time_unit': u'ns', u'iterations': u'1003',
+ u'run_name': u'perfName03',
+ u'real_time': u'11003.11003'},
+ test_run_name='com.android.UnitTests'
+)
+
+RESULT_PERF02_TEST01 = test_runner_base.TestResult(
+ runner_name='someTestRunner',
+ group_name='someTestModule',
+ test_name='somePerfClass02#perfName11',
+ status=test_runner_base.PASSED_STATUS,
+ details=None,
+ test_count=1,
+ test_time='(10ms)',
+ runner_total=None,
+ group_total=2,
+ additional_info={u'repetition_index': u'0', u'cpu_time': u'20001.20001',
+ u'name': u'perfName11',
+ u'repetitions': u'0', u'run_type': u'iteration',
+ u'label': u'2123', u'threads': u'1',
+ u'time_unit': u'ns', u'iterations': u'2001',
+ u'run_name': u'perfName11',
+ u'real_time': u'210001.21001'},
+ test_run_name='com.android.UnitTests'
+)
+
#pylint: disable=protected-access
#pylint: disable=invalid-name
class ResultReporterUnittests(unittest.TestCase):
@@ -368,5 +451,97 @@
self.rr.process_test_result(RESULT_PASSED_TEST_MODULE_2)
self.assertNotEqual(0, self.rr.print_summary())
+ def test_update_perf_info(self):
+ """Test update_perf_info method."""
+ group = result_reporter.RunStat()
+ # 1. Test PerfInfo after RESULT_PERF01_TEST01
+ # _update_stats() will call _update_perf_info()
+ self.rr._update_stats(RESULT_PERF01_TEST01, group)
+ correct_perf_info = []
+        # trim the time from 10001.10001 to 10001
+ trim_perf01_test01 = {u'repetition_index': u'0', u'cpu_time': u'10001',
+ u'name': u'perfName01',
+ u'repetitions': u'0', u'run_type': u'iteration',
+ u'label': u'2123', u'threads': u'1',
+ u'time_unit': u'ns', u'iterations': u'1001',
+ u'run_name': u'perfName01',
+ u'real_time': u'11001',
+ 'test_name': 'somePerfClass01#perfName01'}
+ correct_perf_info.append(trim_perf01_test01)
+ self.assertEqual(self.rr.run_stats.perf_info.perf_info,
+ correct_perf_info)
+        # 2. Test PerfInfo after RESULT_PERF01_TEST02
+ self.rr._update_stats(RESULT_PERF01_TEST02, group)
+ trim_perf01_test02 = {u'repetition_index': u'0', u'cpu_time': u'10002',
+ u'name': u'perfName02',
+ u'repetitions': u'0', u'run_type': u'iteration',
+ u'label': u'2123', u'threads': u'1',
+ u'time_unit': u'ns', u'iterations': u'1002',
+ u'run_name': u'perfName02',
+ u'real_time': u'11002',
+ 'test_name': 'somePerfClass01#perfName02'}
+ correct_perf_info.append(trim_perf01_test02)
+ self.assertEqual(self.rr.run_stats.perf_info.perf_info,
+ correct_perf_info)
+ # 3. Test PerfInfo after RESULT_PERF02_TEST01
+ self.rr._update_stats(RESULT_PERF02_TEST01, group)
+ trim_perf02_test01 = {u'repetition_index': u'0', u'cpu_time': u'20001',
+ u'name': u'perfName11',
+ u'repetitions': u'0', u'run_type': u'iteration',
+ u'label': u'2123', u'threads': u'1',
+ u'time_unit': u'ns', u'iterations': u'2001',
+ u'run_name': u'perfName11',
+ u'real_time': u'210001',
+ 'test_name': 'somePerfClass02#perfName11'}
+ correct_perf_info.append(trim_perf02_test01)
+ self.assertEqual(self.rr.run_stats.perf_info.perf_info,
+ correct_perf_info)
+ # 4. Test PerfInfo after RESULT_PERF01_TEST03_NO_CPU_TIME
+ self.rr._update_stats(RESULT_PERF01_TEST03_NO_CPU_TIME, group)
+        # Nothing added since RESULT_PERF01_TEST03_NO_CPU_TIME lacks cpu_time
+ self.assertEqual(self.rr.run_stats.perf_info.perf_info,
+ correct_perf_info)
+
+ def test_classify_perf_info(self):
+ """Test _classify_perf_info method."""
+ group = result_reporter.RunStat()
+ self.rr._update_stats(RESULT_PERF01_TEST01, group)
+ self.rr._update_stats(RESULT_PERF01_TEST02, group)
+ self.rr._update_stats(RESULT_PERF02_TEST01, group)
+        # trim the time from 10001.10001 to 10001
+ trim_perf01_test01 = {u'repetition_index': u'0', u'cpu_time': u'10001',
+ u'name': u'perfName01',
+ u'repetitions': u'0', u'run_type': u'iteration',
+ u'label': u'2123', u'threads': u'1',
+ u'time_unit': u'ns', u'iterations': u'1001',
+ u'run_name': u'perfName01',
+ u'real_time': u'11001',
+ 'test_name': 'somePerfClass01#perfName01'}
+ trim_perf01_test02 = {u'repetition_index': u'0', u'cpu_time': u'10002',
+ u'name': u'perfName02',
+ u'repetitions': u'0', u'run_type': u'iteration',
+ u'label': u'2123', u'threads': u'1',
+ u'time_unit': u'ns', u'iterations': u'1002',
+ u'run_name': u'perfName02',
+ u'real_time': u'11002',
+ 'test_name': 'somePerfClass01#perfName02'}
+ trim_perf02_test01 = {u'repetition_index': u'0', u'cpu_time': u'20001',
+ u'name': u'perfName11',
+ u'repetitions': u'0', u'run_type': u'iteration',
+ u'label': u'2123', u'threads': u'1',
+ u'time_unit': u'ns', u'iterations': u'2001',
+ u'run_name': u'perfName11',
+ u'real_time': u'210001',
+ 'test_name': 'somePerfClass02#perfName11'}
+ correct_classify_perf_info = {"somePerfClass01":[trim_perf01_test01,
+ trim_perf01_test02],
+ "somePerfClass02":[trim_perf02_test01]}
+ classify_perf_info, max_len = self.rr.run_stats.perf_info._classify_perf_info()
+ correct_max_len = {'real_time': 6, 'cpu_time': 5, 'name': 10,
+ 'iterations': 9, 'time_unit': 2}
+ self.assertEqual(max_len, correct_max_len)
+ self.assertEqual(classify_perf_info, correct_classify_perf_info)
+
+
if __name__ == '__main__':
unittest.main()
diff --git a/atest/test_finder_handler.py b/atest/test_finder_handler.py
index 6a7e900..67f5a34 100644
--- a/atest/test_finder_handler.py
+++ b/atest/test_finder_handler.py
@@ -166,7 +166,8 @@
_REFERENCE_TYPE.INTEGRATION,
_REFERENCE_TYPE.MODULE_CLASS]
# Module:some.package
- return [_REFERENCE_TYPE.CACHE, _REFERENCE_TYPE.MODULE_PACKAGE]
+ return [_REFERENCE_TYPE.CACHE, _REFERENCE_TYPE.MODULE_PACKAGE,
+ _REFERENCE_TYPE.MODULE_CLASS]
# Module:Class or IntegrationName:Class
return [_REFERENCE_TYPE.CACHE,
_REFERENCE_TYPE.INTEGRATION,
diff --git a/atest/test_finder_handler_unittest.py b/atest/test_finder_handler_unittest.py
index 9af8f9b..5888565 100755
--- a/atest/test_finder_handler_unittest.py
+++ b/atest/test_finder_handler_unittest.py
@@ -166,7 +166,7 @@
)
self.assertEqual(
test_finder_handler._get_test_reference_types('module:a.package'),
- [REF_TYPE.CACHE, REF_TYPE.MODULE_PACKAGE]
+ [REF_TYPE.CACHE, REF_TYPE.MODULE_PACKAGE, REF_TYPE.MODULE_CLASS]
)
self.assertEqual(
test_finder_handler._get_test_reference_types('.'),
diff --git a/atest/test_finders/module_finder.py b/atest/test_finders/module_finder.py
index 7a1f9c3..e9d311c 100644
--- a/atest/test_finders/module_finder.py
+++ b/atest/test_finders/module_finder.py
@@ -188,6 +188,11 @@
for module_path in self.module_info.get_paths(module_name):
mod_dir = module_path.replace('/', '-')
targets.add(_MODULES_IN % mod_dir)
+ # (b/156457698) Force add vts_kernel_tests as build target if our test
+        # belongs to REQUIRED_KERNEL_TEST_MODULES due to the required_module option
+ # not working for sh_test in soong.
+ if module_name in constants.REQUIRED_KERNEL_TEST_MODULES:
+ targets.add('vts_kernel_tests')
return targets
def _get_module_test_config(self, module_name, rel_config=None):
@@ -355,6 +360,35 @@
return [tinfo]
return None
+ def find_test_by_kernel_class_name(self, module_name, class_name):
+ """Find kernel test for the given class name.
+
+ Args:
+ module_name: A string of the module name to use.
+ class_name: A string of the test's class name.
+
+ Returns:
+ A list of populated TestInfo namedtuple if test found, else None.
+ """
+ class_name, methods = test_finder_utils.split_methods(class_name)
+ test_config = self._get_module_test_config(module_name)
+ test_config_path = os.path.join(self.root_dir, test_config)
+ mod_info = self.module_info.get_module_info(module_name)
+ ti_filter = frozenset(
+ [test_info.TestFilter(class_name, methods)])
+ if test_finder_utils.is_test_from_kernel_xml(test_config_path, class_name):
+ tinfo = self._process_test_info(test_info.TestInfo(
+ test_name=module_name,
+ test_runner=self._TEST_RUNNER,
+ build_targets=set(),
+ data={constants.TI_REL_CONFIG: test_config,
+ constants.TI_FILTER: ti_filter},
+ compatibility_suites=mod_info.get(
+ constants.MODULE_COMPATIBILITY_SUITES, [])))
+ if tinfo:
+ return [tinfo]
+ return None
+
def find_test_by_class_name(self, class_name, module_name=None,
rel_config=None, is_native_test=False):
"""Find test files given a class name.
@@ -417,13 +451,19 @@
module_info = module_infos[0] if module_infos else None
if not module_info:
return None
- # If the target module is NATIVE_TEST, search CC classes only.
find_result = None
+ # If the target module is NATIVE_TEST, search CC classes only.
if not self.module_info.is_native_test(module_name):
# Find by java class.
find_result = self.find_test_by_class_name(
class_name, module_info.test_name,
module_info.data.get(constants.TI_REL_CONFIG))
+        # kernel target test is also defined as NATIVE_TEST in the build system.
+ # TODO (b/157210083) Update find_test_by_kernel_class_name method to
+ # support gen_rule use case.
+ if not find_result:
+ find_result = self.find_test_by_kernel_class_name(
+ module_name, class_name)
# Find by cc class.
if not find_result:
find_result = self.find_test_by_cc_class_name(
diff --git a/atest/test_finders/module_finder_unittest.py b/atest/test_finders/module_finder_unittest.py
index 753c5f5..805a857 100755
--- a/atest/test_finders/module_finder_unittest.py
+++ b/atest/test_finders/module_finder_unittest.py
@@ -38,6 +38,18 @@
MODULE_CLASS = '%s:%s' % (uc.MODULE_NAME, uc.CLASS_NAME)
MODULE_PACKAGE = '%s:%s' % (uc.MODULE_NAME, uc.PACKAGE)
CC_MODULE_CLASS = '%s:%s' % (uc.CC_MODULE_NAME, uc.CC_CLASS_NAME)
+KERNEL_TEST_CLASS = 'test_class_1'
+KERNEL_TEST_CONFIG = 'KernelTest.xml'
+KERNEL_MODULE_CLASS = '%s:%s' % (constants.REQUIRED_KERNEL_TEST_MODULES[0],
+ KERNEL_TEST_CLASS)
+KERNEL_CONFIG_FILE = os.path.join(uc.TEST_DATA_DIR, KERNEL_TEST_CONFIG)
+KERNEL_CLASS_FILTER = test_info.TestFilter(KERNEL_TEST_CLASS, frozenset())
+KERNEL_MODULE_CLASS_DATA = {constants.TI_REL_CONFIG: KERNEL_CONFIG_FILE,
+ constants.TI_FILTER: frozenset([KERNEL_CLASS_FILTER])}
+KERNEL_MODULE_CLASS_INFO = test_info.TestInfo(
+ constants.REQUIRED_KERNEL_TEST_MODULES[0],
+ atf_tr.AtestTradefedTestRunner.NAME,
+ uc.CLASS_BUILD_TARGETS, KERNEL_MODULE_CLASS_DATA)
FLAT_METHOD_INFO = test_info.TestInfo(
uc.MODULE_NAME,
atf_tr.AtestTradefedTestRunner.NAME,
@@ -224,6 +236,8 @@
self.mod_finder.module_info.get_module_info.return_value = mod_info
self.assertIsNone(self.mod_finder.find_test_by_module_and_class(bad_class))
+ @mock.patch.object(module_finder.ModuleFinder, 'find_test_by_kernel_class_name',
+ return_value=None)
@mock.patch.object(module_finder.ModuleFinder, '_is_vts_module',
return_value=False)
@mock.patch.object(module_finder.ModuleFinder, '_get_build_targets')
@@ -234,7 +248,7 @@
#pylint: disable=unused-argument
def test_find_test_by_module_and_class_part_2(self, _isfile, mock_fcf,
mock_checkoutput, mock_build,
- _vts):
+ _vts, _find_kernel):
"""Test find_test_by_module_and_class for MODULE:CC_CLASS."""
# Native test was tested in test_find_test_by_cc_class_name()
self.mod_finder.module_info.is_native_test.return_value = False
@@ -260,6 +274,34 @@
self.mod_finder.module_info.is_testable_module.return_value = False
self.assertIsNone(self.mod_finder.find_test_by_module_and_class(bad_module))
+ @mock.patch.object(module_finder.ModuleFinder, '_get_module_test_config',
+ return_value=KERNEL_CONFIG_FILE)
+ @mock.patch.object(module_finder.ModuleFinder, '_is_vts_module',
+ return_value=False)
+ @mock.patch.object(module_finder.ModuleFinder, '_get_build_targets')
+ @mock.patch('subprocess.check_output', return_value=uc.FIND_CC_ONE)
+ @mock.patch.object(test_finder_utils, 'find_class_file',
+ side_effect=[None, None, '/'])
+ @mock.patch('os.path.isfile', side_effect=unittest_utils.isfile_side_effect)
+ #pylint: disable=unused-argument
+ def test_find_test_by_module_and_class_for_kernel_test(
+ self, _isfile, mock_fcf, mock_checkoutput, mock_build, _vts,
+ _test_config):
+ """Test find_test_by_module_and_class for MODULE:CC_CLASS."""
+ # Kernel test was tested in find_test_by_kernel_class_name()
+ self.mod_finder.module_info.is_native_test.return_value = False
+ self.mod_finder.module_info.is_auto_gen_test_config.return_value = False
+ self.mod_finder.module_info.is_robolectric_test.return_value = False
+ self.mod_finder.module_info.has_test_config.return_value = True
+ mock_build.return_value = uc.CLASS_BUILD_TARGETS
+ mod_info = {constants.MODULE_INSTALLED: DEFAULT_INSTALL_PATH,
+ constants.MODULE_PATH: [uc.CC_MODULE_DIR],
+ constants.MODULE_CLASS: [],
+ constants.MODULE_COMPATIBILITY_SUITES: []}
+ self.mod_finder.module_info.get_module_info.return_value = mod_info
+ t_infos = self.mod_finder.find_test_by_module_and_class(KERNEL_MODULE_CLASS)
+ unittest_utils.assert_equal_testinfos(self, t_infos[0], KERNEL_MODULE_CLASS_INFO)
+
@mock.patch.object(module_finder.ModuleFinder, '_is_vts_module',
return_value=False)
@mock.patch.object(module_finder.ModuleFinder, '_get_build_targets')
diff --git a/atest/test_finders/test_finder_utils.py b/atest/test_finders/test_finder_utils.py
index e065d37..705e7a0 100644
--- a/atest/test_finders/test_finder_utils.py
+++ b/atest/test_finders/test_finder_utils.py
@@ -970,3 +970,30 @@
dp_matrix[row-1][col-1] + cost)
return dp_matrix[row][col]
+
+
+def is_test_from_kernel_xml(xml_file, test_name):
+ """Check if test defined in xml_file.
+
+ A kernel test can be defined like:
+ <option name="test-command-line" key="test_class_1" value="command 1" />
+    where the key is the name of the test class and method for the runner. This
+ returns True if the test_name was defined in the given xml_file.
+
+ Args:
+ xml_file: Absolute path to xml file.
+        test_name: A string of the test name to find.
+
+ Returns:
+ True if test_name in xml_file, False otherwise.
+ """
+ if not os.path.exists(xml_file):
+        raise atest_error.XmlNotExistError('%s: The xml file does '
+ 'not exist' % xml_file)
+ xml_root = ET.parse(xml_file).getroot()
+ option_tags = xml_root.findall('.//option')
+ for option_tag in option_tags:
+ if option_tag.attrib['name'] == 'test-command-line':
+ if option_tag.attrib['key'] == test_name:
+ return True
+ return False
diff --git a/atest/test_runners/atest_tf_test_runner.py b/atest/test_runners/atest_tf_test_runner.py
index 5848eb5..be954fe 100644
--- a/atest/test_runners/atest_tf_test_runner.py
+++ b/atest/test_runners/atest_tf_test_runner.py
@@ -109,7 +109,7 @@
key_path = os.path.join(self.root_dir, ape_api_key)
if ape_api_key and os.path.exists(key_path):
logging.debug('Set APE_API_KEY: %s', ape_api_key)
- os.environ['APE_API_KEY'] = ape_api_key
+ os.environ['APE_API_KEY'] = key_path
else:
logging.debug('APE_API_KEY not set, some GTS tests may fail'
' without authentication.')
@@ -151,7 +151,8 @@
ret_code = constants.EXIT_CODE_SUCCESS
for _ in range(iterations):
run_cmds = self.generate_run_commands(test_infos, extra_args)
- subproc = self.run(run_cmds[0], output_to_stdout=True)
+ subproc = self.run(run_cmds[0], output_to_stdout=True,
+ env_vars=self.generate_env_vars(extra_args))
ret_code |= self.wait_for_subprocess(subproc)
return ret_code
@@ -172,7 +173,8 @@
server = self._start_socket_server()
run_cmds = self.generate_run_commands(test_infos, extra_args,
server.getsockname()[1])
- subproc = self.run(run_cmds[0], output_to_stdout=self.is_verbose)
+ subproc = self.run(run_cmds[0], output_to_stdout=self.is_verbose,
+ env_vars=self.generate_env_vars(extra_args))
self.handle_subprocess(subproc, partial(self._start_monitor,
server,
subproc,
@@ -287,6 +289,15 @@
server.getsockname()[1])
return server
+ def generate_env_vars(self, extra_args):
+ """Convert extra args into env vars."""
+ env_vars = os.environ.copy()
+ debug_port = extra_args.get(constants.TF_DEBUG, '')
+ if debug_port:
+ env_vars['TF_DEBUG'] = 'true'
+ env_vars['TF_DEBUG_PORT'] = str(debug_port)
+ return env_vars
+
# pylint: disable=unnecessary-pass
# Please keep above disable flag to ensure host_env_check is overriden.
def host_env_check(self):
@@ -413,6 +424,9 @@
if constants.COLLECT_TESTS_ONLY == arg:
args_to_append.append('--collect-tests-only')
continue
+ if constants.TF_DEBUG == arg:
+ print("Please attach process to your IDE...")
+ continue
args_not_supported.append(arg)
return args_to_append, args_not_supported
diff --git a/atest/test_runners/atest_tf_test_runner_unittest.py b/atest/test_runners/atest_tf_test_runner_unittest.py
index 909414d..cea4e39 100755
--- a/atest/test_runners/atest_tf_test_runner_unittest.py
+++ b/atest/test_runners/atest_tf_test_runner_unittest.py
@@ -306,11 +306,13 @@
self.tr._try_set_gts_authentication_key()
mock_exist.assert_not_called()
- @mock.patch('os.path.exists')
- def test_try_set_gts_authentication_key_not_set(self, mock_exist):
+ @mock.patch('os.path.join', return_value='/tmp/file_not_exist.json')
+ def test_try_set_gts_authentication_key_not_set(self, _):
"""Test try_set_authentication_key_not_set method."""
- # Test key neither exists nor set by user.
- mock_exist.return_value = False
+ # Delete the environment variable if it's set. This is fine for this
+ # method because it's for validating the APE_API_KEY isn't set.
+ if os.environ.get('APE_API_KEY'):
+ del os.environ['APE_API_KEY']
self.tr._try_set_gts_authentication_key()
self.assertEqual(os.environ.get('APE_API_KEY'), None)
diff --git a/atest/tools/atest_tools.py b/atest/tools/atest_tools.py
index 03feb3e..d424e85 100755
--- a/atest/tools/atest_tools.py
+++ b/atest/tools/atest_tools.py
@@ -51,9 +51,9 @@
# -print | awk -F/ '{{print $NF}}'| sort -u
PRUNENAMES = ['.abc', '.appveyor', '.azure-pipelines',
'.bazelci', '.buildscript',
- '.ci', '.circleci', '.conan', '.config',
+ '.cache', '.ci', '.circleci', '.conan', '.config',
'.externalToolBuilders',
- '.git', '.github', '.github-ci', '.google', '.gradle',
+ '.git', '.github', '.gitlab-ci', '.google', '.gradle',
'.idea', '.intermediates',
'.jenkins',
'.kokoro',
@@ -62,7 +62,7 @@
'.prebuilt_info', '.private', '__pycache__',
'.repo',
'.semaphore', '.settings', '.static', '.svn',
- '.test', '.travis', '.tx',
+ '.test', '.travis', '.travis_scripts', '.tx',
'.vscode']
def _mkdir_when_inexists(dirname):
@@ -157,6 +157,7 @@
match.group(value))
try:
pickle.dump(_dict, cache_file, protocol=2)
+ logging.debug('Done')
except IOError:
os.remove(dump_file)
logging.error('Failed in dumping %s', dump_file)
@@ -208,6 +209,7 @@
with open(index, 'wb') as cache:
try:
pickle.dump(testable_modules, cache, protocol=2)
+ logging.debug('Done')
except IOError:
os.remove(cache)
logging.error('Failed in dumping %s', cache)
@@ -288,6 +290,7 @@
_dict.setdefault(fqcn, set()).add(match.group('java_path'))
try:
pickle.dump(_dict, cache_file, protocol=2)
+ logging.debug('Done')
except (KeyboardInterrupt, SystemExit):
logging.error('Process interrupted or failure.')
os.remove(index)
diff --git a/atest/unittest_data/KernelTest.xml b/atest/unittest_data/KernelTest.xml
new file mode 100644
index 0000000..a2a110f
--- /dev/null
+++ b/atest/unittest_data/KernelTest.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2020 The Android Open Source Project
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration description="Runs kernel_test.">
+ <test class="com.android.tradefed.testtype.binary.KernelTargetTest" >
+ <option name="ignore-binary-check" value="true" />
+ <option name="per-binary-timeout" value="360000" />
+ <option name="test-command-line" key="test_class_1" value="command 1" />
+ <option name="test-command-line" key="test_class_2" value="command 2" />
+ <option name="test-command-line" key="test_class_3" value="command 3" />
+ </test>
+</configuration>