Merge "header-abi-diff: Add a flag to consider opaque types with different names structurally unequal."
diff --git a/scripts/native_heapdump_viewer.py b/scripts/native_heapdump_viewer.py
index 5d5db10..32b99db 100755
--- a/scripts/native_heapdump_viewer.py
+++ b/scripts/native_heapdump_viewer.py
@@ -322,7 +322,9 @@
     font-family: Roboto Mono,monospace;
     cursor: pointer
 }
-</style></head><body>Native allocation HTML viewer<ol>
+</style></head><body>Native allocation HTML viewer<br><br>
+Click on an individual line to expand/collapse to see the details of the
+allocation data<ol>
 """
 html_footer = "</ol></body></html>"
 
diff --git a/tools/compare_failed_tests/compare_failed_tests.py b/tools/compare_failed_tests/compare_failed_tests.py
index 80c928f..c691c13 100755
--- a/tools/compare_failed_tests/compare_failed_tests.py
+++ b/tools/compare_failed_tests/compare_failed_tests.py
@@ -20,7 +20,9 @@
                  'Result.suite_name',
                  'Result.suite_plan',
                  'Result.suite_build_number',
-                 'Result.start_display']
+                 'Result.start_display',
+                 'Result::Build.build_abis_32',
+                 'Result::Build.build_abis_64',]
 
 
 def parse_attrib_path(attrib_path):
@@ -90,7 +92,7 @@
   if test_name not in tests:
     return NO_DATA
 
-  return tests[test_name]
+  return ', '.join([x + ': ' + y for x, y in tests[test_name].items()])
 
 
 def read_test_result_xml(test_result_path):
@@ -106,37 +108,38 @@
   test_result['modules'] = modules
 
   for module in root.iter('Module'):
-    module_name = '|'.join([module.attrib['name'], module.attrib['abi']])
+    abi = module.attrib['abi']
 
-    if module_name in modules:
-      print 'WARNING: Duplicate module: ' + module_name
+    module_name = module.attrib['name']
 
-    testcases = collections.OrderedDict()
-    modules[module_name] = testcases
+    if not module_name in modules:
+      modules[module_name] = collections.OrderedDict()
+
+    testcases = modules[module_name]
 
     for testcase in module.iter('TestCase'):
       testcase_name = testcase.attrib['name']
 
-      if testcase_name in testcases:
-        print 'WARNING: Duplicate testcase: ' + testcase_name
+      if not testcase_name in testcases:
+        testcases[testcase_name] = collections.OrderedDict()
 
-      tests = collections.OrderedDict()
-      testcases[testcase_name] = tests
+      tests = testcases[testcase_name]
 
       for test in testcase.iter('Test'):
         test_name = test.attrib['name']
 
-        if test_name in tests:
-          print 'WARNING: Duplicate test: ' + test_name
+        if not test_name in tests:
+          tests[test_name] = collections.OrderedDict()
 
-        result = test.attrib['result']
-        tests[test_name] = result
+        if abi in tests[test_name]:
+          print '[WARNING] duplicated test:', test_name
+
+        tests[test_name][abi] = test.attrib['result']
 
   return test_result
 
 
-def compare_failed_tests(test_result_a, test_result_b,
-                         csvfile, only_failed_both):
+def compare_failed_tests(test_result_a, test_result_b, csvfile):
   """Do the comparison.
 
   Given two test result dicts (A and B), list all failed test in A and display
@@ -146,7 +149,6 @@
     test_result_a: the dict returned from read_test_result(test_result_a.xml)
     test_result_b: the dict returned from read_test_result(test_result_b.xml)
     csvfile: a opened file
-    only_failed_both: only display tests those failed in both test results
 
   Returns:
     string: diff report, summary
@@ -166,13 +168,12 @@
       testcase_sub_summary = ''
 
       for test_name, result in tests.iteritems():
-        if result == FAIL:
+        if FAIL in result.values():
           result_b = get_result(
               test_result_b, module_name, testcase_name, test_name)
 
-          if not only_failed_both or result_b == FAIL:
-            testcase_sub_summary += '    ' + test_name + ': ' + result_b + '\n'
-            writer.writerow([module_name, testcase_name, test_name, result_b])
+          testcase_sub_summary += '    ' + test_name + ': ' + result_b + '\n'
+          writer.writerow([module_name, testcase_name, test_name, result_b])
 
       if testcase_sub_summary:
         module_sub_summary = '  ' + testcase_name + '\n' + testcase_sub_summary
@@ -189,8 +190,6 @@
   parser.add_argument('test_result_a', help='path to first test_result.xml')
   parser.add_argument('test_result_b', help='path to second test_result.xml')
   parser.add_argument('--csv', default='diff.csv', help='path to csv output')
-  parser.add_argument('--only-failed-both', action='store_true',
-                      help='only list tests failed in both test_result.xml')
 
   args = parser.parse_args()
 
@@ -200,8 +199,7 @@
   print_test_infos(test_result_a, test_result_b)
 
   with open(args.csv, 'w') as csvfile:
-    summary = compare_failed_tests(test_result_a, test_result_b, csvfile,
-                                   args.only_failed_both)
+    summary = compare_failed_tests(test_result_a, test_result_b, csvfile)
 
     print summary
 
diff --git a/tools/repo_pull/gerrit.py b/tools/repo_pull/gerrit.py
new file mode 100755
index 0000000..5cb1b93
--- /dev/null
+++ b/tools/repo_pull/gerrit.py
@@ -0,0 +1,205 @@
+#!/usr/bin/env python3
+
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from __future__ import print_function
+
+import argparse
+import base64
+import json
+import os
+import sys
+
+try:
+    from urllib.request import HTTPBasicAuthHandler, Request, build_opener  # PY3
+except ImportError:
+    from urllib2 import HTTPBasicAuthHandler, Request, build_opener  # PY2
+
+try:
+    from urllib.parse import urlencode, urlparse  # PY3
+except ImportError:
+    from urllib import urlencode  # PY2
+    from urlparse import urlparse  # PY2
+
+
+def load_auth_credentials_from_file(cookie_file):
+    """Load credentials from an opened .gitcookies file."""
+    credentials = {}
+    for lineno, line in enumerate(cookie_file, start=1):
+        if line.startswith('#HttpOnly_'):
+            line = line[len('#HttpOnly_'):]
+
+        if not line or line[0] == '#':
+            continue
+
+        row = line.split('\t')
+        if len(row) != 7:
+            continue
+
+        domain = row[0]
+        cookie = row[6]
+
+        sep = cookie.find('=')
+        if sep == -1:
+            continue
+        username = cookie[0:sep]
+        password = cookie[sep + 1:]
+
+        credentials[domain] = (username, password)
+    return credentials
+
+
+def load_auth_credentials(cookie_file_path):
+    """Load credentials from a .gitcookies file path."""
+    with open(cookie_file_path, 'r') as cookie_file:
+        return load_auth_credentials_from_file(cookie_file)
+
+
+def create_url_opener(cookie_file_path, domain):
+    """Load username and password from .gitcookies and return a URL opener with
+    an authentication handler."""
+
+    # Load authentication credentials
+    credentials = load_auth_credentials(cookie_file_path)
+    username, password = credentials[domain]
+
+    # Create URL opener with authentication handler
+    auth_handler = HTTPBasicAuthHandler()
+    auth_handler.add_password(domain, domain, username, password)
+    return build_opener(auth_handler)
+
+
+def create_url_opener_from_args(args):
+    """Create URL opener from command line arguments."""
+
+    domain = urlparse(args.gerrit).netloc
+
+    try:
+        return create_url_opener(args.gitcookies, domain)
+    except KeyError:
+        print('error: Cannot find the domain "{}" in "{}". '
+              .format(domain, args.gitcookies), file=sys.stderr)
+        print('error: Please check the Gerrit Code Review URL or follow the '
+              'instructions in '
+              'https://android.googlesource.com/platform/development/'
+              '+/master/tools/repo_pull#installation', file=sys.stderr)
+        sys.exit(1)
+
+
+def _decode_xssi_json(data):
+    """Trim XSSI protector and decode JSON objects."""
+
+    # Decode UTF-8
+    data = data.decode('utf-8')
+
+    # Trim cross site script inclusion (XSSI) protector
+    if data[0:4] != ')]}\'':
+        raise ValueError('unexpected response content: ' + data)
+    data = data[4:]
+
+    # Parse JSON objects
+    return json.loads(data)
+
+
+def query_change_lists(url_opener, gerrit, query_string, limits):
+    """Query change lists."""
+    data = [
+        ('q', query_string),
+        ('o', 'CURRENT_REVISION'),
+        ('o', 'CURRENT_COMMIT'),
+        ('n', str(limits)),
+    ]
+    url = gerrit + '/a/changes/?' + urlencode(data)
+
+    response_file = url_opener.open(url)
+    try:
+        return _decode_xssi_json(response_file.read())
+    finally:
+        response_file.close()
+
+
+def set_review(url_opener, gerrit_url, change_id, labels, message):
+    """Set review votes to a change list."""
+
+    url = '{}/a/changes/{}/revisions/current/review'.format(
+        gerrit_url, change_id)
+
+    data = {}
+    if labels:
+        data['labels'] = labels
+    if message:
+        data['message'] = message
+    data = json.dumps(data).encode('utf-8')
+
+    headers = {
+        'Content-Type': 'application/json; charset=UTF-8',
+    }
+
+    request = Request(url, data, headers)
+    response_file = url_opener.open(request)
+    try:
+        res_code = response_file.getcode()
+        res_json = _decode_xssi_json(response_file.read())
+        return (res_code, res_json)
+    finally:
+        response_file.close()
+
+
+def get_patch(url_opener, gerrit_url, change_id, revision_id='current'):
+    """Download the patch file."""
+
+    url = '{}/a/changes/{}/revisions/{}/patch'.format(
+        gerrit_url, change_id, revision_id)
+
+    response_file = url_opener.open(url)
+    try:
+        return base64.b64decode(response_file.read())
+    finally:
+        response_file.close()
+
+
+def _parse_args():
+    """Parse command line options."""
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument('query', help='Change list query string')
+    parser.add_argument('-g', '--gerrit', required=True,
+                        help='Gerrit review URL')
+
+    parser.add_argument('--gitcookies',
+                        default=os.path.expanduser('~/.gitcookies'),
+                        help='Gerrit cookie file')
+    parser.add_argument('--limits', default=1000,
+                        help='Max number of change lists')
+
+    return parser.parse_args()
+
+
+def main():
+    args = _parse_args()
+
+    # Query change lists
+    url_opener = create_url_opener_from_args(args)
+    change_lists = query_change_lists(
+        url_opener, args.gerrit, args.query, args.limits)
+
+    # Print the result
+    json.dump(change_lists, sys.stdout, indent=4, separators=(', ', ': '))
+    print()  # Print the end-of-line
+
+if __name__ == '__main__':
+    main()
diff --git a/tools/repo_pull/repo_patch.py b/tools/repo_pull/repo_patch.py
new file mode 100755
index 0000000..5905c2a
--- /dev/null
+++ b/tools/repo_pull/repo_patch.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python3
+
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""A command line utility to download multiple patch files of change lists from
+Gerrit."""
+
+from __future__ import print_function
+
+from gerrit import create_url_opener_from_args, query_change_lists, get_patch
+
+import argparse
+import os
+
+
+def _parse_args():
+    """Parse command line options."""
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument('query', help='Change list query string')
+    parser.add_argument('-g', '--gerrit', required=True,
+                        help='Gerrit review URL')
+
+    parser.add_argument('--gitcookies',
+                        default=os.path.expanduser('~/.gitcookies'),
+                        help='Gerrit cookie file')
+    parser.add_argument('--limits', default=1000,
+                        help='Max number of change lists')
+
+    return parser.parse_args()
+
+
+def main():
+    args = _parse_args()
+
+    # Query change lists
+    url_opener = create_url_opener_from_args(args)
+    change_lists = query_change_lists(
+        url_opener, args.gerrit, args.query, args.limits)
+
+    # Download patch files
+    num_changes = len(change_lists)
+    num_changes_width = len(str(num_changes))
+    for i, change in enumerate(change_lists, start=1):
+        print('{:>{}}/{} | {} {}'.format(
+                i, num_changes_width, num_changes, change['_number'],
+                change['subject']))
+
+        patch_file = get_patch(url_opener, args.gerrit, change['id'])
+        with open('{}.patch'.format(change['_number']), 'wb') as output_file:
+            output_file.write(patch_file)
+
+if __name__ == '__main__':
+    main()
diff --git a/tools/repo_pull/repo_pull.py b/tools/repo_pull/repo_pull.py
index 3c40da6..1c41aae 100755
--- a/tools/repo_pull/repo_pull.py
+++ b/tools/repo_pull/repo_pull.py
@@ -20,6 +20,8 @@
 
 from __future__ import print_function
 
+from gerrit import create_url_opener_from_args, query_change_lists
+
 import argparse
 import collections
 import itertools
@@ -31,16 +33,6 @@
 import xml.dom.minidom
 
 try:
-    from urllib.parse import urlencode  # PY3
-except ImportError:
-    from urllib import urlencode  # PY2
-
-try:
-    from urllib.request import HTTPBasicAuthHandler, build_opener  # PY3
-except ImportError:
-    from urllib2 import HTTPBasicAuthHandler, build_opener  # PY2
-
-try:
     from __builtin__ import raw_input as input  # PY2
 except ImportError:
     pass
@@ -124,8 +116,19 @@
         self.number = change_list['_number']
 
         self.fetch = fetch
-        self.fetch_url = fetch['http']['url']
-        self.fetch_ref = fetch['http']['ref']
+
+        fetch_git = None
+        for protocol in ('http', 'sso', 'rpc'):
+            fetch_git = fetch.get(protocol)
+            if fetch_git:
+                break
+
+        if not fetch_git:
+            raise ValueError(
+                'unknown fetch protocols: ' + str(list(fetch.keys())))
+
+        self.fetch_url = fetch_git['url']
+        self.fetch_ref = fetch_git['ref']
 
         self.commit_sha1 = commit_sha1
         self.commit = commit
@@ -167,54 +170,6 @@
     return project_dirs
 
 
-def load_auth(cookie_file_path):
-    """Load username and password from .gitcookies and return an
-    HTTPBasicAuthHandler."""
-    auth_handler = HTTPBasicAuthHandler()
-    with open(cookie_file_path, 'r') as cookie_file:
-        for lineno, line in enumerate(cookie_file, start=1):
-            if line.startswith('#HttpOnly_'):
-                line = line[len('#HttpOnly_'):]
-            if not line or line[0] == '#':
-                continue
-            row = line.split('\t')
-            if len(row) != 7:
-                continue
-            domain = row[0]
-            cookie = row[6]
-            sep = cookie.find('=')
-            if sep == -1:
-                continue
-            username = cookie[0:sep]
-            password = cookie[sep + 1:]
-            auth_handler.add_password(domain, domain, username, password)
-    return auth_handler
-
-
-def query_change_lists(gerrit, query_string, gitcookies, limits):
-    """Query change lists."""
-    data = [
-        ('q', query_string),
-        ('o', 'CURRENT_REVISION'),
-        ('o', 'CURRENT_COMMIT'),
-        ('n', str(limits)),
-    ]
-    url = gerrit + '/a/changes/?' + urlencode(data)
-
-    auth_handler = load_auth(gitcookies)
-    opener = build_opener(auth_handler)
-
-    response_file = opener.open(url)
-    try:
-        # Trim cross site script inclusion (XSSI) protector
-        data = response_file.read().decode('utf-8')[4:]
-
-        # Parse responsed JSON
-        return json.loads(data)
-    finally:
-        response_file.close()
-
-
 def group_and_sort_change_lists(change_lists, project_dirs):
     """Build a dict that maps projects to a list of topologically sorted change
     lists."""
@@ -453,8 +408,8 @@
 
 def _get_change_lists_from_args(args):
     """Query the change lists by args."""
-    return query_change_lists(args.gerrit, args.query, args.gitcookies,
-                              args.limits)
+    url_opener = create_url_opener_from_args(args)
+    return query_change_lists(url_opener, args.gerrit, args.query, args.limits)
 
 
 def _get_local_branch_name_from_args(args):
diff --git a/tools/repo_pull/repo_review.py b/tools/repo_pull/repo_review.py
index 9a404c8..bae2853 100755
--- a/tools/repo_pull/repo_review.py
+++ b/tools/repo_pull/repo_review.py
@@ -20,113 +20,12 @@
 
 from __future__ import print_function
 
+from gerrit import create_url_opener_from_args, query_change_lists, set_review
+
 import argparse
-import collections
-import itertools
 import json
-import multiprocessing
 import os
-import re
 import sys
-import xml.dom.minidom
-
-try:
-    from urllib.parse import urlencode  # PY3
-except ImportError:
-    from urllib import urlencode  # PY2
-
-try:
-    from urllib.request import (
-        HTTPBasicAuthHandler, Request, build_opener)  # PY3
-except ImportError:
-    from urllib2 import HTTPBasicAuthHandler, Request, build_opener  # PY2
-
-
-def load_auth(cookie_file_path):
-    """Load username and password from .gitcookies and return an
-    HTTPBasicAuthHandler."""
-    auth_handler = HTTPBasicAuthHandler()
-    with open(cookie_file_path, 'r') as cookie_file:
-        for lineno, line in enumerate(cookie_file, start=1):
-            if line.startswith('#HttpOnly_'):
-                line = line[len('#HttpOnly_'):]
-            if not line or line[0] == '#':
-                continue
-            row = line.split('\t')
-            if len(row) != 7:
-                continue
-            domain = row[0]
-            cookie = row[6]
-            sep = cookie.find('=')
-            if sep == -1:
-                continue
-            username = cookie[0:sep]
-            password = cookie[sep + 1:]
-            auth_handler.add_password(domain, domain, username, password)
-    return auth_handler
-
-
-def _decode_xssi_json(data):
-    """Trim XSSI protector and decode JSON objects."""
-    # Trim cross site script inclusion (XSSI) protector
-    data = data.decode('utf-8')[4:]
-    # Parse JSON objects
-    return json.loads(data)
-
-
-def query_change_lists(gerrit, query_string, gitcookies, limits):
-    """Query change lists."""
-    data = [
-        ('q', query_string),
-        ('o', 'CURRENT_REVISION'),
-        ('o', 'CURRENT_COMMIT'),
-        ('n', str(limits)),
-    ]
-    url = gerrit + '/a/changes/?' + urlencode(data)
-
-    auth_handler = load_auth(gitcookies)
-    opener = build_opener(auth_handler)
-
-    response_file = opener.open(url)
-    try:
-        return _decode_xssi_json(response_file.read())
-    finally:
-        response_file.close()
-
-
-def set_review(gerrit, gitcookies, change_id, labels, message):
-    """Set review votes to a change list."""
-
-    url = '{}/a/changes/{}/revisions/current/review'.format(gerrit, change_id)
-
-    auth_handler = load_auth(gitcookies)
-    opener = build_opener(auth_handler)
-
-    data = {}
-    if labels:
-        data['labels'] = labels
-    if message:
-        data['message'] = message
-    data = json.dumps(data).encode('utf-8')
-
-    headers = {
-        'Content-Type': 'application/json; charset=UTF-8',
-    }
-
-    request = Request(url, data, headers)
-    response_file = opener.open(request)
-    try:
-        res_code = response_file.getcode()
-        res_json = _decode_xssi_json(response_file.read())
-        return (res_code, res_json)
-    finally:
-        response_file.close()
-
-
-def _get_change_lists_from_args(args):
-    """Query the change lists by args."""
-    return query_change_lists(args.gerrit, args.query, args.gitcookies,
-                              args.limits)
 
 
 def _get_labels_from_args(args):
@@ -208,8 +107,12 @@
     # Convert label arguments
     labels = _get_labels_from_args(args)
 
+    # Load authentication credentials
+    url_opener = create_url_opener_from_args(args)
+
     # Retrieve change lists
-    change_lists = _get_change_lists_from_args(args)
+    change_lists = query_change_lists(
+        url_opener, args.gerrit, args.query, args.limits)
     if not change_lists:
         print('error: No matching change lists.', file=sys.stderr)
         sys.exit(1)
@@ -225,8 +128,7 @@
     for change in change_lists:
         try:
             res_code, res_json = set_review(
-                args.gerrit, args.gitcookies, change['id'], labels,
-                args.message)
+                url_opener, args.gerrit, change['id'], labels, args.message)
         except HTTPError as e:
             res_code = e.code
             res_json = None
diff --git a/vndk/snapshot/build.sh b/vndk/snapshot/build.sh
index c4d9cd1..10d92d8 100755
--- a/vndk/snapshot/build.sh
+++ b/vndk/snapshot/build.sh
@@ -18,16 +18,22 @@
 export TARGET_BUILD_VARIANT=user
 export BOARD_VNDK_VERSION=current
 
-echo "-----Generating VNDK snapshot for arm64-armv8-a"
-make -j vndk dist TARGET_PRODUCT=aosp_arm64_ab
+echo "-----Generating VNDK snapshot for arm64"
+make -j vndk dist TARGET_PRODUCT=aosp_arm64
 
-echo "-----Generating VNDK snapshot for arm-armv7-a-neon"
+echo "-----Generating VNDK snapshot for arm, 64-bit binder"
+make -j vndk dist TARGET_PRODUCT=aosp_arm
+
+echo "-----Generating VNDK snapshot for arm, 32-bit binder"
 make -j vndk dist TARGET_PRODUCT=aosp_arm_ab
 
-echo "-----Generating VNDK snapshot for x86_64-x86_64"
-make -j vndk dist TARGET_PRODUCT=aosp_x86_64_ab
+echo "-----Generating VNDK snapshot for x86_64"
+make -j vndk dist TARGET_PRODUCT=aosp_x86_64
 
-echo "-----Generating VNDK snapshot for x86-x86"
+echo "-----Generating VNDK snapshot for x86, 64-bit binder"
+make -j vndk dist TARGET_PRODUCT=aosp_x86
+
+echo "-----Generating VNDK snapshot for x86, 32-bit binder"
 make -j vndk dist TARGET_PRODUCT=aosp_x86_ab
 
 echo "-----Running tests"
diff --git a/vndk/snapshot/test.sh b/vndk/snapshot/test.sh
index 7938d05..d73ea74 100755
--- a/vndk/snapshot/test.sh
+++ b/vndk/snapshot/test.sh
@@ -24,214 +24,229 @@
 
 set -eo pipefail
 
-if [ "$#" -ne 1 ]; then
-    echo "Usage: \"$0 all\" to test all four VNDK snapshot variants at once."
-    echo "Usage: \"$0 TARGET_ARCH\" to test a VNDK snapshot of a specific arch."
+if [[ "$#" -ne 1 ]]; then
+    echo "Usage: \"$0 all\" to test all VNDK snapshot variants at once."
+    echo "       \"$0 \$TARGET_PRODUCT\" to test a specific VNDK snapshot."
     exit 1
 fi
 
-if [[ $1 == 'all' ]]; then
-    ARCHS=('arm' 'arm64' 'x86' 'x86_64')
+if [[ "$1" == 'all' ]]; then
+    readonly TARGET_PRODUCTS=('aosp_arm' 'aosp_arm_ab' 'aosp_arm64' 'aosp_x86' 'aosp_x86_ab' 'aosp_x86_64')
 else
-    ARCHS=($1)
+    readonly TARGET_PRODUCTS=($1)
 fi
 
-script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-ANDROID_BUILD_TOP=$(dirname $(dirname $(dirname $script_dir)))
-echo "ANDROID_BUILD_TOP: $ANDROID_BUILD_TOP"
+script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+readonly ANDROID_BUILD_TOP=$(dirname $(dirname $(dirname "${script_dir}")))
+echo "ANDROID_BUILD_TOP: "${ANDROID_BUILD_TOP}""
 
 OUT_DIR=${OUT_DIR:-}
 DIST_DIR=${DIST_DIR:-}
-if [[ -z $DIST_DIR ]]; then
-    if [[ -z $OUT_DIR ]]; then
-        DIST_DIR=$ANDROID_BUILD_TOP/out/dist
+if [[ -z "${DIST_DIR}" ]]; then
+    if [[ -z "${OUT_DIR}" ]]; then
+        DIST_DIR="${ANDROID_BUILD_TOP}"/out/dist
     else
-        DIST_DIR=$OUT_DIR/dist
+        DIST_DIR="${OUT_DIR}"/dist
     fi
 fi
 
 # Get PLATFORM_VNDK_VERSION
-source "$ANDROID_BUILD_TOP/build/envsetup.sh" >/dev/null
-PLATFORM_VNDK_VERSION=`get_build_var PLATFORM_VNDK_VERSION`
+source ""${ANDROID_BUILD_TOP}"/build/envsetup.sh" >/dev/null
+readonly PLATFORM_VNDK_VERSION="$(get_build_var PLATFORM_VNDK_VERSION)"
 
-SNAPSHOT_TOP=$DIST_DIR/android-vndk-snapshot
-SNAPSHOT_TEMPFILE=$DIST_DIR/snapshot_libs.txt
-SYSTEM_TEMPFILE=$DIST_DIR/system_libs.txt
+readonly TEMP_DIR="$(mktemp -d /tmp/"$(basename $0)"_XXXXXXXX)"
+readonly SNAPSHOT_TOP="${TEMP_DIR}"/android-vndk-snapshot
+readonly SNAPSHOT_TEMPFILE="${TEMP_DIR}"/snapshot_libs.txt
+readonly SYSTEM_TEMPFILE="${TEMP_DIR}"/system_libs.txt
+readonly BINDER_32_DIRNAME='binder32'
 
-RED='\033[0;31m'
-NC='\033[0m'
-PASS="::: PASS :::"
-FAIL="${RED}::: FAIL :::${NC}"
+readonly RED='\033[0;31m'
+readonly NC='\033[0m'
+readonly PASS="::: PASS :::"
+readonly FAIL=""${RED}"::: FAIL :::"${NC}""
 
 
-function remove_unzipped_snapshot {
-    if [ -d $SNAPSHOT_TOP ]; then
-        echo "Removing $SNAPSHOT_TOP"
-        rm -rf $SNAPSHOT_TOP
-    fi
+function set_vars() {
+    TARGET_PRODUCT="$1"
+    ARCH=''
+    PRODUCT_OUT=''
+    BITNESS_SUFFIX=''
+    BINDER_BITNESS_PATH=''
+    TARGET_2ND_ARCH=''
+    case "$1" in
+        aosp_arm64)
+            ARCH='arm64'
+            PRODUCT_OUT='generic_arm64'
+            BITNESS_SUFFIX='64'
+            TARGET_2ND_ARCH='arm'
+            ;;
+        aosp_arm)
+            ARCH='arm'
+            PRODUCT_OUT='generic'
+            ;;
+        aosp_arm_ab)
+            ARCH='arm'
+            PRODUCT_OUT='generic_arm_ab'
+            BINDER_BITNESS_PATH="${BINDER_32_DIRNAME}"
+            ;;
+        aosp_x86_64)
+            ARCH='x86_64'
+            PRODUCT_OUT='generic_x86_64'
+            BITNESS_SUFFIX='64'
+            TARGET_2ND_ARCH='x86'
+            ;;
+        aosp_x86)
+            ARCH='x86'
+            PRODUCT_OUT='generic_x86'
+            ;;
+        aosp_x86_ab)
+            ARCH='x86'
+            PRODUCT_OUT='generic_x86'
+            BINDER_BITNESS_PATH="${BINDER_32_DIRNAME}"
+            ;;
+        *)
+            echo "Unrecognized \$TARGET_PRODUCT: "$1""
+            exit 1
+            ;;
+    esac
 }
 
 
+function cleanup {
+    echo "[Cleanup]"
+    echo "Removing TEMP_DIR: ${TEMP_DIR}"
+    rm -rf "${TEMP_DIR}"
+}
+trap cleanup EXIT
+
+
 #######################################
 # Compares the list of VNDK-core and VNDK-SP
 # libs included in the snapshot and installed
 # under $PRODUCT_OUT/system/lib*
 #
 # Arguments:
-#   $1: vndk_type: string, one of [vndk-core, vndk-sp]
-#   $2: target_arch: string, one of [arm, arm64, x86, x86_64]
+#   $1: vndk_type: one of [vndk-core, vndk-sp]
 #######################################
 function compare_vndk_libs() {
-    local vndk_type=$1
-    local target_arch=$2
-    local target_arch_2nd=''
-    local product
-    local bitness
-    local snapshot_dir
-    local snapshot_dir_2nd
+    local vndk_type="$1"
     local vndk_dir_suffix
     local system_vndk_dir
+    local snapshot_dir
+    local snapshot_dir_2nd
     local system_lib_dir
     local system_lib_dir_2nd
 
-    if [[ $target_arch == 'arm64' ]]; then
-        product='generic_arm64_ab'
-        target_arch_2nd='arm'
-    elif [[ $target_arch == 'arm' ]]; then
-        product='generic_arm_ab'
-    elif [[ $target_arch == 'x86_64' ]]; then
-        product='generic_x86_64_ab'
-        target_arch_2nd='x86'
-    elif [[ $target_arch == 'x86' ]]; then
-        product='generic_x86_ab'
-    fi
-
-    if [[ ${target_arch:-2:length} =~ '64' ]]; then
-        bitness='64'
-    else
-        bitness=''
-    fi
-
-    if [[ -z $PLATFORM_VNDK_VERSION ]]; then
+    if [[ -z "${PLATFORM_VNDK_VERSION}" ]]; then
         vndk_dir_suffix=""
     else
-        vndk_dir_suffix="-$PLATFORM_VNDK_VERSION"
+        vndk_dir_suffix="-${PLATFORM_VNDK_VERSION}"
     fi
 
-    if [[ $vndk_type == 'vndk-core' ]]; then
+    if [[ "${vndk_type}" == 'vndk-core' ]]; then
         system_vndk_dir="vndk${vndk_dir_suffix}"
     else
         system_vndk_dir="vndk-sp${vndk_dir_suffix}"
     fi
 
     function diff_vndk_dirs() {
-        local snapshot=$1
-        local system=$2
-        local local_module_target_arch=$3
+        local snapshot="$1"
+        local system="$2"
+        local target_arch="$3"
 
-        ls -1 $snapshot > $SNAPSHOT_TEMPFILE
-        find $system -type f | xargs -n 1 -I file bash -c "basename file" | sort > $SYSTEM_TEMPFILE
+        ls -1 ${snapshot} > "${SNAPSHOT_TEMPFILE}"
+        find "${system}" -type f | xargs -n 1 -I file bash -c "basename file" | sort > "${SYSTEM_TEMPFILE}"
 
-        echo "Comparing libs for VNDK=$vndk_type, SNAPSHOT_VARIANT=$target_arch, ARCH=$local_module_target_arch"
-        echo "Snapshot dir: $snapshot"
-        echo "System dir: $system"
+        echo "Comparing libs for TARGET_PRODUCT="${TARGET_PRODUCT}", VNDK="${vndk_type}", ARCH="${target_arch}""
+        echo "Snapshot dir:" ${snapshot}
+        echo "System dir: "${system}""
         (diff --old-line-format="Only found in VNDK snapshot: %L" \
               --new-line-format="Only found in /system/lib*: %L" \
               --unchanged-line-format="" \
-              $SNAPSHOT_TEMPFILE $SYSTEM_TEMPFILE && echo $PASS) \
-        || (echo -e $FAIL; exit 1)
+              "${SNAPSHOT_TEMPFILE}" "${SYSTEM_TEMPFILE}" && echo "${PASS}") \
+        || (echo -e "${FAIL}"; exit 1)
     }
 
-    snapshot_dir=$SNAPSHOT_TOP/$target_arch/arch-$target_arch-*/shared/$vndk_type
-    system_lib_dir=$ANDROID_BUILD_TOP/out/target/product/$product/system/lib$bitness/$system_vndk_dir
-    diff_vndk_dirs $snapshot_dir $system_lib_dir $target_arch
+    if [[ -n "${BINDER_BITNESS_PATH}" ]]; then
+        snapshot_dir="${SNAPSHOT_TOP}"/"${ARCH}"/"${BINDER_BITNESS_PATH}"/arch-"${ARCH}"-*/shared/"${vndk_type}"
+    else
+        snapshot_dir="${SNAPSHOT_TOP}"/"${ARCH}"/arch-"${ARCH}"-*/shared/"${vndk_type}"
+    fi
 
-    if [[ -n $target_arch_2nd ]]; then
-        snapshot_dir_2nd=$SNAPSHOT_TOP/$target_arch/arch-$target_arch_2nd-*/shared/$vndk_type
-        system_lib_dir_2nd=$ANDROID_BUILD_TOP/out/target/product/$product/system/lib/$system_vndk_dir
-        diff_vndk_dirs $snapshot_dir_2nd $system_lib_dir_2nd $target_arch_2nd
+    system_lib_dir="${ANDROID_BUILD_TOP}"/out/target/product/"${PRODUCT_OUT}"/system/lib"${BITNESS_SUFFIX}"/"${system_vndk_dir}"
+    diff_vndk_dirs "${snapshot_dir}" "${system_lib_dir}" "${ARCH}"
+
+    if [[ -n "${TARGET_2ND_ARCH}" ]]; then
+        snapshot_dir_2nd="${SNAPSHOT_TOP}"/"${ARCH}"/arch-"${TARGET_2ND_ARCH}"-*/shared/"${vndk_type}"
+        system_lib_dir_2nd="${ANDROID_BUILD_TOP}"/out/target/product/"${PRODUCT_OUT}"/system/lib/"${system_vndk_dir}"
+        diff_vndk_dirs "${snapshot_dir_2nd}" "${system_lib_dir_2nd}" "${TARGET_2ND_ARCH}"
     fi
 }
 
 
 #######################################
-# Executes testcases against VNDK snapshot of specified arch
+# Executes tests against VNDK snapshot of
+# specified $TARGET_PRODUCT
 #
 # Arguments:
-#   $1: arch: string, one of [arm, arm64, x86, x86_64]
+#   $1: TARGET_PRODUCT
 #######################################
-function run_test_cases() {
-    local arch=$1
-    local snapshot_zip=$DIST_DIR/android-vndk-$arch.zip
-    local snapshot_variant_top=$SNAPSHOT_TOP/$arch
+function run_tests() {
+    set_vars "$1"
+    local snapshot_zip="${DIST_DIR}"/android-vndk-"${TARGET_PRODUCT}".zip
+    local snapshot_variant_top="${SNAPSHOT_TOP}"/"${ARCH}"
 
-    echo "[Setup] Unzipping \"android-vndk-$arch.zip\""
-    unzip -q $snapshot_zip -d $SNAPSHOT_TOP
+    echo "[Setup] Unzipping \"android-vndk-"${TARGET_PRODUCT}".zip\""
+    unzip -qn "${snapshot_zip}" -d "${SNAPSHOT_TOP}"
 
     echo "[Test] Comparing VNDK-core and VNDK-SP libs in snapshot vs /system/lib*"
-    compare_vndk_libs 'vndk-core' $arch
-    compare_vndk_libs 'vndk-sp' $arch
+    compare_vndk_libs 'vndk-core'
+    compare_vndk_libs 'vndk-sp'
 
     echo "[Test] Checking required config files are present"
-
-    if [[ -z $PLATFORM_VNDK_VERSION ]]; then
+    if [[ -z "${PLATFORM_VNDK_VERSION}" ]]; then
         config_file_suffix=""
     else
-        config_file_suffix=".$PLATFORM_VNDK_VERSION"
+        config_file_suffix=".${PLATFORM_VNDK_VERSION}"
     fi
 
     config_files=(
-        "ld.config$config_file_suffix.txt"
-        "llndk.libraries$config_file_suffix.txt"
-        "vndksp.libraries$config_file_suffix.txt"
+        "ld.config"${config_file_suffix}".txt"
+        "llndk.libraries"${config_file_suffix}".txt"
+        "vndksp.libraries"${config_file_suffix}".txt"
         "vndkcore.libraries.txt"
         "vndkprivate.libraries.txt"
         "module_paths.txt")
     for config_file in "${config_files[@]}"; do
-        config_file_abs_path=$snapshot_variant_top/configs/$config_file
-        if [ ! -e $config_file_abs_path ]; then
-            echo -e "$FAIL The file \"$config_file_abs_path\" was not found in snapshot."
+        config_file_abs_path="${snapshot_variant_top}"/configs/"${config_file}"
+        if [[ ! -e "${config_file_abs_path}" ]]; then
+            echo -e ""${FAIL}" The file \""${config_file_abs_path}"\" was not found in snapshot."
             exit 1
         else
-            echo "$PASS Found $config_file"
+            echo ""${PASS}" Found "${config_file}""
         fi
     done
 
     echo "[Test] Checking directory structure of snapshot"
     directories=(
-        'configs/'
-        'NOTICE_FILES/')
+        "configs/"
+        "NOTICE_FILES/")
     for sub_dir in "${directories[@]}"; do
-        dir_abs_path=$snapshot_variant_top/$sub_dir
-        if [ ! -d $dir_abs_path ]; then
-            echo -e "$FAIL The directory \"$dir_abs_path\" was not found in snapshot."
+        dir_abs_path="${snapshot_variant_top}"/"${sub_dir}"
+        if [[ ! -d "${dir_abs_path}" ]]; then
+            echo -e ""${FAIL}" The directory \""${dir_abs_path}"\" was not found in snapshot."
             exit 1
         else
-            echo "$PASS Found $sub_dir"
+            echo ""${PASS}" Found "${sub_dir}""
         fi
     done
 }
 
 
-#######################################
-# Cleanup
-#######################################
-function cleanup {
-    echo "[Cleanup]"
-    remove_unzipped_snapshot
-    echo "[Cleanup] Removing temp files..."
-    rm -f $SNAPSHOT_TEMPFILE $SYSTEM_TEMPFILE
-}
-trap cleanup EXIT
-
-
-#######################################
-# Run testcases
-#######################################
-remove_unzipped_snapshot
-for arch in "${ARCHS[@]}"; do
-    echo -e "\n::::::::: Running testcases for ARCH=$arch :::::::::"
-    run_test_cases $arch
+# Run tests for each target product
+for target_product in "${TARGET_PRODUCTS[@]}"; do
+    echo -e "\n::::::::: Running tests for TARGET_PRODUCT="${target_product}" :::::::::"
+    run_tests "${target_product}"
 done
 
-echo "All tests passed!"
+echo "Done. All tests passed!"
diff --git a/vndk/tools/definition-tool/vndk_definition_tool.py b/vndk/tools/definition-tool/vndk_definition_tool.py
index 0179de2..62631cb 100755
--- a/vndk/tools/definition-tool/vndk_definition_tool.py
+++ b/vndk/tools/definition-tool/vndk_definition_tool.py
@@ -905,11 +905,13 @@
         except IOError:
             return b''
 
+
     @classmethod
     def is_zipfile(cls, apk_file_path):
         magic = cls._read_first_bytes(apk_file_path, 2)
         return magic == b'PK' and zipfile.is_zipfile(apk_file_path)
 
+
     @classmethod
     def enumerate_dex_strings_apk(cls, apk_file_path):
         with zipfile.ZipFile(apk_file_path, 'r') as zip_file:
@@ -921,42 +923,109 @@
                 except KeyError:
                     break
 
+
     @classmethod
     def is_vdex_file(cls, vdex_file_path):
         return vdex_file_path.endswith('.vdex')
 
-    VdexHeader = create_struct('VdexHeader', (
+
+    # VdexHeader 0
+    VdexHeader0 = create_struct('VdexHeader0', (
         ('magic', '4s'),
-        ('version', '4s'),
+        ('vdex_version', '4s'),
+    ))
+
+
+    # VdexHeader 1 - 15
+    VdexHeader1 = create_struct('VdexHeader1', (
+        ('magic', '4s'),
+        ('vdex_version', '4s'),
         ('number_of_dex_files', 'I'),
         ('dex_size', 'I'),
-        # ('dex_shared_data_size', 'I'),  # >= 016
         ('verifier_deps_size', 'I'),
         ('quickening_info_size', 'I'),
+        # checksums
     ))
 
+
+    # VdexHeader 16 - 18
+    VdexHeader16 = create_struct('VdexHeader16', (
+        ('magic', '4s'),
+        ('vdex_version', '4s'),
+        ('number_of_dex_files', 'I'),
+        ('dex_size', 'I'),
+        ('dex_shared_data_size', 'I'),
+        ('verifier_deps_size', 'I'),
+        ('quickening_info_size', 'I'),
+        # checksums
+    ))
+
+
+    # VdexHeader 19
+    VdexHeader19 = create_struct('VdexHeader19', (
+        ('magic', '4s'),
+        ('vdex_version', '4s'),
+        ('dex_section_version', '4s'),
+        ('number_of_dex_files', 'I'),
+        ('verifier_deps_size', 'I'),
+        # checksums
+    ))
+
+
+    DexSectionHeader = create_struct('DexSectionHeader', (
+        ('dex_size', 'I'),
+        ('dex_shared_data_size', 'I'),
+        ('quickening_info_size', 'I'),
+    ))
+
+
     @classmethod
     def enumerate_dex_strings_vdex_buf(cls, buf):
         buf = get_py3_bytes(buf)
-        vdex_header = cls.VdexHeader.unpack_from(buf, offset=0)
 
-        quickening_table_off_size = 0
-        if vdex_header.version > b'010\x00':
-            quickening_table_off_size = 4
+        magic, version = struct.unpack_from('4s4s', buf)
 
-        # Skip vdex file header size
-        offset = cls.VdexHeader.struct_size
+        # Check the vdex file magic word
+        if magic != b'vdex':
+            raise ValueError('bad vdex magic word')
 
-        # Skip `dex_shared_data_size`
-        if vdex_header.version >= b'016\x00':
-            offset += 4
+        # Parse vdex file header (w.r.t. version)
+        if version == b'000\x00':
+            VdexHeader = cls.VdexHeader0
+        elif version >= b'001\x00' and version < b'016\x00':
+            VdexHeader = cls.VdexHeader1
+        elif version >= b'016\x00' and version < b'019\x00':
+            VdexHeader = cls.VdexHeader16
+        elif version == b'019\x00':
+            VdexHeader = cls.VdexHeader19
+        else:
+            raise ValueError('unknown vdex version ' + repr(version))
 
-        # Skip dex file checksums size
-        offset += 4 * vdex_header.number_of_dex_files
+        vdex_header = VdexHeader.unpack_from(buf, offset=0)
 
         # Skip this vdex file if there is no dex file section
-        if vdex_header.dex_size == 0:
-            return
+        if vdex_header.vdex_version < b'019\x00':
+            if vdex_header.dex_size == 0:
+                return
+        else:
+            if vdex_header.dex_section_version == b'000\x00':
+                return
+
+        # Skip vdex file header struct
+        offset = VdexHeader.struct_size
+
+        # Skip dex file checksums struct
+        offset += 4 * vdex_header.number_of_dex_files
+
+        # Skip dex section header struct
+        if vdex_header.vdex_version >= b'019\x00':
+            offset += cls.DexSectionHeader.struct_size
+
+        # Calculate the quickening table offset
+        if vdex_header.vdex_version >= b'012\x00':
+            quickening_table_off_size = 4
+        else:
+            quickening_table_off_size = 0
 
         for i in range(vdex_header.number_of_dex_files):
             # Skip quickening_table_off size
@@ -971,8 +1040,11 @@
             dex_file_end = offset + dex_header.file_size
             for s in cls.enumerate_dex_strings_buf(buf, offset):
                 yield s
+
+            # Align to the end of the dex file
             offset = (dex_file_end + 3) // 4 * 4
 
+
     @classmethod
     def enumerate_dex_strings_vdex(cls, vdex_file_path):
         with open(vdex_file_path, 'rb') as vdex_file:
@@ -1510,10 +1582,13 @@
         """Read ro.vendor.version property from vendor partitions."""
         for vendor_dir in vendor_dirs:
             path = os.path.join(vendor_dir, 'default.prop')
-            with open(path, 'r') as property_file:
-                result = cls._get_property(property_file, 'ro.vndk.version')
-                if result is not None:
-                    return result
+            try:
+                with open(path, 'r') as property_file:
+                    result = cls._get_property(property_file, 'ro.vndk.version')
+                    if result is not None:
+                        return result
+            except FileNotFoundError:
+                pass
         return None
 
 
@@ -2619,6 +2694,9 @@
                 continue
         except FileNotFoundError:
             continue
+        except:
+            print('error: Failed to parse', path, file=sys.stderr)
+            raise
 
         # Skip the file that does not call System.loadLibrary()
         if 'loadLibrary' not in strs: