[automerger skipped] Merge changes from topic "ota_for_generic_boot_image" into sc-dev am: 9bbb60e141 -s ours

am skip reason: Merged-In I43dd8e206bcd1c9f3f5f51869f126791c50af01a with SHA-1 b1627779fb is already in history

Original change: https://googleplex-android-review.googlesource.com/c/platform/build/+/15289192

Change-Id: Ic1700a13cb5e765523cd470c7fee2bcdd7fc0c52
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 47fd53a..639c4ef 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -757,6 +757,9 @@
 # vendor-ramdisk renamed to vendor_ramdisk
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor-ramdisk)
 
+# Common R directory has been removed.
+$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/R)
+
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
 # ************************************************
diff --git a/core/Makefile b/core/Makefile
index 981e29b..628fd37 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -815,8 +815,13 @@
 
 # We just build this directly to the install location.
 INSTALLED_RAMDISK_TARGET := $(BUILT_RAMDISK_TARGET)
+$(INSTALLED_RAMDISK_TARGET): PRIVATE_DIRS := debug_ramdisk dev metadata mnt proc second_stage_resources sys
 $(INSTALLED_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_RAMDISK_FILES) $(INSTALLED_FILES_FILE_RAMDISK) | $(COMPRESSION_COMMAND_DEPS)
 	$(call pretty,"Target ramdisk: $@")
+	$(hide) mkdir -p $(addprefix $(TARGET_RAMDISK_OUT)/,$(PRIVATE_DIRS))
+ifeq (true,$(BOARD_USES_GENERIC_KERNEL_IMAGE))
+	$(hide) mkdir -p $(addprefix $(TARGET_RAMDISK_OUT)/first_stage_ramdisk/,$(PRIVATE_DIRS))
+endif
 	$(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_RAMDISK_OUT) | $(COMPRESSION_COMMAND) > $@
 
 .PHONY: ramdisk-nodeps
@@ -4263,7 +4268,6 @@
 INTERNAL_OTATOOLS_MODULES := \
   aapt2 \
   add_img_to_target_files \
-  aftltool \
   apksigner \
   append2simg \
   avbtool \
@@ -4801,6 +4805,7 @@
 ifdef BUILDING_VENDOR_BOOT_IMAGE
   $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_RAMDISK_FILES)
   $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS)
+  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_BOOTCONFIG_TARGET)
   # The vendor ramdisk may be built from the recovery ramdisk.
   ifeq (true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT))
     $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP)
@@ -5392,25 +5397,34 @@
 endif # BUILD_OS == linux
 
 DEXPREOPT_CONFIG_ZIP := $(PRODUCT_OUT)/dexpreopt_config.zip
-$(DEXPREOPT_CONFIG_ZIP): $(FULL_SYSTEMIMAGE_DEPS) \
-	    $(INTERNAL_RAMDISK_FILES) \
-	    $(INTERNAL_USERDATAIMAGE_FILES) \
-	    $(INTERNAL_VENDORIMAGE_FILES) \
-	    $(INTERNAL_PRODUCTIMAGE_FILES) \
-	    $(INTERNAL_SYSTEM_EXTIMAGE_FILES) \
-	    $(DEX_PREOPT_CONFIG_FOR_MAKE) \
-	    $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE)
+
+$(DEXPREOPT_CONFIG_ZIP): $(INSTALLED_SYSTEMIMAGE_TARGET) \
+    $(INSTALLED_VENDORIMAGE_TARGET) \
+    $(INSTALLED_ODMIMAGE_TARGET) \
+    $(INSTALLED_PRODUCTIMAGE_TARGET) \
+
+ifeq (,$(TARGET_BUILD_UNBUNDLED))
+$(DEXPREOPT_CONFIG_ZIP): $(DEX_PREOPT_CONFIG_FOR_MAKE) \
+	  $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE) \
+
+endif
 
 $(DEXPREOPT_CONFIG_ZIP): $(SOONG_ZIP)
 	$(hide) mkdir -p $(dir $@) $(PRODUCT_OUT)/dexpreopt_config
+
+ifeq (,$(TARGET_BUILD_UNBUNDLED))
 ifneq (,$(DEX_PREOPT_CONFIG_FOR_MAKE))
 	$(hide) cp $(DEX_PREOPT_CONFIG_FOR_MAKE) $(PRODUCT_OUT)/dexpreopt_config
 endif
 ifneq (,$(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE))
 	$(hide) cp $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE) $(PRODUCT_OUT)/dexpreopt_config
 endif
+endif #!TARGET_BUILD_UNBUNDLED
 	$(hide) $(SOONG_ZIP) -d -o $@ -C $(PRODUCT_OUT)/dexpreopt_config -D $(PRODUCT_OUT)/dexpreopt_config
 
+.PHONY: dexpreopt_config_zip
+dexpreopt_config_zip: $(DEXPREOPT_CONFIG_ZIP)
+
 # -----------------------------------------------------------------
 # A zip of the symbols directory.  Keep the full paths to make it
 # more obvious where these files came from.
diff --git a/core/OWNERS b/core/OWNERS
index 5456d4f..8794434 100644
--- a/core/OWNERS
+++ b/core/OWNERS
@@ -1,5 +1,5 @@
-per-file dex_preopt*.mk = ngeoffray@google.com,calin@google.com,mathewi@google.com,dbrazdil@google.com
-per-file verify_uses_libraries.sh = ngeoffray@google.com,calin@google.com,mathieuc@google.com
+per-file dex_preopt*.mk = ngeoffray@google.com,calin@google.com,mathewi@google.com,skvadrik@google.com
+per-file verify_uses_libraries.sh = ngeoffray@google.com,calin@google.com,skvadrik@google.com
 
 # For version updates
 per-file version_defaults.mk = aseaton@google.com,elisapascual@google.com,lubomir@google.com,pscovanner@google.com
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 9673c45..878bd2d 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -28,6 +28,8 @@
 
 $(call add_soong_config_var,ANDROID,TARGET_ENABLE_MEDIADRM_64)
 $(call add_soong_config_var,ANDROID,BOARD_USES_ODMIMAGE)
+$(call add_soong_config_var,ANDROID,BOARD_USES_RECOVERY_AS_BOOT)
+$(call add_soong_config_var,ANDROID,BOARD_BUILD_SYSTEM_ROOT_IMAGE)
 
 # TODO(b/172480615): Remove when platform uses ART Module prebuilts by default.
 ifeq (,$(filter art_module,$(SOONG_CONFIG_NAMESPACES)))
diff --git a/core/board_config.mk b/core/board_config.mk
index 1b08f9a..53dbb92 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -766,8 +766,8 @@
 endif
 
 ###########################################
-# APEXes are by default flattened, i.e. non-updatable.
-# It can be unflattened (and updatable) by inheriting from
+# APEXes are by default flattened, i.e. non-updatable, if not building unbundled
+# apps. It can be unflattened (and updatable) by inheriting from
 # updatable_apex.mk
 #
 # APEX flattening can also be forcibly enabled (resp. disabled) by
@@ -776,7 +776,7 @@
 ifdef OVERRIDE_TARGET_FLATTEN_APEX
   TARGET_FLATTEN_APEX := $(OVERRIDE_TARGET_FLATTEN_APEX)
 else
-  ifeq (,$(TARGET_FLATTEN_APEX))
+  ifeq (,$(TARGET_BUILD_APPS)$(TARGET_FLATTEN_APEX))
     TARGET_FLATTEN_APEX := true
   endif
 endif
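
The `ifeq (,$(TARGET_BUILD_APPS)$(TARGET_FLATTEN_APEX))` check above uses the Make idiom of concatenating two variables and testing the result for emptiness, i.e. the flattened default only applies when neither variable is set. A minimal Python sketch of that decision logic (illustrative only; the function name is made up and this is not part of the build system):

    def default_target_flatten_apex(override, target_build_apps, target_flatten_apex):
        # An explicit OVERRIDE_TARGET_FLATTEN_APEX always wins.
        if override:
            return override
        # ifeq (,$(A)$(B)) means "both A and B are empty": APEXes default to
        # flattened only when not building unbundled apps and no value was given.
        if not (target_build_apps or target_flatten_apex):
            return "true"
        return target_flatten_apex
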
diff --git a/core/config.mk b/core/config.mk
index 93c5db1..ceade45 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -445,6 +445,11 @@
 ifneq ($(filter true,$(SOONG_ALLOW_MISSING_DEPENDENCIES)),)
 ALLOW_MISSING_DEPENDENCIES := true
 endif
+# Mac builds default to ALLOW_MISSING_DEPENDENCIES, at least until host
+# tools are no longer enabled by default for Mac.
+ifeq ($(HOST_OS),darwin)
+  ALLOW_MISSING_DEPENDENCIES := true
+endif
 .KATI_READONLY := ALLOW_MISSING_DEPENDENCIES
 
 TARGET_BUILD_USE_PREBUILT_SDKS :=
@@ -603,7 +608,7 @@
 # Path to tools.jar
 HOST_JDK_TOOLS_JAR := $(ANDROID_JAVA8_HOME)/lib/tools.jar
 
-APICHECK_COMMAND := $(JAVA) -Xmx4g -jar $(APICHECK) --no-banner --compatible-output=no
+APICHECK_COMMAND := $(JAVA) -Xmx4g -jar $(APICHECK) --no-banner
 
 # Boolean variable determining if the allow list for compatible properties is enabled
 PRODUCT_COMPATIBLE_PROPERTY := true
@@ -792,6 +797,7 @@
     28.0 \
     29.0 \
     30.0 \
+    31.0 \
 
 .KATI_READONLY := \
     PLATFORM_SEPOLICY_COMPAT_VERSIONS \
diff --git a/core/definitions.mk b/core/definitions.mk
index c5fe76b..0fd023a 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -598,7 +598,7 @@
 $(_dir)/$(1).meta_lic : $(_deps) $(_notices) $(foreach b,$(_tgts), $(_dir)/$(b).meta_module) build/make/tools/build-license-metadata.sh
 	rm -f $$@
 	mkdir -p $$(dir $$@)
-	build/make/tools/build-license-metadata.sh -k $$(PRIVATE_KINDS) -c $$(PRIVATE_CONDITIONS) -n $$(PRIVATE_NOTICES) -d $$(PRIVATE_NOTICE_DEPS) -m $$(PRIVATE_INSTALL_MAP) -t $$(PRIVATE_TARGETS) $$(if $$(PRIVATE_IS_CONTAINER),-is_container) -p $$(PRIVATE_PACKAGE_NAME) -o $$@
+	build/make/tools/build-license-metadata.sh -k $$(PRIVATE_KINDS) -c $$(PRIVATE_CONDITIONS) -n $$(PRIVATE_NOTICES) -d $$(PRIVATE_NOTICE_DEPS) -m $$(PRIVATE_INSTALL_MAP) -t $$(PRIVATE_TARGETS) $$(if $$(PRIVATE_IS_CONTAINER),-is_container) -p '$$(PRIVATE_PACKAGE_NAME)' -o $$@
 
 .PHONY: $(1).meta_lic
 $(1).meta_lic : $(_dir)/$(1).meta_lic
@@ -1106,11 +1106,11 @@
 $(hide) mkdir -p $(dir $@)
 $(hide) $(BCC_COMPAT) -O3 -o $(dir $@)/$(notdir $(<:.bc=.o)) -fPIC -shared \
   -rt-path $(RS_PREBUILT_CLCORE) -mtriple $(RS_COMPAT_TRIPLE) $<
-$(hide) $(PRIVATE_CXX_LINK) -shared -Wl,-soname,$(notdir $@) -nostdlib \
+$(hide) $(PRIVATE_CXX_LINK) -fuse-ld=lld -target $(CLANG_TARGET_TRIPLE) -shared -Wl,-soname,$(notdir $@) -nostdlib \
   -Wl,-rpath,\$$ORIGIN/../lib \
   $(dir $@)/$(notdir $(<:.bc=.o)) \
   $(RS_PREBUILT_COMPILER_RT) \
-  -o $@ $(CLANG_TARGET_GLOBAL_LDFLAGS) -Wl,--hash-style=sysv \
+  -o $@ $(CLANG_TARGET_GLOBAL_LLDFLAGS) -Wl,--hash-style=sysv \
   -L $(SOONG_OUT_DIR)/ndk/platforms/android-$(PRIVATE_SDK_VERSION)/arch-$(TARGET_ARCH)/usr/lib64 \
   -L $(SOONG_OUT_DIR)/ndk/platforms/android-$(PRIVATE_SDK_VERSION)/arch-$(TARGET_ARCH)/usr/lib \
   $(call intermediates-dir-for,SHARED_LIBRARIES,libRSSupport)/libRSSupport.so \
@@ -1928,21 +1928,10 @@
 # b/37750224
 AAPT_ASAN_OPTIONS := ASAN_OPTIONS=detect_leaks=0
 
-# Search for generated R.java/Manifest.java in $1, copy the found R.java as $2.
-# Also copy them to a central 'R' directory to make it easier to add the files to an IDE.
+# Search for generated R.java in $1, copy the found R.java as $2.
 define find-generated-R.java
-$(hide) for GENERATED_MANIFEST_FILE in `find $(1) \
-  -name Manifest.java 2> /dev/null`; do \
-    dir=`awk '/package/{gsub(/\./,"/",$$2);gsub(/;/,"",$$2);print $$2;exit}' $$GENERATED_MANIFEST_FILE`; \
-    mkdir -p $(TARGET_COMMON_OUT_ROOT)/R/$$dir; \
-    cp $$GENERATED_MANIFEST_FILE $(TARGET_COMMON_OUT_ROOT)/R/$$dir; \
-  done;
 $(hide) for GENERATED_R_FILE in `find $(1) \
   -name R.java 2> /dev/null`; do \
-    dir=`awk '/package/{gsub(/\./,"/",$$2);gsub(/;/,"",$$2);print $$2;exit}' $$GENERATED_R_FILE`; \
-    mkdir -p $(TARGET_COMMON_OUT_ROOT)/R/$$dir; \
-    cp $$GENERATED_R_FILE $(TARGET_COMMON_OUT_ROOT)/R/$$dir \
-      || exit 31; \
     cp $$GENERATED_R_FILE $(2) || exit 32; \
   done;
 @# Ensure that the target file is always created, i.e. also in case we did not
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index a2837f3..fcdfa82 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -278,6 +278,7 @@
 my_dexpreopt_image_locations_on_host :=
 my_dexpreopt_image_locations_on_device :=
 my_dexpreopt_infix := boot
+my_create_dexpreopt_config :=
 ifeq (true, $(DEXPREOPT_USE_ART_IMAGE))
   my_dexpreopt_infix := art
 endif
@@ -293,7 +294,16 @@
       LOCAL_UNCOMPRESS_DEX := true
     endif
   endif
+  my_create_dexpreopt_config := true
+endif
 
+# dexpreopt is disabled when TARGET_BUILD_UNBUNDLED_IMAGE is true, but the
+# dexpreopt config files are still required for dexpreopting in post-processing.
+ifeq ($(TARGET_BUILD_UNBUNDLED_IMAGE),true)
+  my_create_dexpreopt_config := true
+endif
+
+ifeq ($(my_create_dexpreopt_config), true)
   ifeq ($(LOCAL_MODULE_CLASS),JAVA_LIBRARIES)
     my_module_multilib := $(LOCAL_MULTILIB)
     # If the module is not an SDK library and it's a system server jar, only preopt the primary arch.
@@ -402,8 +412,6 @@
 
   my_dexpreopt_config := $(intermediates)/dexpreopt.config
   my_dexpreopt_config_for_postprocessing := $(PRODUCT_OUT)/dexpreopt_config/$(LOCAL_MODULE)_dexpreopt.config
-  my_dexpreopt_script := $(intermediates)/dexpreopt.sh
-  my_dexpreopt_zip := $(intermediates)/dexpreopt.zip
   my_dexpreopt_config_merger := $(BUILD_SYSTEM)/dex_preopt_config_merger.py
 
   $(my_dexpreopt_config): $(my_dexpreopt_dep_configs) $(my_dexpreopt_config_merger)
@@ -416,6 +424,13 @@
 	echo -e -n '$(subst $(newline),\n,$(subst ','\'',$(subst \,\\,$(PRIVATE_CONTENTS))))' > $@
 	$(PRIVATE_CONFIG_MERGER) $@ $(PRIVATE_DEP_CONFIGS)
 
+$(eval $(call copy-one-file,$(my_dexpreopt_config),$(my_dexpreopt_config_for_postprocessing)))
+
+$(LOCAL_INSTALLED_MODULE): $(my_dexpreopt_config_for_postprocessing)
+
+ifdef LOCAL_DEX_PREOPT
+  my_dexpreopt_script := $(intermediates)/dexpreopt.sh
+  my_dexpreopt_zip := $(intermediates)/dexpreopt.zip
   .KATI_RESTAT: $(my_dexpreopt_script)
   $(my_dexpreopt_script): PRIVATE_MODULE := $(LOCAL_MODULE)
   $(my_dexpreopt_script): PRIVATE_GLOBAL_SOONG_CONFIG := $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE)
@@ -431,8 +446,6 @@
 	-dexpreopt_script $@ \
 	-out_dir $(OUT_DIR)
 
-  $(eval $(call copy-one-file,$(my_dexpreopt_config),$(my_dexpreopt_config_for_postprocessing)))
-
   my_dexpreopt_deps := $(my_dex_jar)
   my_dexpreopt_deps += $(if $(my_process_profile),$(LOCAL_DEX_PREOPT_PROFILE))
   my_dexpreopt_deps += \
@@ -468,7 +481,6 @@
 
   $(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := $(LOCAL_POST_INSTALL_CMD)
   $(LOCAL_INSTALLED_MODULE): $(my_dexpreopt_zip)
-  $(LOCAL_INSTALLED_MODULE): $(my_dexpreopt_config_for_postprocessing)
 
   $(my_all_targets): $(my_dexpreopt_zip)
 
@@ -477,3 +489,4 @@
   my_dexpreopt_zip :=
   my_dexpreopt_config_for_postprocessing :=
 endif # LOCAL_DEX_PREOPT
+endif # my_create_dexpreopt_config
\ No newline at end of file
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 8c25086..bb1aa1e 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -293,8 +293,11 @@
 #################################################################
 # Set up minimal BOOTCLASSPATH list of jars to build/execute
 # java code with dalvikvm/art.
-# Jars present in the ART apex. These should match exactly the list of
-# Java libraries in the ART apex build rule.
+# Jars present in the ART apex. These should match exactly the list of Java
+# libraries in art-bootclasspath-fragment. The APEX variant name
+# (com.android.art) is the same regardless which Soong module provides the ART
+# APEX. See the long comment in build/soong/java/dexprepopt_bootjars.go for
+# details.
 ART_APEX_JARS := \
     com.android.art:core-oj \
     com.android.art:core-libart \
diff --git a/core/java_renderscript.mk b/core/java_renderscript.mk
index 572d6e4..055ff14 100644
--- a/core/java_renderscript.mk
+++ b/core/java_renderscript.mk
@@ -107,7 +107,7 @@
 # Prevent these from showing up on the device
 # One exception is librsjni.so, which is needed for
 # both native path and compat path.
-rs_jni_lib := $(call intermediates-dir-for,SHARED_LIBRARIES,librsjni.so)/librsjni.so
+rs_jni_lib := $(call intermediates-dir-for,SHARED_LIBRARIES,librsjni)/librsjni.so
 LOCAL_JNI_SHARED_LIBRARIES += librsjni
 
 ifneq (,$(TARGET_BUILD_USE_PREBUILT_SDKS)$(FORCE_BUILD_RS_COMPAT))
diff --git a/core/main.mk b/core/main.mk
index 6f0277e..72478da 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -320,6 +320,13 @@
     ro.vendor.build.dont_use_vabc=true
 endif
 
+# Set the flag in vendor so that VTS knows whether the new fingerprint format is in use when
+# the system images are replaced by GSI.
+ifeq ($(BOARD_USE_VBMETA_DIGTEST_IN_FINGERPRINT),true)
+ADDITIONAL_VENDOR_PROPERTIES += \
+    ro.vendor.build.fingerprint_has_digest=1
+endif
+
 ADDITIONAL_VENDOR_PROPERTIES += \
     ro.vendor.build.security_patch=$(VENDOR_SECURITY_PATCH) \
     ro.product.board=$(TARGET_BOOTLOADER_BOARD_NAME) \
@@ -542,7 +549,12 @@
 
 $(foreach mk,$(subdir_makefiles),$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] including $(mk) ...)$(eval include $(mk)))
 
+# For an unbundled image, we can skip blueprint_tools because an unbundled
+# image aims to remove a large number of framework projects from the manifest,
+# so the sources or dependencies for these tools may be missing from the tree.
+ifeq (,$(TARGET_BUILD_UNBUNDLED_IMAGE))
 droid_targets : blueprint_tools
+endif
 
 endif # dont_bother
 
@@ -1727,7 +1739,6 @@
   $(call dist-for-goals, droidcore, \
     $(BUILT_OTATOOLS_PACKAGE) \
     $(APPCOMPAT_ZIP) \
-    $(DEXPREOPT_CONFIG_ZIP) \
     $(DEXPREOPT_TOOLS_ZIP) \
   )
 
@@ -1775,6 +1786,7 @@
     $(INSTALLED_ANDROID_INFO_TXT_TARGET) \
     $(INSTALLED_MISC_INFO_TARGET) \
     $(INSTALLED_RAMDISK_TARGET) \
+    $(DEXPREOPT_CONFIG_ZIP) \
   )
 
   # Put a copy of the radio/bootloader files in the dist dir.
diff --git a/core/product.mk b/core/product.mk
index 015fe44..7e8445e 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -213,12 +213,18 @@
 # The list of product-specific kernel header dirs
 _product_list_vars += PRODUCT_VENDOR_KERNEL_HEADERS
 
-# A list of module names of BOOTCLASSPATH (jar files)
+# A list of module names in BOOTCLASSPATH (jar files). Each module may be
+# prefixed with "<apex>:", which identifies the APEX that provides it. APEXes
+# are identified by their "variant" names, i.e. their `apex_name` values in
+# Soong, which default to the `name` values. The prefix can also be "platform:"
+# or "system_ext:", and defaults to "platform:" if left out. See the long
+# comment in build/soong/java/dexprepopt_bootjars.go for details.
 _product_list_vars += PRODUCT_BOOT_JARS
 
-# A list of extra BOOTCLASSPATH jars (to be appended after common jars).
-# Products that include device-specific makefiles before AOSP makefiles should use this
-# instead of PRODUCT_BOOT_JARS, so that device-specific jars go after common jars.
+# A list of extra BOOTCLASSPATH jars (to be appended after common jars),
+# following the same format as PRODUCT_BOOT_JARS. Products that include
+# device-specific makefiles before AOSP makefiles should use this instead of
+# PRODUCT_BOOT_JARS, so that device-specific jars go after common jars.
 _product_list_vars += PRODUCT_BOOT_JARS_EXTRA
 
 _product_single_value_vars += PRODUCT_SUPPORTS_BOOT_SIGNER
@@ -389,7 +395,8 @@
 _product_single_value_vars += PRODUCT_BUILD_VBMETA_IMAGE
 _product_single_value_vars += PRODUCT_BUILD_SUPER_EMPTY_IMAGE
 
-# List of boot jars delivered via apex
+# List of boot jars delivered via updatable APEXes, following the same format as
+# PRODUCT_BOOT_JARS.
 _product_list_vars += PRODUCT_UPDATABLE_BOOT_JARS
 
 # If set, device uses virtual A/B.
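
The PRODUCT_BOOT_JARS comment above describes the "<apex>:<jar>" entry format, with "platform:" assumed when no prefix is given. A short Python sketch of how such an entry splits (the helper name is hypothetical and purely illustrative):

    def split_boot_jar(entry):
        # "<apex>:<jar>" -> (apex, jar); a bare jar name defaults to "platform".
        apex, sep, jar = entry.partition(":")
        if not sep:
            return "platform", entry
        return apex, jar

    # split_boot_jar("com.android.i18n:core-icu4j") -> ("com.android.i18n", "core-icu4j")
    # split_boot_jar("framework")                   -> ("platform", "framework")
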
diff --git a/core/product_config.mk b/core/product_config.mk
index 5c85fb8..53bc9dd 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -249,6 +249,15 @@
 
 PRODUCT_BOOT_JARS := $(call qualify-platform-jars,$(PRODUCT_BOOT_JARS))
 
+# b/191127295: force core-icu4j onto the boot image. It is a jar from a non-updatable APEX, but it
+# has historically been part of the boot image, even though APEX jars are not meant to be part of
+# the boot image.
+# TODO(b/191686720): remove PRODUCT_UPDATABLE_BOOT_JARS to avoid a special handling of core-icu4j
+# in make rules.
+PRODUCT_UPDATABLE_BOOT_JARS := $(filter-out com.android.i18n:core-icu4j,$(PRODUCT_UPDATABLE_BOOT_JARS))
+# All APEX jars come after /system and /system_ext jars, so core-icu4j is added at the end of the list.
+PRODUCT_BOOT_JARS += com.android.i18n:core-icu4j
+
 # Replaces references to overridden boot jar modules in a boot jars variable.
 # $(1): Name of a boot jars variable with <apex>:<jar> pairs.
 define replace-boot-jar-module-overrides
diff --git a/core/product_config.rbc b/core/product_config.rbc
index 111e759..8e85c4b 100644
--- a/core/product_config.rbc
+++ b/core/product_config.rbc
@@ -62,7 +62,8 @@
     elif _options.format == "pretty":
         print(attr, "=", repr(value))
     elif _options.format == "make":
-        print(attr, ":=", value)
+        # Collapse whitespace runs to a single space
+        print(attr, ":=", _mkstrip(value))
     else:
         fail("bad output format", _options.format)
 
@@ -432,6 +433,66 @@
     """Prints info."""
     print(message)
 
+
+def __mkpatsubst_word(parsed_pattern,parsed_subst, word):
+    (before, after) = parsed_pattern
+    if not word.startswith(before):
+        return word
+    if not word.endswith(after):
+        return word
+    if len(parsed_subst) < 2:
+        return parsed_subst[0]
+    return parsed_subst[0] + word[len(before):len(word) - len(after)] + parsed_subst[1]
+
+
+def _mkpatsubst(pattern, replacement, s):
+    """Emulates Make's patsubst.
+
+    Tokenizes `s` (unless it is already a list), and then performs a simple
+    wildcard substitution (in other words, `foo%bar` pattern is equivalent to
+    the regular expression `^foo(.*)bar$`, and the first `%` in the replacement
+    is `$1` in regex terms). Escaping `%` is not supported.
+    """
+    if pattern.find("\\") >= 0:
+        fail("'\\' in pattern is not allowed")
+    parsed_pattern = pattern.split("%", 1)
+    words = s if type(s) == "list" else _mkstrip(s).split(" ")
+    if len(parsed_pattern) == 1:
+        out_words = [ replacement if x == pattern else x for x in words]
+    else:
+        parsed_replacement = replacement.split("%", 1)
+        out_words = [__mkpatsubst_word(parsed_pattern, parsed_replacement, x) for x in words]
+    return out_words if type(s) == "list" else " ".join(out_words)
+
+
+def _mkstrip(s):
+    """Emulates Make's strip.
+
+    That is, removes the string's leading and trailing whitespace characters and
+    replaces any sequence of whitespace characters with a single space.
+    """
+    result = ""
+    was_space = False
+    for ch in s.strip().elems():
+        is_space = ch.isspace()
+        if not is_space:
+            if was_space:
+                result += " "
+            result += ch
+        was_space = is_space
+    return result
+
+def _mksubst(old, new, s):
+    """Emulates Make's subst.
+
+    Replaces each occurrence of 'old' with 'new'.
+    If 's' is a list, applies substitution to each item.
+    """
+    if type(s) == "list":
+        return [e.replace(old, new) for e in s]
+    return s.replace(old, new)
+
+
 def __get_options():
     """Returns struct containing runtime global settings."""
     settings = dict(
@@ -478,7 +539,10 @@
     indirect = _indirect,
     mkinfo = _mkinfo,
     mkerror = _mkerror,
+    mkpatsubst = _mkpatsubst,
     mkwarning = _mkwarning,
+    mkstrip = _mkstrip,
+    mksubst = _mksubst,
     printvars = _printvars,
     product_configuration = _product_configuration,
     require_artifacts_in_path = _require_artifacts_in_path,
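
The _mkpatsubst and _mkstrip helpers added above emulate Make's patsubst and strip in Starlark. As a cross-check against the unit tests added in tests/run.rbc further down, here is a Python sketch of the same single-'%' wildcard semantics (illustrative only, not used by the build):

    def _subst_word(before, after, replacement, word):
        # Mirrors __mkpatsubst_word: non-matching words pass through unchanged;
        # a replacement without '%' is used literally.
        if not (word.startswith(before) and word.endswith(after)):
            return word
        parts = replacement.split("%", 1)
        if len(parts) < 2:
            return parts[0]
        return parts[0] + word[len(before):len(word) - len(after)] + parts[1]

    def mkpatsubst(pattern, replacement, s):
        # Tokenize strings on whitespace; lists are processed element-wise.
        words = s if isinstance(s, list) else s.split()
        if "%" not in pattern:
            out = [replacement if w == pattern else w for w in words]
        else:
            before, after = pattern.split("%", 1)
            out = [_subst_word(before, after, replacement, w) for w in words]
        return out if isinstance(s, list) else " ".join(out)

    # e.g. mkpatsubst("a%z", "A%Z", "acz  bcz") == "AcZ bcz"
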
diff --git a/core/soong_config.mk b/core/soong_config.mk
index ec67560..1d94661 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -37,6 +37,7 @@
 $(call add_json_bool, Allow_missing_dependencies,        $(filter true,$(ALLOW_MISSING_DEPENDENCIES)))
 $(call add_json_bool, Unbundled_build,                   $(TARGET_BUILD_UNBUNDLED))
 $(call add_json_bool, Unbundled_build_apps,              $(TARGET_BUILD_APPS))
+$(call add_json_bool, Unbundled_build_image,             $(TARGET_BUILD_UNBUNDLED_IMAGE))
 $(call add_json_bool, Always_use_prebuilt_sdks,          $(TARGET_BUILD_USE_PREBUILT_SDKS))
 $(call add_json_bool, Skip_boot_jars_check,              $(SKIP_BOOT_JARS_CHECK))
 
diff --git a/core/sysprop.mk b/core/sysprop.mk
index 0fc96e0..be9b1f8 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -98,7 +98,7 @@
     $(eval _option := --allow-dup)\
 )
 
-$(2): $(POST_PROCESS_PROPS) $(INTERNAL_BUILD_ID_MAKEFILE) $(API_FINGERPRINT) $(3) $(6)
+$(2): $(POST_PROCESS_PROPS) $(INTERNAL_BUILD_ID_MAKEFILE) $(3) $(6)
 	$(hide) echo Building $$@
 	$(hide) mkdir -p $$(dir $$@)
 	$(hide) rm -f $$@ && touch $$@
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 663579a..90e1825 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -39,9 +39,9 @@
   include $(INTERNAL_BUILD_ID_MAKEFILE)
 endif
 
-DEFAULT_PLATFORM_VERSION := SP1A
-MIN_PLATFORM_VERSION := SP1A
-MAX_PLATFORM_VERSION := SP1A
+DEFAULT_PLATFORM_VERSION := TP1A
+MIN_PLATFORM_VERSION := TP1A
+MAX_PLATFORM_VERSION := TP1A
 
 ALLOWED_VERSIONS := $(call allowed-platform-versions,\
   $(MIN_PLATFORM_VERSION),\
@@ -93,7 +93,8 @@
 
 # These are the current development codenames, if the build is not a final
 # release build.  If this is a final release build, it is simply "REL".
-PLATFORM_VERSION_CODENAME.SP1A := REL
+
+PLATFORM_VERSION_CODENAME.TP1A := T
 
 ifndef PLATFORM_VERSION_CODENAME
   PLATFORM_VERSION_CODENAME := $(PLATFORM_VERSION_CODENAME.$(TARGET_PLATFORM_VERSION))
@@ -229,7 +230,7 @@
 ifeq (REL,$(PLATFORM_VERSION_CODENAME))
   PLATFORM_SYSTEMSDK_VERSIONS += $(PLATFORM_SDK_VERSION)
 else
-  PLATFORM_SYSTEMSDK_VERSIONS += $(PLATFORM_VERSION_CODENAME)
+  PLATFORM_SYSTEMSDK_VERSIONS += $(subst $(comma),$(space),$(PLATFORM_VERSION_ALL_CODENAMES))
 endif
 PLATFORM_SYSTEMSDK_VERSIONS := $(strip $(sort $(PLATFORM_SYSTEMSDK_VERSIONS)))
 .KATI_READONLY := PLATFORM_SYSTEMSDK_VERSIONS
diff --git a/envsetup.sh b/envsetup.sh
index 8a995c7..77b2247 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -331,15 +331,15 @@
 
 function bazel()
 {
-    local T="$(gettop)"
-    if [ ! "$T" ]; then
-        echo "Couldn't locate the top of the tree.  Try setting TOP."
-        return
+    if which bazel &>/dev/null; then
+        >&2 echo "NOTE: bazel() function sourced from Android's envsetup.sh is being used instead of $(which bazel)"
+        >&2 echo
     fi
 
-    if which bazel &>/dev/null; then
-        >&2 echo "NOTE: bazel() function sourced from envsetup.sh is being used instead of $(which bazel)"
-        >&2 echo
+    local T="$(gettop)"
+    if [ ! "$T" ]; then
+        >&2 echo "Couldn't locate the top of the Android tree. Try setting TOP. This bazel() function cannot be used outside of the AOSP directory."
+        return
     fi
 
     "$T/tools/bazel" "$@"
@@ -703,6 +703,10 @@
     build_build_var_cache
     if [ $? -ne 0 ]
     then
+        if [[ "$product" =~ .*_(eng|user|userdebug) ]]
+        then
+            echo "Did you mean -${product/*_/}? (dash instead of underscore)"
+        fi
         return 1
     fi
     export TARGET_PRODUCT=$(get_build_var TARGET_PRODUCT)
@@ -1687,12 +1691,36 @@
     if T="$(gettop)"; then
       _wrap_build "$T/build/soong/soong_ui.bash" --build-mode --${bc} --dir="$(pwd)" "$@"
     else
-      echo "Couldn't locate the top of the tree. Try setting TOP."
+      >&2 echo "Couldn't locate the top of the tree. Try setting TOP."
+      return 1
+    fi
+)
+
+# Convenience entry point (like m) to use Bazel in AOSP.
+function b()
+(
+    # Generate BUILD, bzl files into the synthetic Bazel workspace (out/soong/workspace).
+    _trigger_build "all-modules" nothing GENERATE_BAZEL_FILES=true USE_BAZEL_ANALYSIS= || return 1
+    # Then, run Bazel using the synthetic workspace as the --package_path.
+    if [[ -z "$@" ]]; then
+        # If there are no args, show help.
+        bazel help
+    else
+        # Else, always run with the bp2build configuration, which sets Bazel's package path to the synthetic workspace.
+        bazel "$@" --config=bp2build
     fi
 )
 
 function m()
 (
+    if [[ "${USE_BAZEL_ANALYSIS}" =~ ^(true|1)$ ]]; then
+        # This only short-circuits to Bazel for a single module target now.
+        b cquery "@soong_injection//module_name_to_label:$@" 2>/dev/null
+        if [[ $? == 0 ]]; then
+            bazel build "@soong_injection//module_name_to_label:$@" --config=bp2build
+            return $?
+        fi
+    fi
     _trigger_build "all-modules" "$@"
 )
 
@@ -1843,6 +1871,16 @@
     fi
 }
 
+# Source the setup scripts needed to run the build with Remote Execution.
+function source_rbe() {
+    local T=$(gettop)
+
+    if [[ "x$USE_RBE" != "x" && "$USE_RBE" != "false" ]]; then
+        . $T/build/make/rbesetup.sh --skip-envsetup
+    fi
+}
+
 validate_current_shell
 source_vendorsetup
+source_rbe
 addcompletions
diff --git a/rbesetup.sh b/rbesetup.sh
index ec39e6e..3b0e7cf 100644
--- a/rbesetup.sh
+++ b/rbesetup.sh
@@ -24,8 +24,11 @@
 }
 
 # This function needs to run first as the remaining defining functions may be
-# using the envsetup.sh defined functions.
-_source_env_setup_script || return
+# using the envsetup.sh defined functions. Skip this part if this script is already
+# being invoked from envsetup.sh.
+if [[ "$1" != "--skip-envsetup" ]]; then
+  _source_env_setup_script || return
+fi
 
 # This function prefixes the given command with appropriate variables needed
 # for the build to be executed with RBE.
diff --git a/target/board/Android.mk b/target/board/Android.mk
index 4dd6b17..142270e 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -24,8 +24,10 @@
 	$(call pretty,"Generated: ($@)")
 ifdef board_info_txt
 	$(hide) grep -v '#' $< > $@
-else
+else ifdef TARGET_BOOTLOADER_BOARD_NAME
 	$(hide) echo "board=$(TARGET_BOOTLOADER_BOARD_NAME)" > $@
+else
+	$(hide) echo "" > $@
 endif
 
 # Copy compatibility metadata to the device.
diff --git a/target/board/BoardConfigGkiCommon.mk b/target/board/BoardConfigGkiCommon.mk
index 1a8c6b1..5173012 100644
--- a/target/board/BoardConfigGkiCommon.mk
+++ b/target/board/BoardConfigGkiCommon.mk
@@ -29,9 +29,6 @@
 BOARD_USES_RECOVERY_AS_BOOT :=
 TARGET_NO_KERNEL := false
 BOARD_USES_GENERIC_KERNEL_IMAGE := true
-BOARD_KERNEL_MODULE_INTERFACE_VERSIONS := \
-    5.4-android12-unstable \
-    5.10-android12-unstable \
 
 # Copy boot image in $OUT to target files. This is defined for targets where
 # the installed GKI APEXes are built from source.
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 180c72b..ce60c5e 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -52,6 +52,7 @@
     com.android.adbd \
     com.android.appsearch \
     com.android.conscrypt \
+    com.android.cronet \
     com.android.extservices \
     com.android.i18n \
     com.android.ipsec \
@@ -132,6 +133,7 @@
     libaudioeffect_jni \
     libbinder \
     libbinder_ndk \
+    libbinder_rpc_unstable \
     libc.bootstrap \
     libcamera2ndk \
     libcutils \
@@ -214,7 +216,7 @@
     mtpd \
     ndc \
     netd \
-    NetworkStack \
+    NetworkStackNext \
     odsign \
     org.apache.http.legacy \
     otacerts \
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index 07b3361..ffa6b7c 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -82,3 +82,9 @@
 # /vendor. TODO(b/141648565): Don't install these unless they're needed.
 PRODUCT_PACKAGES += \
     applypatch
+
+# Base modules and settings for the debug ramdisk, which is then packed
+# into a boot-debug.img and a vendor_boot-debug.img.
+PRODUCT_PACKAGES += \
+    adb_debug.prop \
+    userdebug_plat_sepolicy.cil
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 0fa9058..8fe5cfd 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -45,14 +45,12 @@
     voip-common \
     ims-common
 
-# Non-updatable APEX jars. Keep the list sorted.
-PRODUCT_BOOT_JARS += \
-    com.android.i18n:core-icu4j
-
-# Updatable APEX boot jars. Keep the list sorted by module names and then library names.
+# APEX boot jars. Keep the list sorted by module names and then library names.
+# Note: core-icu4j is moved back to PRODUCT_BOOT_JARS in product_config.mk at a later stage.
 PRODUCT_UPDATABLE_BOOT_JARS := \
     com.android.appsearch:framework-appsearch \
     com.android.conscrypt:conscrypt \
+    com.android.i18n:core-icu4j \
     com.android.ipsec:android.net.ipsec.ike \
     com.android.media:updatable-media \
     com.android.mediaprovider:framework-mediaprovider \
@@ -65,9 +63,10 @@
     com.android.tethering:framework-tethering \
     com.android.wifi:framework-wifi
 
-# Updatable APEX system server jars. Keep the list sorted by module names and then library names.
+# APEX system server jars. Keep the list sorted by module names and then library names.
 PRODUCT_UPDATABLE_SYSTEM_SERVER_JARS := \
     com.android.appsearch:service-appsearch \
+    com.android.art:service-art \
     com.android.media:service-media-s \
     com.android.permission:service-permission \
 
diff --git a/target/product/generic_ramdisk.mk b/target/product/generic_ramdisk.mk
index ae81329..80d34be 100644
--- a/target/product/generic_ramdisk.mk
+++ b/target/product/generic_ramdisk.mk
@@ -25,6 +25,7 @@
 
 # Debug ramdisk
 PRODUCT_PACKAGES += \
+    adb_debug.prop \
     userdebug_plat_sepolicy.cil \
 
 _my_paths := \
diff --git a/tests/run.rbc b/tests/run.rbc
index b13f835..4cda180 100644
--- a/tests/run.rbc
+++ b/tests/run.rbc
@@ -1,4 +1,3 @@
-
 # Copyright 2021 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -28,6 +27,22 @@
     if expected != actual:
         fail("Expected %s, got %s" % (expected, actual))
 
+# Unit tests for non-trivial runtime functions
+assert_eq("", rblf.mkstrip(" \n \t    "))
+assert_eq("a b c", rblf.mkstrip("  a b   \n  c \t"))
+
+assert_eq("b1 b2", rblf.mksubst("a", "b", "a1 a2"))
+assert_eq(["b1", "x2"], rblf.mksubst("a", "b", ["a1", "x2"]))
+
+assert_eq("ABcdYZ", rblf.mkpatsubst("ab%yz", "AB%YZ", "abcdyz"))
+assert_eq("bcz", rblf.mkpatsubst("a%z", "A%Z", "bcz"))
+assert_eq(["Ay", "Az"], rblf.mkpatsubst("a%", "A%", ["ay", "az"]))
+assert_eq("AcZ bcz", rblf.mkpatsubst("a%z", "A%Z", "acz  bcz"))
+assert_eq("Abcd", rblf.mkpatsubst("a%", "A%", "abcd"))
+assert_eq("abcZ", rblf.mkpatsubst("%z", "%Z", "abcz"))
+assert_eq("azx b", rblf.mkpatsubst("az", "AZ", "azx  b"))
+assert_eq(["azx", "b"], rblf.mkpatsubst("az", "AZ", ["azx", "b"]))
+assert_eq("ABC", rblf.mkpatsubst("abc", "ABC", "abc"))
 
 globals, config = rblf.product_configuration("test/device", init)
 assert_eq(
diff --git a/tools/fs_config/Android.mk b/tools/fs_config/Android.mk
index 10d25e0..63cb4eb 100644
--- a/tools/fs_config/Android.mk
+++ b/tools/fs_config/Android.mk
@@ -27,7 +27,22 @@
 system_android_filesystem_config := system/core/libcutils/include/private/android_filesystem_config.h
 system_capability_header := bionic/libc/kernel/uapi/linux/capability.h
 
-# List of supported vendor, oem, odm, vendor_dlkm, odm_dlkm, product and system_ext Partitions
+# Use snapshots if they exist
+vendor_android_filesystem_config := $(strip \
+  $(if $(filter-out current,$(BOARD_VNDK_VERSION)), \
+    $(SOONG_VENDOR_$(BOARD_VNDK_VERSION)_SNAPSHOT_DIR)/include/$(system_android_filesystem_config)))
+ifeq (,$(wildcard $(vendor_android_filesystem_config)))
+vendor_android_filesystem_config := $(system_android_filesystem_config)
+endif
+
+vendor_capability_header := $(strip \
+  $(if $(filter-out current,$(BOARD_VNDK_VERSION)), \
+    $(SOONG_VENDOR_$(BOARD_VNDK_VERSION)_SNAPSHOT_DIR)/include/$(system_capability_header)))
+ifeq (,$(wildcard $(vendor_capability_header)))
+vendor_capability_header := $(system_capability_header)
+endif
+
+# List of supported vendor, oem, odm, vendor_dlkm and odm_dlkm Partitions
 fs_config_generate_extra_partition_list := $(strip \
   $(if $(BOARD_USES_VENDORIMAGE)$(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE),vendor) \
   $(if $(BOARD_USES_OEMIMAGE)$(BOARD_OEMIMAGE_FILE_SYSTEM_TYPE),oem) \
@@ -206,10 +221,10 @@
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
 include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
 $(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
 	@mkdir -p $(dir $@)
 	$< fsconfig \
 	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
@@ -232,10 +247,10 @@
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
 include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
 $(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
 	@mkdir -p $(dir $@)
 	$< fsconfig \
 	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
@@ -316,10 +331,10 @@
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
 include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
 $(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
 	@mkdir -p $(dir $@)
 	$< fsconfig \
 	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
@@ -342,10 +357,10 @@
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
 include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
 $(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
 	@mkdir -p $(dir $@)
 	$< fsconfig \
 	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
@@ -371,10 +386,10 @@
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_DLKM)/etc
 include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
 $(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
 	@mkdir -p $(dir $@)
 	$< fsconfig \
 	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
@@ -397,10 +412,10 @@
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_DLKM)/etc
 include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
 $(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
 	@mkdir -p $(dir $@)
 	$< fsconfig \
 	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
@@ -426,10 +441,10 @@
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_ODM_DLKM)/etc
 include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
 $(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
 	@mkdir -p $(dir $@)
 	$< fsconfig \
 	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
@@ -452,10 +467,10 @@
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_ODM_DLKM)/etc
 include $(BUILD_SYSTEM)/base_rules.mk
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config)
-$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(system_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
 $(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) $(system_capability_header)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
 	@mkdir -p $(dir $@)
 	$< fsconfig \
 	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 5ee53c8..fc588e4 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -164,6 +164,7 @@
         "releasetools_common",
         "releasetools_verity_utils",
         "apex_manifest",
+        "care_map_proto_py",
     ],
     required: [
         "brillo_update_payload",
@@ -400,7 +401,7 @@
         "releasetools_common",
     ],
     required: [
-        "aapt",
+        "aapt2",
     ],
 }
 
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index babfc7d..b8c812d 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -687,8 +687,10 @@
               os.path.join(OPTIONS.input_tmp, "IMAGES",
                            "{}.img".format(partition_name))))
 
+
 def AddApexInfo(output_zip):
-  apex_infos = GetApexInfoFromTargetFiles(OPTIONS.input_tmp, 'system')
+  apex_infos = GetApexInfoFromTargetFiles(OPTIONS.input_tmp, 'system',
+                                          compressed_only=False)
   apex_metadata_proto = ota_metadata_pb2.ApexMetadata()
   apex_metadata_proto.apex_info.extend(apex_infos)
   apex_info_bytes = apex_metadata_proto.SerializeToString()
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index fa4a152..f2ba321 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -287,7 +287,7 @@
     if "flash_logical_block_size" in prop_dict:
       build_command.extend(["-o", prop_dict["flash_logical_block_size"]])
     # Specify UUID and hash_seed if using mke2fs.
-    if prop_dict["ext_mkuserimg"] == "mkuserimg_mke2fs":
+    if os.path.basename(prop_dict["ext_mkuserimg"]) == "mkuserimg_mke2fs":
       if "uuid" in prop_dict:
         build_command.extend(["-U", prop_dict["uuid"]])
       if "hash_seed" in prop_dict:
@@ -353,6 +353,8 @@
     build_command.extend(["-t", prop_dict["mount_point"]])
     if "timestamp" in prop_dict:
       build_command.extend(["-T", str(prop_dict["timestamp"])])
+    if "block_list" in prop_dict:
+      build_command.extend(["-B", prop_dict["block_list"]])
     build_command.extend(["-L", prop_dict["mount_point"]])
     if (needs_projid):
       build_command.append("--prjquota")
@@ -360,8 +362,9 @@
       build_command.append("--casefold")
     if (needs_compress or prop_dict.get("f2fs_compress") == "true"):
       build_command.append("--compression")
-    if (prop_dict.get("f2fs_compress") == "true"):
+    if (prop_dict.get("mount_point") != "data"):
       build_command.append("--readonly")
+    if (prop_dict.get("f2fs_compress") == "true"):
       build_command.append("--sldc")
       if (prop_dict.get("f2fs_sldc_flags") == None):
         build_command.append(str(0))
diff --git a/tools/releasetools/care_map_pb2.py b/tools/releasetools/care_map_pb2.py
new file mode 100644
index 0000000..06aee25
--- /dev/null
+++ b/tools/releasetools/care_map_pb2.py
@@ -0,0 +1,132 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: bootable/recovery/update_verifier/care_map.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='bootable/recovery/update_verifier/care_map.proto',
+  package='recovery_update_verifier',
+  syntax='proto3',
+  serialized_options=_b('H\003'),
+  serialized_pb=_b('\n0bootable/recovery/update_verifier/care_map.proto\x12\x18recovery_update_verifier\"\x9e\x01\n\x07\x43\x61reMap\x12\x43\n\npartitions\x18\x01 \x03(\x0b\x32/.recovery_update_verifier.CareMap.PartitionInfo\x1aN\n\rPartitionInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06ranges\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 \x01(\t\x12\x13\n\x0b\x66ingerprint\x18\x04 \x01(\tB\x02H\x03\x62\x06proto3')
+)
+
+
+
+
+_CAREMAP_PARTITIONINFO = _descriptor.Descriptor(
+  name='PartitionInfo',
+  full_name='recovery_update_verifier.CareMap.PartitionInfo',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='recovery_update_verifier.CareMap.PartitionInfo.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='ranges', full_name='recovery_update_verifier.CareMap.PartitionInfo.ranges', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='id', full_name='recovery_update_verifier.CareMap.PartitionInfo.id', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='fingerprint', full_name='recovery_update_verifier.CareMap.PartitionInfo.fingerprint', index=3,
+      number=4, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=159,
+  serialized_end=237,
+)
+
+_CAREMAP = _descriptor.Descriptor(
+  name='CareMap',
+  full_name='recovery_update_verifier.CareMap',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='partitions', full_name='recovery_update_verifier.CareMap.partitions', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_CAREMAP_PARTITIONINFO, ],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=79,
+  serialized_end=237,
+)
+
+_CAREMAP_PARTITIONINFO.containing_type = _CAREMAP
+_CAREMAP.fields_by_name['partitions'].message_type = _CAREMAP_PARTITIONINFO
+DESCRIPTOR.message_types_by_name['CareMap'] = _CAREMAP
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+CareMap = _reflection.GeneratedProtocolMessageType('CareMap', (_message.Message,), {
+
+  'PartitionInfo' : _reflection.GeneratedProtocolMessageType('PartitionInfo', (_message.Message,), {
+    'DESCRIPTOR' : _CAREMAP_PARTITIONINFO,
+    '__module__' : 'bootable.recovery.update_verifier.care_map_pb2'
+    # @@protoc_insertion_point(class_scope:recovery_update_verifier.CareMap.PartitionInfo)
+    })
+  ,
+  'DESCRIPTOR' : _CAREMAP,
+  '__module__' : 'bootable.recovery.update_verifier.care_map_pb2'
+  # @@protoc_insertion_point(class_scope:recovery_update_verifier.CareMap)
+  })
+_sym_db.RegisterMessage(CareMap)
+_sym_db.RegisterMessage(CareMap.PartitionInfo)
+
+
+DESCRIPTOR._options = None
+# @@protoc_insertion_point(module_scope)
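
care_map_pb2.py is generated from bootable/recovery/update_verifier/care_map.proto and defines a CareMap message with a repeated PartitionInfo field (name, ranges, id, fingerprint). A minimal usage sketch with the standard protobuf Python runtime (values below are made up for illustration):

    import care_map_pb2

    care_map = care_map_pb2.CareMap()
    info = care_map.partitions.add()           # append one PartitionInfo
    info.name = "system"
    info.ranges = "2,0,256"
    info.id = "ro.build.fingerprint"
    info.fingerprint = "brand/product/device:12/ID/123:user/test-keys"

    data = care_map.SerializeToString()        # bytes suitable for writing out
    parsed = care_map_pb2.CareMap()
    parsed.ParseFromString(data)
    assert parsed.partitions[0].name == "system"
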
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index f678d08..0711af5 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -80,11 +80,6 @@
     self.boot_signer_args = []
     self.verity_signer_path = None
     self.verity_signer_args = []
-    self.aftl_tool_path = None
-    self.aftl_server = None
-    self.aftl_key_path = None
-    self.aftl_manufacturer_key_path = None
-    self.aftl_signer_helper = None
     self.verbose = False
     self.tempfiles = []
     self.device_specific = None
@@ -276,6 +271,9 @@
     args = args[:]
     args[0] = FindHostToolPath(args[0])
 
+  if verbose is None:
+    verbose = OPTIONS.verbose
+
   # Don't log any if caller explicitly says so.
   if verbose:
     logger.info("  Running: \"%s\"", " ".join(args))
@@ -1380,46 +1378,6 @@
   return "{}:{}:{}".format(partition, rollback_index_location, pubkey_path)
 
 
-def ConstructAftlMakeImageCommands(output_image):
-  """Constructs the command to append the aftl image to vbmeta."""
-
-  # Ensure the other AFTL parameters are set as well.
-  assert OPTIONS.aftl_tool_path is not None, 'No aftl tool provided.'
-  assert OPTIONS.aftl_key_path is not None, 'No AFTL key provided.'
-  assert OPTIONS.aftl_manufacturer_key_path is not None, \
-      'No AFTL manufacturer key provided.'
-
-  vbmeta_image = MakeTempFile()
-  os.rename(output_image, vbmeta_image)
-  build_info = BuildInfo(OPTIONS.info_dict, use_legacy_id=True)
-  version_incremental = build_info.GetBuildProp("ro.build.version.incremental")
-  aftltool = OPTIONS.aftl_tool_path
-  server_argument_list = [OPTIONS.aftl_server, OPTIONS.aftl_key_path]
-  aftl_cmd = [aftltool, "make_icp_from_vbmeta",
-              "--vbmeta_image_path", vbmeta_image,
-              "--output", output_image,
-              "--version_incremental", version_incremental,
-              "--transparency_log_servers", ','.join(server_argument_list),
-              "--manufacturer_key", OPTIONS.aftl_manufacturer_key_path,
-              "--algorithm", "SHA256_RSA4096",
-              "--padding", "4096"]
-  if OPTIONS.aftl_signer_helper:
-    aftl_cmd.extend(shlex.split(OPTIONS.aftl_signer_helper))
-  return aftl_cmd
-
-
-def AddAftlInclusionProof(output_image):
-  """Appends the aftl inclusion proof to the vbmeta image."""
-
-  aftl_cmd = ConstructAftlMakeImageCommands(output_image)
-  RunAndCheckOutput(aftl_cmd)
-
-  verify_cmd = ['aftltool', 'verify_image_icp', '--vbmeta_image_path',
-                output_image, '--transparency_log_pub_keys',
-                OPTIONS.aftl_key_path]
-  RunAndCheckOutput(verify_cmd)
-
-
 def AppendGkiSigningArgs(cmd):
   """Append GKI signing arguments for mkbootimg."""
   # e.g., --gki_signing_key path/to/signing_key
@@ -1513,10 +1471,6 @@
 
   RunAndCheckOutput(cmd)
 
-  # Generate the AFTL inclusion proof.
-  if OPTIONS.aftl_server is not None:
-    AddAftlInclusionProof(image_path)
-
 
 def _MakeRamdisk(sourcedir, fs_config_file=None,
                  ramdisk_format=RamdiskFormat.GZ):
@@ -2474,9 +2428,7 @@
          "java_path=", "java_args=", "android_jar_path=", "public_key_suffix=",
          "private_key_suffix=", "boot_signer_path=", "boot_signer_args=",
          "verity_signer_path=", "verity_signer_args=", "device_specific=",
-         "extra=", "logfile=", "aftl_tool_path=", "aftl_server=",
-         "aftl_key_path=", "aftl_manufacturer_key_path=",
-         "aftl_signer_helper="] + list(extra_long_opts))
+         "extra=", "logfile="] + list(extra_long_opts))
   except getopt.GetoptError as err:
     Usage(docstring)
     print("**", str(err), "**")
@@ -2514,16 +2466,6 @@
       OPTIONS.verity_signer_path = a
     elif o in ("--verity_signer_args",):
       OPTIONS.verity_signer_args = shlex.split(a)
-    elif o in ("--aftl_tool_path",):
-      OPTIONS.aftl_tool_path = a
-    elif o in ("--aftl_server",):
-      OPTIONS.aftl_server = a
-    elif o in ("--aftl_key_path",):
-      OPTIONS.aftl_key_path = a
-    elif o in ("--aftl_manufacturer_key_path",):
-      OPTIONS.aftl_manufacturer_key_path = a
-    elif o in ("--aftl_signer_helper",):
-      OPTIONS.aftl_signer_helper = a
     elif o in ("-s", "--device_specific"):
       OPTIONS.device_specific = a
     elif o in ("-x", "--extra"):
diff --git a/tools/releasetools/merge_target_files.py b/tools/releasetools/merge_target_files.py
index 5e6c42d..c1fa9e7 100755
--- a/tools/releasetools/merge_target_files.py
+++ b/tools/releasetools/merge_target_files.py
@@ -78,6 +78,14 @@
       If provided, duplicate APK/APEX keys are ignored and the value from the
       framework is used.
 
+  --rebuild-sepolicy
+      If provided, rebuilds odm.img or vendor.img to include merged sepolicy
+      files. If odm is present then odm is preferred.
+
+  --vendor-otatools otatools.zip
+      If provided, use this otatools.zip when recompiling the odm or vendor
+      image to include sepolicy.
+
   --keep-tmp
       Keep temporary files for debugging purposes.
 """
@@ -129,6 +137,8 @@
 OPTIONS.rebuild_recovery = False
 # TODO(b/150582573): Remove this option.
 OPTIONS.allow_duplicate_apkapex_keys = False
+OPTIONS.vendor_otatools = None
+OPTIONS.rebuild_sepolicy = False
 OPTIONS.keep_tmp = False
 
 # In an item list (framework or vendor), we may see entries that select whole
@@ -666,7 +676,7 @@
       os.path.join(output_target_files_dir, 'META', 'vendor_file_contexts.bin'))
 
 
-def compile_split_sepolicy(product_out, partition_map, output_policy):
+def compile_split_sepolicy(product_out, partition_map):
   """Uses secilc to compile a split sepolicy file.
 
   Depends on various */etc/selinux/* and */etc/vintf/* files within partitions.
@@ -674,7 +684,6 @@
   Args:
     product_out: PRODUCT_OUT directory, containing partition directories.
     partition_map: A map of partition name -> relative path within product_out.
-    output_policy: The name of the output policy created by secilc.
 
   Returns:
     A command list that can be executed to create the compiled sepolicy.
@@ -709,7 +718,7 @@
   # Use the same flags and arguments as selinux.cpp OpenSplitPolicy().
   cmd = ['secilc', '-m', '-M', 'true', '-G', '-N']
   cmd.extend(['-c', kernel_sepolicy_version])
-  cmd.extend(['-o', output_policy])
+  cmd.extend(['-o', os.path.join(product_out, 'META/combined_sepolicy')])
   cmd.extend(['-f', '/dev/null'])
 
   required_policy_files = (
@@ -747,7 +756,8 @@
   Depends on the <partition>/apex/* APEX files within partitions.
 
   Args:
-    output_target_files_dir: Output directory containing merged partition directories.
+    output_target_files_dir: Output directory containing merged partition
+      directories.
     partitions: A list of all the partitions in the output directory.
 
   Raises:
@@ -965,6 +975,92 @@
   add_img_to_target_files.main(add_img_args)
 
 
+def rebuild_image_with_sepolicy(target_files_dir,
+                                vendor_otatools=None,
+                                vendor_target_files=None):
+  """Rebuilds odm.img or vendor.img to include merged sepolicy files.
+
+  If odm is present, odm is preferred; otherwise vendor is used.
+
+  Args:
+    target_files_dir: Path to the extracted merged target-files package.
+    vendor_otatools: If not None, path to an otatools.zip from the vendor build
+      that is used when recompiling the image.
+    vendor_target_files: Expected if vendor_otatools is not None. Path to the
+      vendor target-files zip.
+  """
+  partition = 'vendor'
+  if os.path.exists(os.path.join(target_files_dir, 'ODM')) or os.path.exists(
+      os.path.join(target_files_dir, 'IMAGES/odm.img')):
+    partition = 'odm'
+  partition_img = '{}.img'.format(partition)
+
+  logger.info('Recompiling %s using the merged sepolicy files.', partition_img)
+
+  # Copy the combined SEPolicy file and framework hashes to the image that is
+  # being rebuilt.
+  def copy_selinux_file(input_path, output_filename):
+    shutil.copy(
+        os.path.join(target_files_dir, input_path),
+        os.path.join(target_files_dir, partition.upper(), 'etc/selinux',
+                     output_filename))
+
+  copy_selinux_file('META/combined_sepolicy', 'precompiled_sepolicy')
+  copy_selinux_file('SYSTEM/etc/selinux/plat_sepolicy_and_mapping.sha256',
+                    'precompiled_sepolicy.plat_sepolicy_and_mapping.sha256')
+  copy_selinux_file(
+      'SYSTEM_EXT/etc/selinux/system_ext_sepolicy_and_mapping.sha256',
+      'precompiled_sepolicy.system_ext_sepolicy_and_mapping.sha256')
+  copy_selinux_file('PRODUCT/etc/selinux/product_sepolicy_and_mapping.sha256',
+                    'precompiled_sepolicy.product_sepolicy_and_mapping.sha256')
+
+  if not vendor_otatools:
+    # Remove the partition from the merged target-files archive. It will be
+    # rebuilt later automatically by generate_images().
+    os.remove(os.path.join(target_files_dir, 'IMAGES', partition_img))
+  else:
+    # TODO(b/192253131): Remove the need for vendor_otatools by fixing
+    # backwards-compatibility issues when compiling images on R from S+.
+    if not vendor_target_files:
+      raise ValueError(
+          'Expected vendor_target_files if vendor_otatools is not None.')
+    logger.info(
+        '%s recompilation will be performed using the vendor otatools.zip',
+        partition_img)
+
+    # Unzip the vendor build's otatools.zip and target-files archive.
+    vendor_otatools_dir = common.MakeTempDir(
+        prefix='merge_target_files_vendor_otatools_')
+    vendor_target_files_dir = common.MakeTempDir(
+        prefix='merge_target_files_vendor_target_files_')
+    common.UnzipToDir(vendor_otatools, vendor_otatools_dir)
+    common.UnzipToDir(vendor_target_files, vendor_target_files_dir)
+
+    # Copy the partition contents from the merged target-files archive to the
+    # vendor target-files archive.
+    shutil.rmtree(os.path.join(vendor_target_files_dir, partition.upper()))
+    shutil.copytree(
+        os.path.join(target_files_dir, partition.upper()),
+        os.path.join(vendor_target_files_dir, partition.upper()))
+
+    # Delete then rebuild the partition.
+    os.remove(os.path.join(vendor_target_files_dir, 'IMAGES', partition_img))
+    rebuild_partition_command = [
+        os.path.join(vendor_otatools_dir, 'bin', 'add_img_to_target_files'),
+        '--verbose',
+        '--add_missing',
+        vendor_target_files_dir,
+    ]
+    logger.info('Recompiling %s: %s', partition_img,
+                ' '.join(rebuild_partition_command))
+    common.RunAndCheckOutput(rebuild_partition_command, verbose=True)
+
+    # Move the newly-created image to the merged target files dir.
+    shutil.move(
+        os.path.join(vendor_target_files_dir, 'IMAGES', partition_img),
+        os.path.join(target_files_dir, 'IMAGES', partition_img))
+
+
 def generate_super_empty_image(target_dir, output_super_empty):
   """Generates super_empty image from target package.
 
@@ -1049,7 +1145,8 @@
                        framework_misc_info_keys, vendor_target_files,
                        vendor_item_list, output_target_files, output_dir,
                        output_item_list, output_ota, output_img,
-                       output_super_empty, rebuild_recovery):
+                       output_super_empty, rebuild_recovery, vendor_otatools,
+                       rebuild_sepolicy):
   """Merges two target files packages together.
 
   This function takes framework and vendor target files packages as input,
@@ -1085,6 +1182,9 @@
       merged target files package and saves it at this path.
     rebuild_recovery: If true, rebuild the recovery patch used by non-A/B
       devices and write it to the system image.
+    vendor_otatools: Path to an otatools zip used for recompiling vendor images.
+    rebuild_sepolicy: If true, rebuild odm.img (if target uses ODM) or
+      vendor.img using a merged precompiled_sepolicy file.
   """
 
   logger.info('starting: merge framework %s and vendor %s into output %s',
@@ -1137,14 +1237,14 @@
       partition_map=filtered_partitions)
 
   # Check that the split sepolicy from the multiple builds can compile.
-  split_sepolicy_cmd = compile_split_sepolicy(
-      product_out=output_target_files_temp_dir,
-      partition_map=filtered_partitions,
-      output_policy=os.path.join(output_target_files_temp_dir,
-                                 'META/combined.policy'))
+  split_sepolicy_cmd = compile_split_sepolicy(output_target_files_temp_dir,
+                                              filtered_partitions)
   logger.info('Compiling split sepolicy: %s', ' '.join(split_sepolicy_cmd))
   common.RunAndCheckOutput(split_sepolicy_cmd)
-  # TODO(b/178864050): Run tests on the combined.policy file.
+  # Include the compiled policy in an image if requested.
+  if rebuild_sepolicy:
+    rebuild_image_with_sepolicy(output_target_files_temp_dir, vendor_otatools,
+                                vendor_target_files)
 
   # Run validation checks on the pre-installed APEX files.
   validate_merged_apex_info(output_target_files_temp_dir, partition_map.keys())
@@ -1261,6 +1361,10 @@
       OPTIONS.rebuild_recovery = True
     elif o == '--allow-duplicate-apkapex-keys':
       OPTIONS.allow_duplicate_apkapex_keys = True
+    elif o == '--vendor-otatools':
+      OPTIONS.vendor_otatools = a
+    elif o == '--rebuild-sepolicy':
+      OPTIONS.rebuild_sepolicy = True
     elif o == '--keep-tmp':
       OPTIONS.keep_tmp = True
     else:
@@ -1289,6 +1393,8 @@
           'output-super-empty=',
           'rebuild_recovery',
           'allow-duplicate-apkapex-keys',
+          'vendor-otatools=',
+          'rebuild-sepolicy',
           'keep-tmp',
       ],
       extra_option_handler=option_handler)
@@ -1342,7 +1448,9 @@
           output_ota=OPTIONS.output_ota,
           output_img=OPTIONS.output_img,
           output_super_empty=OPTIONS.output_super_empty,
-          rebuild_recovery=OPTIONS.rebuild_recovery), OPTIONS.keep_tmp)
+          rebuild_recovery=OPTIONS.rebuild_recovery,
+          vendor_otatools=OPTIONS.vendor_otatools,
+          rebuild_sepolicy=OPTIONS.rebuild_sepolicy), OPTIONS.keep_tmp)
 
 
 if __name__ == '__main__':
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 42d1211..8face66 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -237,6 +237,7 @@
 import sys
 import zipfile
 
+import care_map_pb2
 import common
 import ota_utils
 from ota_utils import (UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata,
@@ -529,6 +530,8 @@
         'payload_properties.txt',
     )
     self.optional = (
+        # apex_info.pb isn't directly used in the update flow
+        'apex_info.pb',
         # care_map is available only if dm-verity is enabled.
         'care_map.pb',
         'care_map.txt',
@@ -832,6 +835,17 @@
   with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
     common.ZipWriteStr(partial_target_zip, 'META/ab_partitions.txt',
                        '\n'.join(ab_partitions))
+    CARE_MAP_ENTRY = "META/care_map.pb"
+    if CARE_MAP_ENTRY in input_zip.namelist():
+      caremap = care_map_pb2.CareMap()
+      caremap.ParseFromString(input_zip.read(CARE_MAP_ENTRY))
+      filtered = [
+          part for part in caremap.partitions if part.name in ab_partitions]
+      del caremap.partitions[:]
+      caremap.partitions.extend(filtered)
+      common.ZipWriteStr(partial_target_zip, CARE_MAP_ENTRY,
+                         caremap.SerializeToString())
+
     for info_file in ['META/misc_info.txt', DYNAMIC_PARTITION_INFO]:
       if info_file not in input_zip.namelist():
         logger.warning('Cannot find %s in input zipfile', info_file)
@@ -841,7 +855,8 @@
           content, lambda p: p in ab_partitions)
       common.ZipWriteStr(partial_target_zip, info_file, modified_info)
 
-    # TODO(xunchang) handle 'META/care_map.pb', 'META/postinstall_config.txt'
+    # TODO(xunchang) handle 'META/postinstall_config.txt'
+
   common.ZipClose(partial_target_zip)
 
   return partial_target_file
@@ -1170,14 +1185,12 @@
     else:
       logger.warning("Cannot find care map file in target_file package")
 
-  # Copy apex_info.pb over to generated OTA package.
-  try:
-    apex_info_entry = target_zip.getinfo("META/apex_info.pb")
-    with target_zip.open(apex_info_entry, "r") as zfp:
-      common.ZipWriteStr(output_zip, "apex_info.pb", zfp.read(),
-                         compress_type=zipfile.ZIP_STORED)
-  except KeyError:
-    logger.warning("target_file doesn't contain apex_info.pb %s", target_file)
+  # Add the source apex version for incremental ota updates, and write the
+  # resulting apex info to the ota package.
+  ota_apex_info = ota_utils.ConstructOtaApexInfo(target_zip, source_file)
+  if ota_apex_info is not None:
+    common.ZipWriteStr(output_zip, "apex_info.pb", ota_apex_info,
+                       compress_type=zipfile.ZIP_STORED)
 
   common.ZipClose(target_zip)
 
diff --git a/tools/releasetools/ota_metadata.proto b/tools/releasetools/ota_metadata.proto
index ed9d0c3..689ce80 100644
--- a/tools/releasetools/ota_metadata.proto
+++ b/tools/releasetools/ota_metadata.proto
@@ -72,6 +72,8 @@
   int64 version = 2;
   bool is_compressed = 3;
   int64 decompressed_size = 4;
+  // The version of this apex in the source build; populated for incremental OTAs.
+  int64 source_version = 5;
 }
 
 // Just a container to hold repeated apex_info, so that we can easily serialize
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 104f02f..28c246b 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -569,3 +569,45 @@
 
   SignFile(temp_zip_name, output_zip_name, OPTIONS.package_key, pw,
            whole_file=True)
+
+
+def ConstructOtaApexInfo(target_zip, source_file=None):
+  """If applicable, add the source version to the apex info."""
+
+  def _ReadApexInfo(input_zip):
+    if "META/apex_info.pb" not in input_zip.namelist():
+      logger.warning("target_file doesn't contain apex_info.pb %s", input_zip)
+      return None
+
+    with input_zip.open("META/apex_info.pb", "r") as zfp:
+      return zfp.read()
+
+  target_apex_string = _ReadApexInfo(target_zip)
+  # Return early if the target apex info doesn't exist or is empty.
+  if not target_apex_string:
+    return target_apex_string
+
+  # If the source apex info isn't available, just return the target info
+  if not source_file:
+    return target_apex_string
+
+  with zipfile.ZipFile(source_file, "r", allowZip64=True) as source_zip:
+    source_apex_string = _ReadApexInfo(source_zip)
+  if not source_apex_string:
+    return target_apex_string
+
+  source_apex_proto = ota_metadata_pb2.ApexMetadata()
+  source_apex_proto.ParseFromString(source_apex_string)
+  source_apex_versions = {apex.package_name: apex.version for apex in
+                          source_apex_proto.apex_info}
+
+  # If the apex package is available in the source build, initialize the source
+  # apex version.
+  target_apex_proto = ota_metadata_pb2.ApexMetadata()
+  target_apex_proto.ParseFromString(target_apex_string)
+  for target_apex in target_apex_proto.apex_info:
+    name = target_apex.package_name
+    if name in source_apex_versions:
+      target_apex.source_version = source_apex_versions[name]
+
+  return target_apex_proto.SerializeToString()
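
A minimal usage sketch for the new helper (not part of the patch; the zip paths are placeholders and the releasetools modules are assumed to be importable):

import zipfile

import ota_metadata_pb2
from ota_utils import ConstructOtaApexInfo

# "target.zip" / "source.zip" stand in for real target-files packages.
with zipfile.ZipFile("target.zip", "r", allowZip64=True) as target_zip:
  info_bytes = ConstructOtaApexInfo(target_zip, source_file="source.zip")

if info_bytes:
  metadata = ota_metadata_pb2.ApexMetadata()
  metadata.ParseFromString(info_bytes)
  for apex in metadata.apex_info:
    # source_version stays 0 when the apex is absent from the source build.
    print(apex.package_name, apex.version, apex.source_version)
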
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 1a00549..e42d417 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -1631,88 +1631,6 @@
     self.assertEqual('3', chained_partition_args[1])
     self.assertTrue(os.path.exists(chained_partition_args[2]))
 
-  def test_BuildVBMeta_appendAftlCommandSyntax(self):
-    testdata_dir = test_utils.get_testdata_dir()
-    common.OPTIONS.info_dict = {
-        'ab_update': 'true',
-        'avb_avbtool': 'avbtool',
-        'build.prop': common.PartitionBuildProps.FromDictionary(
-            'system', {
-                'ro.build.version.incremental': '6285659',
-                'ro.product.device': 'coral',
-                'ro.build.fingerprint':
-                'google/coral/coral:R/RP1A.200311.002/'
-                '6285659:userdebug/dev-keys'}
-        ),
-    }
-    common.OPTIONS.aftl_tool_path = 'aftltool'
-    common.OPTIONS.aftl_server = 'log.endpoints.aftl-dev.cloud.goog:9000'
-    common.OPTIONS.aftl_key_path = os.path.join(testdata_dir,
-                                                'test_transparency_key.pub')
-    common.OPTIONS.aftl_manufacturer_key_path = os.path.join(
-        testdata_dir, 'test_aftl_rsa4096.pem')
-
-    vbmeta_image = tempfile.NamedTemporaryFile(delete=False)
-    cmd = common.ConstructAftlMakeImageCommands(vbmeta_image.name)
-    expected_cmd = [
-        'aftltool', 'make_icp_from_vbmeta',
-        '--vbmeta_image_path', 'place_holder',
-        '--output', vbmeta_image.name,
-        '--version_incremental', '6285659',
-        '--transparency_log_servers',
-        'log.endpoints.aftl-dev.cloud.goog:9000,{}'.format(
-            common.OPTIONS.aftl_key_path),
-        '--manufacturer_key', common.OPTIONS.aftl_manufacturer_key_path,
-        '--algorithm', 'SHA256_RSA4096',
-        '--padding', '4096']
-
-    # ignore the place holder, i.e. path to a temp file
-    self.assertEqual(cmd[:3], expected_cmd[:3])
-    self.assertEqual(cmd[4:], expected_cmd[4:])
-
-  @unittest.skip("enable after we have a server for public")
-  def test_BuildVBMeta_appendAftlContactServer(self):
-    testdata_dir = test_utils.get_testdata_dir()
-    common.OPTIONS.info_dict = {
-        'ab_update': 'true',
-        'avb_avbtool': 'avbtool',
-        'build.prop': common.PartitionBuildProps.FromDictionary(
-            'system', {
-                'ro.build.version.incremental': '6285659',
-                'ro.product.device': 'coral',
-                'ro.build.fingerprint':
-                'google/coral/coral:R/RP1A.200311.002/'
-                '6285659:userdebug/dev-keys'}
-        )
-    }
-    common.OPTIONS.aftl_tool_path = "aftltool"
-    common.OPTIONS.aftl_server = "log.endpoints.aftl-dev.cloud.goog:9000"
-    common.OPTIONS.aftl_key_path = os.path.join(testdata_dir,
-                                                'test_transparency_key.pub')
-    common.OPTIONS.aftl_manufacturer_key_path = os.path.join(
-        testdata_dir, 'test_aftl_rsa4096.pem')
-
-    input_dir = common.MakeTempDir()
-    system_image = common.MakeTempFile()
-    build_image_cmd = ['mkuserimg_mke2fs', input_dir, system_image, 'ext4',
-                       '/system', str(4096 * 100), '-j', '0', '-s']
-    common.RunAndCheckOutput(build_image_cmd)
-
-    add_footer_cmd = ['avbtool', 'add_hashtree_footer',
-                      '--partition_size', str(4096 * 150),
-                      '--partition_name', 'system',
-                      '--image', system_image]
-    common.RunAndCheckOutput(add_footer_cmd)
-
-    vbmeta_image = common.MakeTempFile()
-    common.BuildVBMeta(vbmeta_image, {'system': system_image}, 'vbmeta',
-                       ['system'])
-
-    verify_cmd = ['aftltool', 'verify_image_icp', '--vbmeta_image_path',
-                  vbmeta_image, '--transparency_log_pub_keys',
-                  common.OPTIONS.aftl_key_path]
-    common.RunAndCheckOutput(verify_cmd)
-
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_AppendGkiSigningArgs_NoSigningKeyPath(self):
     # A non-GKI boot.img has no gki_signing_key_path.
diff --git a/tools/releasetools/test_merge_target_files.py b/tools/releasetools/test_merge_target_files.py
index 4f61472..835edab 100644
--- a/tools/releasetools/test_merge_target_files.py
+++ b/tools/releasetools/test_merge_target_files.py
@@ -265,10 +265,10 @@
         'system': 'system',
         'product': 'product',
         'vendor': 'vendor',
-    }, os.path.join(product_out_dir, 'policy'))
+    })
     self.assertEqual(' '.join(cmd),
                      ('secilc -m -M true -G -N -c 30 '
-                      '-o {OTP}/policy -f /dev/null '
+                      '-o {OTP}/META/combined_sepolicy -f /dev/null '
                       '{OTP}/system/etc/selinux/plat_sepolicy.cil '
                       '{OTP}/system/etc/selinux/mapping/30.0.cil '
                       '{OTP}/vendor/etc/selinux/vendor_sepolicy.cil '
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 661712a..11cfee1 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -24,7 +24,7 @@
 import test_utils
 from ota_utils import (
     BuildLegacyOtaMetadata, CalculateRuntimeDevicesAndFingerprints,
-    FinalizeMetadata, GetPackageMetadata, PropertyFiles)
+    ConstructOtaApexInfo, FinalizeMetadata, GetPackageMetadata, PropertyFiles)
 from ota_from_target_files import (
     _LoadOemDicts, AbOtaPropertyFiles,
     GetTargetFilesZipForCustomImagesUpdates,
@@ -295,6 +295,35 @@
     uncompressed_apex_size = os.path.getsize(original_apex_filepath)
     self.assertEqual(apex_infos[0].decompressed_size, uncompressed_apex_size)
 
+  @staticmethod
+  def construct_tf_with_apex_info(infos):
+    apex_metadata_proto = ota_metadata_pb2.ApexMetadata()
+    apex_metadata_proto.apex_info.extend(infos)
+
+    output = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(output, 'w') as zfp:
+      common.ZipWriteStr(zfp, "META/apex_info.pb",
+                         apex_metadata_proto.SerializeToString())
+    return output
+
+  def test_ConstructOtaApexInfo_incremental_package(self):
+    infos = [ota_metadata_pb2.ApexInfo(package_name='com.android.apex.1',
+                                       version=1000, is_compressed=False),
+             ota_metadata_pb2.ApexInfo(package_name='com.android.apex.2',
+                                       version=2000, is_compressed=True)]
+    target_file = self.construct_tf_with_apex_info(infos)
+
+    with zipfile.ZipFile(target_file) as target_zip:
+      info_bytes = ConstructOtaApexInfo(target_zip, source_file=target_file)
+    apex_metadata_proto = ota_metadata_pb2.ApexMetadata()
+    apex_metadata_proto.ParseFromString(info_bytes)
+
+    info_list = apex_metadata_proto.apex_info
+    self.assertEqual(2, len(info_list))
+    self.assertEqual('com.android.apex.1', info_list[0].package_name)
+    self.assertEqual(1000, info_list[0].version)
+    self.assertEqual(1000, info_list[0].source_version)
+
   def test_GetPackageMetadata_retrofitDynamicPartitions(self):
     target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
     common.OPTIONS.retrofit_dynamic_partitions = True
@@ -834,6 +863,7 @@
         property_files.required)
     self.assertEqual(
         (
+            'apex_info.pb',
             'care_map.pb',
             'care_map.txt',
             'compatibility.zip',
@@ -933,6 +963,7 @@
         property_files.required)
     self.assertEqual(
         (
+            'apex_info.pb',
             'care_map.pb',
             'care_map.txt',
             'compatibility.zip',
diff --git a/tools/releasetools/testdata/test_aftl_rsa4096.pem b/tools/releasetools/testdata/test_aftl_rsa4096.pem
deleted file mode 100644
index 89f1ef3..0000000
--- a/tools/releasetools/testdata/test_aftl_rsa4096.pem
+++ /dev/null
@@ -1,52 +0,0 @@
------BEGIN PRIVATE KEY-----
-MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQDDlhUPUgtWL6LB
-Wybp6wsEJeioV1aRLPGSA2/xIpTiJUK46cb/MD5eBTWjKENoIgX23eL/ePy2I68e
-+WvcZ5ITGOTRQqNVZIdc5qvr03wkV0BsJQMHSMAHacePpB/4xM5MzN/6Ku1wA8Dw
-uK+v/Cw4hqq8H/gP0oPVQ1bwcIePzRPX4YkkyXusoyzTIm5DJ9reVtyFucKqANCN
-aFmGxcaEc2nADtARQWJpO95joFsMvr68+JBxpCt8aWbxuSz/rLJ9Y8Z46V/++XG+
-E4QEob/WVY5pUD/RyogLrfhIf+zO7R3wJklXElSFacIX9+RzR9dgkQVbqxLfBKIP
-XWLCsF4I4EnvqUtaVjIMl8UpZpoq8pDLRqZ71Os5xZYq06x9E02M6DnvFbZEdaOX
-MCz2mmNX3g5FahvJayBhCuNhyTkd79MFR71Wp48TvWxKz3S7q0T0cWHNhtPkHSCa
-KwD93AQnqtLKYDGkHIZBzJPcs+QxbzdHyGzhXZb+qh5KmQvNA9HRBQY1RkMmzIbI
-8pzYTwpOkbCEhVoCWcRaaF1Pgl+zcpgJOMbBBUabx/dConFIhMDW/I5fHgKgwGqm
-tWUibrMPdnfS6W5MXi8jC0eDuZl0VwmdE+4dLujiOofUYnb7D+GXojf3PrSLcTw1
-PmG0f7l5xDKN9a0N+IXqvD2oAANTsQIDAQABAoICAQCW5HXw8OogHvYg2HMIKrbA
-B4McRO1baWIhtRcq4PQeGIMGaA2HmS+0l65O5uRCNWWGlJ7pW+0TlCop6mHFk/4F
-T8JQk2mxmrI4ARqIAQwYeVwRUuioOP81eO1mK0gjQ6qpY7I0reOq9KpozQN18UYo
-gfS82Kkng9EDukUbkKV1UtFJTw3gXLVWdjlB1qFcnCXmPPs7DBpbz+8V+XiAWpsS
-WnwumP77IQeMiozDLdaw2YQMBHRjyDVocWTjfmpyAkleJZjcdagC7W1MKIBElomL
-EUyigTALaYZWBGy1ekQ3TIY5XUBdtZ2RpAsDNNOCAN3v+VI565zOhCOHWRO1gh24
-vyhBFR0HYqBRoLbLAqo8bM5iLPz1EWGyaTnfxt38J8Va0TD7KihcBnphiA+dkhEF
-oc0yIp/8S2o3CfkNok7Ju8Amb7M4JJuKhuP8wxn86fAHpjjd3Y4SlZp0NrTrd7T2
-msLIneb1OUZZxFxyJG1XQGEZplLPalnGadIF4p3q/3nd1rVb491qCNl/A5QwhI9r
-ZV62O90M9fu3+cAynBLbMT09IZecNwP1gXmunlY6YH+ymM+3NFqC8q2tnzomiz8/
-Fee0ftZ2C/jK62fET0Y8LPWGkVQGHtvZH0FPg4suA0GMmYAe0tQl93A+jFltfKKZ
-RgCDrYs6Wv76E9gnWVnEdQKCAQEA8L76LjZUTKOg83Bra+hP+cXnwGsgwOwJfGBp
-OM++5HzlpYjtbD38esBZVJtwb/8xJGdsHtP2n7ZgbSDuAnRj5S50QHIApvRkz1Y+
-1hL8tAdgVP2JkYjpyG3bPk4QVKyXkKvBcp2BCidXs75+HzfOxqkazumaYOYo2guh
-azHdka2xSqxcZqo4yyORc/oue25RU4skmuNDOlP0+OTxU/uXnl7QZmlaOfT5TqO4
-s7uER4BXt/87j44mnOBdXmtqrsL49+R9bzVskx76aeuaBbwf7jnpR058E71OZwSd
-F1P3fx6hl0yLOZF/5Jnq+14rEna6jH50XtzlhB6deSZFTOw2gwKCAQEAz/qXRzwH
-I0YWISgkUG2zBJseHmfHqV4CDzb5+tTJ3B2I8cXE0m2sQJXi2s7oMhWSc1cQOHCX
-txpgWaD59uBz2lcwnGRNp27TRXv8Wo+X0+O+lGWU2cO+j8AB2Vtb7F7rCySp0+Uu
-z+dBfoQ2zhKEQlkX0YldVILGzCL3QBHVvPC4iDlwkMRbcejDoh9NsBtHL8lG+MAw
-ZXbwJjhaJkhTXJFpJpejq70naS8VVlLt8Os80iuBXe5JK/ecAHtsNcJlXO02sMNZ
-Fbcy8WosGyvRKQ/tHtTjAlxZ7Ey8usWE8BvWBdUgiIBkIcjLtE2GrA8eOGNb3v1I
-HRt8NsV8yaLWuwKCAQAR7SaT6le8nTKO7gARuOq7npDzMwbtVqYeLM+o+08rlGFF
-QjzronH6cfg05J4quMXgABN8+CuVGO91MM6IQEJv/lWJtvN1ex1GkxV6u0812JbD
-vV1RCPDfi86XhRiSNYfTrfZponDJYMSXDcg2auFqyYzFe3+TV5ATLGqIoN3uyxA4
-jz0SJ/qypaNfD3IGnuBPaD0Bi4ql/TpwjhuqNUHE+SprdczSI/usb2SBfaUL7fKa
-MNcuiVc2tz48maMIAFypmMn+TewXyGa9HF4Lr0ZxZr6IIL/8eEwuP5my8v2q6Yz+
-xyRW1Q7A5vUoYoqyhUS+0Wu45JnyjJUNQFxIrg4hAoIBAF1uBIGSvN4iwRQ6FT4w
-WahrCre8BVzXh3NQTjJZXylL91YtcwLZE/Wbn+KN6o99U2IPLZE9O1qdNcVt5Hz8
-Te87FfJbuOrLhYuEbFQ+h4U/nUDK9XhyT+wB5JLBUOU5qrtByC0Rmtr411o/iONA
-PDwWC/YskEnDygywdIRKvsr3FN7VdvUB0Na2KxRsnZjMWElmUUS0Ccm7CZ0R2aWy
-/gfqpuMYYgVnnwnIhfxWmt+MvbDorGAHCMYAoQsyZuUrpB9/zP7RcvanavI6sP+v
-ynF43xvnpOdNl3Po8SuyScsXpijOmqPXkaP/sUsZPLOUww2vzPi6raetzjpIs4td
-ZLsCggEAe42Zj3FEbruJZeDgmd9lSc0j8UF90mNw8KH44IbuA6R9fGv3WkrNHEVd
-XZOwjWqAxhOj6pFoJk8n6h5d8iS/yXFZ0AfBMc21XMecu9mnfx9E9LFAIWmv7Wut
-vy3h2BqY+crglpg5RAw+3J97HAGMYCvp+hH2il+9zzjpmCtTD21LRMkw34szY7RR
-CDy9G5FTmKVlxw5eegvyj164olQRLurEdUIfSr5UnBjrWftJHy9JW8KWCeFDSmm9
-xCl3nGDyQuZmOTngxPtrOYAhb5LoKR9BeGcy6jlom7V4nYYqm3t1IDBgMqjYGT9c
-vqQgxO2OFsQOJQ/4PRYEKd1neTlZrw==
------END PRIVATE KEY-----
diff --git a/tools/releasetools/testdata/test_transparency_key.pub b/tools/releasetools/testdata/test_transparency_key.pub
deleted file mode 100644
index 8bfd816..0000000
--- a/tools/releasetools/testdata/test_transparency_key.pub
+++ /dev/null
@@ -1,15 +0,0 @@
------BEGIN PUBLIC KEY-----
-MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4ilqCNsenNA013iCdwgD
-YPxZ853nbHG9lMBp9boXiwRcqT/8bUKHIL7YX5z7s+QoRYVY3rkMKppRabclXzyx
-H59YnPMaU4uv7NqwWzjgaZo7E+vo7IF+KBjV3cJulId5Av0yIYUCsrwd7MpGtWdC
-Q3S+7Vd4zwzCKEhcvliNIhnNlp1U3wNkPCxOyCAsMEn6k8O5ar12ke5TvxDv15db
-rPDeHh8G2OYWoCkWL+lSN35L2kOJqKqVbLKWrrOd96RCYrrtbPCi580OADJRcUlG
-lgcjwmNwmypBWvQMZ6ITj0P0ksHnl1zZz1DE2rXe1goLI1doghb5KxLaezlR8c2C
-E3w/uo9KJgNmNgUVzzqZZ6FE0moyIDNOpP7KtZAL0DvEZj6jqLbB0ccPQElrg52m
-Dv2/A3nYSr0mYBKeskT4+Bg7PGgoC8p7WyLSxMyzJEDYdtrj9OFx6eZaA23oqTQx
-k3Qq5H8RfNBeeSUEeKF7pKH/7gyqZ2bNzBFMA2EBZgBozwRfaeN/HCv3qbaCnwvu
-6caacmAsK+RxiYxSL1QsJqyhCWWGxVyenmxdc1KG/u5ypi7OIioztyzR3t2tAzD3
-Nb+2t8lgHBRxbV24yiPlnvPmB1ZYEctXnlRR9Evpl1o9xA9NnybPHKr9rozN39CZ
-V/USB8K6ao1y5xPZxa8CZksCAwEAAQ==
------END PUBLIC KEY-----
-
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index 401857f..cfe3139 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -194,7 +194,8 @@
 
     # Check we have the same recovery target in the check and flash commands.
     assert check_partition == flash_partition, \
-        "Mismatching targets: {} vs {}".format(check_partition, flash_partition)
+        "Mismatching targets: {} vs {}".format(
+            check_partition, flash_partition)
 
     # Validate the SHA-1 of the recovery image.
     recovery_sha1 = flash_partition.split(':')[3]
@@ -248,6 +249,29 @@
     os.symlink(os.path.join(src, filename), os.path.join(dst, filename))
 
 
+def ValidatePartitionFingerprints(input_tmp, info_dict):
+  build_info = common.BuildInfo(info_dict)
+  # Expected format:
+  #  Prop: com.android.build.vendor.fingerprint -> 'generic/aosp_cf_x86_64_phone/vsoc_x86_64:S/AOSP.MASTER/7335886:userdebug/test-keys'
+  #  Prop: com.android.build.vendor_boot.fingerprint -> 'generic/aosp_cf_x86_64_phone/vsoc_x86_64:S/AOSP.MASTER/7335886:userdebug/test-keys'
+  p = re.compile(
+      r"Prop: com.android.build.(?P<partition>\w+).fingerprint -> '(?P<fingerprint>[\w\/:\.-]+)'")
+  for vbmeta_partition in ["vbmeta", "vbmeta_system"]:
+    image = os.path.join(input_tmp, "IMAGES", vbmeta_partition + ".img")
+    output = common.RunAndCheckOutput(
+        [info_dict["avb_avbtool"], "info_image", "--image", image])
+    matches = p.findall(output)
+    for (partition, fingerprint) in matches:
+      actual_fingerprint = build_info.GetPartitionFingerprint(
+          partition)
+      if actual_fingerprint is None:
+        logging.warning(
+            "Failed to get fingerprint for partition %s", partition)
+        continue
+      assert fingerprint == actual_fingerprint, "Fingerprint mismatch for partition {}, expected: {} actual: {}".format(
+          partition, fingerprint, actual_fingerprint)
+
+
 def ValidateVerifiedBootImages(input_tmp, info_dict, options):
   """Validates the Verified Boot related images.
 
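
For reference, a quick standalone check (not part of the patch) of the avbtool-output regex used by ValidatePartitionFingerprints above, fed the sample "Prop:" line from its comment:

import re

p = re.compile(
    r"Prop: com.android.build.(?P<partition>\w+).fingerprint -> '(?P<fingerprint>[\w\/:\.-]+)'")
sample = ("Prop: com.android.build.vendor.fingerprint -> "
          "'generic/aosp_cf_x86_64_phone/vsoc_x86_64:S/AOSP.MASTER/7335886:userdebug/test-keys'")
match = p.search(sample)
assert match.group("partition") == "vendor"
assert match.group("fingerprint").endswith("userdebug/test-keys")
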
@@ -273,7 +297,7 @@
   # longer copied from RADIO to the IMAGES folder. But avbtool assumes that
   # images are in IMAGES folder. So we symlink them.
   symlinkIfNotExists(os.path.join(input_tmp, "RADIO"),
-                    os.path.join(input_tmp, "IMAGES"))
+                     os.path.join(input_tmp, "IMAGES"))
   # Verified boot 1.0 (images signed with boot_signer and verity_signer).
   if info_dict.get('boot_signer') == 'true':
     logging.info('Verifying Verified Boot images...')
@@ -325,11 +349,12 @@
     if info_dict.get("system_root_image") != "true":
       verity_key_ramdisk = os.path.join(
           input_tmp, 'BOOT', 'RAMDISK', 'verity_key')
-      assert os.path.exists(verity_key_ramdisk), 'Missing verity_key in ramdisk'
+      assert os.path.exists(
+          verity_key_ramdisk), 'Missing verity_key in ramdisk'
 
       assert filecmp.cmp(
           verity_key_mincrypt, verity_key_ramdisk, shallow=False), \
-              'Mismatching verity_key files in root and ramdisk'
+          'Mismatching verity_key files in root and ramdisk'
       logging.info('Verified the content of /verity_key in ramdisk')
 
     # Then verify the verity signed system/vendor/product images, against the
@@ -362,6 +387,8 @@
     if key is None:
       key = info_dict['avb_vbmeta_key_path']
 
+    ValidatePartitionFingerprints(input_tmp, info_dict)
+
     # avbtool verifies all the images that have descriptors listed in vbmeta.
     # Using `--follow_chain_partitions` so it would additionally verify chained
     # vbmeta partitions (e.g. vbmeta_system).
@@ -411,7 +438,7 @@
 
     # avbtool verifies recovery image for non-A/B devices.
     if (info_dict.get('ab_update') != 'true' and
-        info_dict.get('no_recovery') != 'true'):
+            info_dict.get('no_recovery') != 'true'):
       image = os.path.join(input_tmp, 'IMAGES', 'recovery.img')
       key = info_dict['avb_recovery_key_path']
       cmd = [info_dict['avb_avbtool'], 'verify_image', '--image', image,
@@ -427,21 +454,21 @@
 
 
 def CheckDataInconsistency(lines):
-    build_prop = {}
-    for line in lines:
-      if line.startswith("import") or line.startswith("#"):
-        continue
-      if "=" not in line:
-        continue
+  build_prop = {}
+  for line in lines:
+    if line.startswith("import") or line.startswith("#"):
+      continue
+    if "=" not in line:
+      continue
 
-      key, value = line.rstrip().split("=", 1)
-      if key in build_prop:
-        logging.info("Duplicated key found for {}".format(key))
-        if value != build_prop[key]:
-          logging.error("Key {} is defined twice with different values {} vs {}"
-                        .format(key, value, build_prop[key]))
-          return key
-      build_prop[key] = value
+    key, value = line.rstrip().split("=", 1)
+    if key in build_prop:
+      logging.info("Duplicated key found for {}".format(key))
+      if value != build_prop[key]:
+        logging.error("Key {} is defined twice with different values {} vs {}"
+                      .format(key, value, build_prop[key]))
+        return key
+    build_prop[key] = value
 
 
 def CheckBuildPropDuplicity(input_tmp):
diff --git a/tools/signapk/OWNERS b/tools/signapk/OWNERS
index 0b8d398..23cab0b 100644
--- a/tools/signapk/OWNERS
+++ b/tools/signapk/OWNERS
@@ -1,2 +1,2 @@
 cbrubaker@google.com
-klyubin@google.com
+mpgroover@google.com
diff --git a/tools/warn/cpp_warn_patterns.py b/tools/warn/cpp_warn_patterns.py
index 2fa9916..90759d9 100644
--- a/tools/warn/cpp_warn_patterns.py
+++ b/tools/warn/cpp_warn_patterns.py
@@ -91,6 +91,8 @@
          [r".*: warning: incompatible redeclaration of library function .+"]),
     high('Null passed as non-null argument',
          [r".*: warning: Null passed to a callee that requires a non-null"]),
+    medium('Unused command line argument',
+           [r".*: warning: argument unused during compilation: .+"]),
     medium('Unused parameter',
            [r".*: warning: unused parameter '.*'"]),
     medium('Unused function, variable, label, comparison, etc.',
@@ -166,6 +168,8 @@
            [r".*: warning: '.+' declared with greater visibility than the type of its field '.+'"]),
     medium('Shift count greater than width of type',
            [r".*: warning: (left|right) shift count >= width of type"]),
+    medium('Shift operator precedence',
+           [r".*: warning: operator .* has lower precedence .+Wshift-op-parentheses.+"]),
     medium('extern &lt;foo&gt; is initialized',
            [r".*: warning: '.+' initialized and declared 'extern'",
             r".*: warning: 'extern' variable has an initializer"]),
@@ -239,6 +243,8 @@
            [r".*: warning: ignoring #pragma .+"]),
     medium('Pragma warning messages',
            [r".*: warning: .+W#pragma-messages"]),
+    medium('Pragma once in main file',
+           [r".*: warning: #pragma once in main file .+Wpragma-once-outside-header.*"]),
     medium('Variable might be clobbered by longjmp or vfork',
            [r".*: warning: variable '.+' might be clobbered by 'longjmp' or 'vfork'"]),
     medium('Argument might be clobbered by longjmp or vfork',
@@ -333,7 +339,7 @@
     low('Deprecated register',
         [r".*: warning: 'register' storage class specifier is deprecated"]),
     low('Converts between pointers to integer types with different sign',
-        [r".*: warning: .+ converts between pointers to integer types with different sign"]),
+        [r".*: warning: .+ converts between pointers to integer types .+Wpointer-sign\]"]),
     harmless('Extra tokens after #endif',
              [r".*: warning: extra tokens at end of #endif directive"]),
     medium('Comparison between different enums',
@@ -410,6 +416,32 @@
         [r".*: warning: missing .+Winvalid-pp-token"]),
     low('need glibc to link',
         [r".*: warning: .* requires at runtime .* glibc .* for linking"]),
+    low('Add braces to avoid dangling else',
+        [r".*: warning: add explicit braces to avoid dangling else"]),
+    low('Assigning value to self',
+        [r".*: warning: explicitly assigning value of .+ to itself"]),
+    low('Comparison of integers of different signs',
+        [r".*: warning: comparison of integers of different signs.+sign-compare"]),
+    low('Incompatible pointer types',
+        [r".*: warning: incompatible .*pointer types .*-Wincompatible-.*pointer-types"]),
+    low('Missing braces',
+        [r".*: warning: suggest braces around initialization of",
+         r".*: warning: too many braces around scalar initializer .+Wmany-braces-around-scalar-init",
+         r".*: warning: braces around scalar initializer"]),
+    low('Missing field initializers',
+        [r".*: warning: missing field '.+' initializer"]),
+    low('Typedef redefinition',
+        [r".*: warning: redefinition of typedef '.+' is a C11 feature"]),
+    low('GNU old-style field designator',
+        [r".*: warning: use of GNU old-style field designator extension"]),
+    low('Initializer overrides prior initialization',
+        [r".*: warning: initializer overrides prior initialization of this subobject"]),
+    low('GNU extension, variable sized type not at end',
+        [r".*: warning: field '.+' with variable sized type '.+' not at the end of a struct or class"]),
+    low('Comparison of constant is always false/true',
+        [r".*: comparison of .+ is always .+Wtautological-constant-out-of-range-compare"]),
+    low('Hides overloaded virtual function',
+        [r".*: '.+' hides overloaded virtual function"]),
     medium('Operator new returns NULL',
            [r".*: warning: 'operator new' must not return NULL unless it is declared 'throw\(\)' .+"]),
     medium('NULL used in arithmetic',
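
As a quick illustration (not part of the patch), these entries are ordinary regular expressions matched against raw build log lines; for example, one of the newly added low-severity patterns against a hypothetical clang warning:

import re

pattern = r".*: warning: comparison of integers of different signs.+sign-compare"
log_line = ("external/foo/bar.cpp:42:11: warning: comparison of integers of "
            "different signs: 'int' and 'size_t' [-Wsign-compare]")
assert re.search(pattern, log_line)
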
diff --git a/tools/warn/html_writer.py b/tools/warn/html_writer.py
index ac5d4b7..ef173bc 100644
--- a/tools/warn/html_writer.py
+++ b/tools/warn/html_writer.py
@@ -328,7 +328,8 @@
     cur_row_class = 1 - cur_row_class
     # remove last '\n'
     out_text = text[:-1] if text[-1] == '\n' else text
-    writer('<tr><td class="c' + str(cur_row_class) + '">' + out_text + '</td></tr>')
+    writer('<tr><td class="c' + str(cur_row_class) + '">'
+           + out_text + '</td></tr>')
   writer('</table></div>')
   writer('</blockquote>')
 
@@ -355,7 +356,8 @@
   sort_warnings(warn_patterns)
   total = 0
   for severity in Severity.levels:
-    total += write_severity(csvwriter, severity, severity.column_header, warn_patterns)
+    total += write_severity(
+        csvwriter, severity, severity.column_header, warn_patterns)
   csvwriter.writerow([total, '', 'All warnings'])
 
 
diff --git a/tools/warn/java_warn_patterns.py b/tools/warn/java_warn_patterns.py
index 534f48d..3f5da9d 100644
--- a/tools/warn/java_warn_patterns.py
+++ b/tools/warn/java_warn_patterns.py
@@ -74,6 +74,8 @@
                 [r'.*\.class\): warning: Cannot find annotation method .+ in']),
     java_medium('No class/method in SDK ...',
                 [r'.*\.java:.*: warning: No such (class|method) .* for SDK']),
+    java_medium('Unknown enum constant',
+                [r'unknown_source_file: warning: unknown enum constant .+']),
     # Warnings generated by Error Prone
     java_medium('Non-ascii characters used, but ascii encoding specified',
                 [r".*: warning: unmappable character for encoding ascii"]),
@@ -207,6 +209,8 @@
            'Logging or rethrowing exceptions should usually be preferred to catching and calling printStackTrace'),
     medium('CatchFail',
            'Ignoring exceptions and calling fail() is unnecessary, and makes test output less useful'),
+    medium('ChangedAbstract',
+           'Method has changed \'abstract\' qualifier'),
     medium('ClassCanBeStatic',
            'Inner class is non-static but does not reference enclosing class'),
     medium('ClassNewInstance',
@@ -355,6 +359,8 @@
            'equals method doesn\'t override Object.equals'),
     medium('NotCloseable',
            'Not closeable'),
+    medium('NullableCollection',
+           'Method should not return a nullable collection'),
     medium('NullableConstructor',
            'Constructors should not be annotated with @Nullable since they cannot return null'),
     medium('NullableDereference',
@@ -801,6 +807,8 @@
                 [r".*: warning: \[path\] bad path element .*\.jar"]),
     java_medium('Supported version from annotation processor',
                 [r".*: warning: Supported source version .+ from annotation processor"]),
+    java_medium('Schema export directory is not provided',
+                [r".*\.(java|kt):.*: warning: Schema export directory is not provided"]),
 ]
 
 compile_patterns(warn_patterns)
diff --git a/tools/warn/make_warn_patterns.py b/tools/warn/make_warn_patterns.py
index a54c502..11ad5cc 100644
--- a/tools/warn/make_warn_patterns.py
+++ b/tools/warn/make_warn_patterns.py
@@ -35,6 +35,9 @@
     {'category': 'make', 'severity': Severity.HIGH,
      'description': 'System module linking to a vendor module',
      'patterns': [r".*: warning: .+ \(.+\) should not link to .+ \(partition:.+\)"]},
+    {'category': 'make', 'severity': Severity.HIGH,
+     'description': 'make: lstat file does not exist',
+     'patterns': [r".*: warning: lstat .+: file does not exist"]},
     {'category': 'make', 'severity': Severity.MEDIUM,
      'description': 'Invalid SDK/NDK linking',
      'patterns': [r".*: warning: .+ \(.+\) should not link to .+ \(.+\)"]},
@@ -56,6 +59,9 @@
     {'category': 'make', 'severity': Severity.MEDIUM,
      'description': 'make: deprecated macros',
      'patterns': [r".*\.mk:.* warning:.* [A-Z_]+ (is|has been) deprecated."]},
+    {'category': 'make', 'severity': Severity.MEDIUM,
+     'description': 'make: other Android.mk warnings',
+     'patterns': [r".*/Android.mk:.*: warning: .+"]},
 ]
 
 
diff --git a/tools/warn/other_warn_patterns.py b/tools/warn/other_warn_patterns.py
index d05c8e9..c95528c 100644
--- a/tools/warn/other_warn_patterns.py
+++ b/tools/warn/other_warn_patterns.py
@@ -75,37 +75,15 @@
     # misc warnings
     misc('Duplicate logtag',
          [r".*: warning: tag \".+\" \(.+\) duplicated in .+"]),
-    misc('Typedef redefinition',
-         [r".*: warning: redefinition of typedef '.+' is a C11 feature"]),
-    misc('GNU old-style field designator',
-         [r".*: warning: use of GNU old-style field designator extension"]),
-    misc('Missing field initializers',
-         [r".*: warning: missing field '.+' initializer"]),
-    misc('Missing braces',
-         [r".*: warning: suggest braces around initialization of",
-          r".*: warning: too many braces around scalar initializer .+Wmany-braces-around-scalar-init",
-          r".*: warning: braces around scalar initializer"]),
-    misc('Comparison of integers of different signs',
-         [r".*: warning: comparison of integers of different signs.+sign-compare"]),
-    misc('Add braces to avoid dangling else',
-         [r".*: warning: add explicit braces to avoid dangling else"]),
-    misc('Initializer overrides prior initialization',
-         [r".*: warning: initializer overrides prior initialization of this subobject"]),
-    misc('Assigning value to self',
-         [r".*: warning: explicitly assigning value of .+ to itself"]),
-    misc('GNU extension, variable sized type not at end',
-         [r".*: warning: field '.+' with variable sized type '.+' not at the end of a struct or class"]),
-    misc('Comparison of constant is always false/true',
-         [r".*: comparison of .+ is always .+Wtautological-constant-out-of-range-compare"]),
-    misc('Hides overloaded virtual function',
-         [r".*: '.+' hides overloaded virtual function"]),
-    misc('Incompatible pointer types',
-         [r".*: warning: incompatible .*pointer types .*-Wincompatible-.*pointer-types"]),
     # Assembler warnings
     asm('ASM value size does not match register size',
         [r".*: warning: value size does not match register size specified by the constraint and modifier"]),
     asm('IT instruction is deprecated',
         [r".*: warning: applying IT instruction .* is deprecated"]),
+    asm('section flags ignored',
+        [r".*: warning: section flags ignored on section redeclaration"]),
+    asm('setjmp/longjmp/vfork changed binding',
+        [r".*: warning: .*(setjmp|longjmp|vfork) changed binding to .*"]),
     # NDK warnings
     {'category': 'NDK', 'severity': Severity.HIGH,
      'description': 'NDK: Generate guard with empty availability, obsoleted',
@@ -168,6 +146,9 @@
     {'category': 'RenderScript', 'severity': Severity.LOW,
      'description': 'RenderScript warnings',
      'patterns': [r'.*\.rscript:.*: warning: ']},
+    {'category': 'RenderScript', 'severity': Severity.HIGH,
+     'description': 'RenderScript is deprecated',
+     'patterns': [r'.*: warning: Renderscript is deprecated:.+']},
     # Broken/partial warning messages will be skipped.
     {'category': 'Misc', 'severity': Severity.SKIP,
      'description': 'skip, ,',
diff --git a/tools/zipalign/ZipEntry.cpp b/tools/zipalign/ZipEntry.cpp
index 5233f0a..fcad96c 100644
--- a/tools/zipalign/ZipEntry.cpp
+++ b/tools/zipalign/ZipEntry.cpp
@@ -87,7 +87,7 @@
     }
 
     /*
-     * Sanity-check the LFH.  Note that this will fail if the "kUsesDataDescr"
+     * Check the LFH.  Note that this will fail if the "kUsesDataDescr"
      * flag is set, because the LFH is incomplete.  (Not a problem, since we
      * prefer the CDE values.)
      */