Merge "Remove GKI from gsi_$arch"
diff --git a/Android.bp b/Android.bp
deleted file mode 100644
index ab2564e..0000000
--- a/Android.bp
+++ /dev/null
@@ -1,49 +0,0 @@
-//
-// Copyright (C) 2021 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package {
-    default_applicable_licenses: ["build_make_license"],
-}
-
-// Added automatically by a large-scale-change that took the approach of
-// 'apply every license found to every target'. While this makes sure we respect
-// every license restriction, it may not be entirely correct.
-//
-// e.g. GPL in an MIT project might only apply to the contrib/ directory.
-//
-// Please consider splitting the single license below into multiple licenses,
-// taking care not to lose any license_kind information, and overriding the
-// default license using the 'licenses: [...]' property on targets as needed.
-//
-// For unused files, consider creating a 'fileGroup' with "//visibility:private"
-// to attach the license to, and including a comment whether the files may be
-// used in the current project.
-// See: http://go/android-license-faq
-license {
-    name: "build_make_license",
-    visibility: [":__subpackages__"],
-    license_kinds: [
-        "SPDX-license-identifier-Apache-2.0",
-        "SPDX-license-identifier-BSD",
-        "SPDX-license-identifier-CC-BY",
-        "SPDX-license-identifier-GPL",
-        "SPDX-license-identifier-GPL-2.0",
-        "SPDX-license-identifier-LGPL",
-        "SPDX-license-identifier-MIT",
-        "legacy_not_a_contribution",
-        "legacy_restricted",
-    ],
-    // large-scale-change unable to identify any license_text files
-}
diff --git a/Changes.md b/Changes.md
index 1ab005f..cabbed6 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,5 +1,36 @@
 # Build System Changes for Android.mk Writers
 
+## Genrule starts disallowing directory inputs
+
+To better specify the inputs to the build, we are restricting use of directories
+as inputs to genrules.
+
+To fix existing uses, change inputs to specify the inputs and update the command
+accordingly. For example:
+
+```
+genrule: {
+    name: "foo",
+    srcs: ["bar"],
+    cmd: "cp $(location bar)/*.xml $(gendir)",
+    ...
+}
+```
+
+would become
+
+```
+genrule: {
+    name: "foo",
+    srcs: ["bar/*.xml"],
+    cmd: "cp $(in) $(gendir)",
+    ...
+}
+```
+
+`BUILD_BROKEN_INPUT_DIR_MODULES` can be used to allowlist specific directories
+with genrules that have input directories.
+
 ## Dexpreopt starts enforcing `<uses-library>` checks (for Java modules)
 
 In order to construct correct class loader context for dexpreopt, build system
diff --git a/METADATA b/METADATA
index 814cb00..44781a7 100644
--- a/METADATA
+++ b/METADATA
@@ -1,8 +1,8 @@
 third_party {
-  # would be NOTICE save for GPL in:
-  #   core/LINUX_KERNEL_COPYING
-  #   tools/droiddoc/templates-pdk/assets/jquery-1.6.2.min.js
-  #   tools/droiddoc/templates-pdk/assets/jquery-history.js
-  #   tools/droiddoc/templates-pdk/assets/jquery-resizable.min.js
+  license_note: "would be NOTICE save for GPL in:\n"
+  "   core/LINUX_KERNEL_COPYING\n"
+  "   tools/droiddoc/templates-pdk/assets/jquery-1.6.2.min.js\n"
+  "   tools/droiddoc/templates-pdk/assets/jquery-history.js\n"
+  "   tools/droiddoc/templates-pdk/assets/jquery-resizable.min.js"
   license_type: RESTRICTED
 }
diff --git a/core/Makefile b/core/Makefile
index c45fc15..cf50dc8 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -729,6 +729,8 @@
 	$(hide) mkdir -p $(dir $@)
 	$(hide) $(MERGETAGS) -o $@ -m $(PRIVATE_MERGED_FILE) $(PRIVATE_SRC_FILES)
 
+$(eval $(call declare-0p-target,$(event_log_tags_file)))
+
 event-log-tags: $(event_log_tags_file)
 
 ALL_DEFAULT_INSTALLED_MODULES += $(event_log_tags_file)
@@ -780,6 +782,8 @@
 	$(FILESLIST) $(TARGET_ROOT_OUT) > $(@:.txt=.json)
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_ROOT))
+
 ifeq ($(HOST_OS),linux)
 $(call dist-for-goals, sdk sdk_addon, $(INSTALLED_FILES_FILE_ROOT))
 endif
@@ -892,7 +896,12 @@
 # $1: boot image file name
 # $2: boot image variant (boot, boot-debug, boot-test-harness)
 define get-bootimage-partition-size
-  $(BOARD_$(call to-upper,$(subst .img,,$(subst $(2),kernel,$(notdir $(1)))))_BOOTIMAGE_PARTITION_SIZE)
+$(BOARD_$(call to-upper,$(subst .img,,$(subst $(2),kernel,$(notdir $(1)))))_BOOTIMAGE_PARTITION_SIZE)
+endef
+
+# $1: partition size
+define get-partition-size-argument
+  $(if $(1),--partition_size $(1),--dynamic_partition_size)
 endef
 
 ifneq ($(strip $(TARGET_NO_KERNEL)),true)
@@ -901,11 +910,9 @@
 
 INTERNAL_INIT_BOOT_IMAGE_ARGS :=
 
-INTERNAL_BOOT_HAS_RAMDISK :=
 ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
   ifneq ($(BUILDING_INIT_BOOT_IMAGE),true)
     INTERNAL_BOOTIMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
-    INTERNAL_BOOT_HAS_RAMDISK := true
   else
     INTERNAL_INIT_BOOT_IMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
   endif
@@ -950,9 +957,15 @@
   endif
 endif # BUILDING_VENDOR_BOOT_IMAGE == "" && BOARD_USES_GENERIC_KERNEL_IMAGE != true
 
-INTERNAL_MKBOOTIMG_VERSION_ARGS := \
+ifdef BOARD_GKI_SIGNING_KEY_PATH
+  # GKI boot images will not set system version & SPL value in the header.
+  # They can be set by the device manufacturer in the AVB properties instead.
+  INTERNAL_MKBOOTIMG_VERSION_ARGS :=
+else
+  INTERNAL_MKBOOTIMG_VERSION_ARGS := \
     --os_version $(PLATFORM_VERSION_LAST_STABLE) \
     --os_patch_level $(PLATFORM_SECURITY_PATCH)
+endif # BOARD_GKI_SIGNING_KEY_PATH
 
 # $(1): image target to certify
 # $(2): out certificate target
@@ -968,7 +981,6 @@
 
 INTERNAL_GKI_CERTIFICATE_ARGS :=
 INTERNAL_GKI_CERTIFICATE_DEPS :=
-INTERNAL_GENERIC_RAMDISK_BOOT_SIGNATURE :=
 ifdef BOARD_GKI_SIGNING_KEY_PATH
   ifndef BOARD_GKI_SIGNING_ALGORITHM
     $(error BOARD_GKI_SIGNING_ALGORITHM should be defined with BOARD_GKI_SIGNING_KEY_PATH)
@@ -989,13 +1001,6 @@
     $(BOARD_GKI_SIGNING_KEY_PATH) \
     $(AVBTOOL)
 
-  ifdef INSTALLED_RAMDISK_TARGET
-    INTERNAL_GENERIC_RAMDISK_BOOT_SIGNATURE := \
-      $(call intermediates-dir-for,PACKAGING,generic_ramdisk)/boot_signature
-
-    $(INTERNAL_GENERIC_RAMDISK_BOOT_SIGNATURE): $(INSTALLED_RAMDISK_TARGET) $(INTERNAL_GKI_CERTIFICATE_DEPS)
-	$(call generate_generic_boot_image_certificate,$(INSTALLED_RAMDISK_TARGET),$@,generic_ramdisk,$(BOARD_AVB_INIT_BOOT_ADD_HASH_FOOTER_ARGS))
-  endif
 endif
 
 # Define these only if we are building boot
@@ -1013,25 +1018,24 @@
 # $1: boot image target
 define build_boot_board_avb_enabled
   $(eval kernel := $(call bootimage-to-kernel,$(1)))
+  $(MKBOOTIMG) --kernel $(kernel) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1)
   $(if $(BOARD_GKI_SIGNING_KEY_PATH), \
+    $(eval boot_signature := $(call intermediates-dir-for,PACKAGING,generic_boot)/$(notdir $(1)).boot_signature) \
     $(eval kernel_signature := $(call intermediates-dir-for,PACKAGING,generic_kernel)/$(notdir $(kernel)).boot_signature) \
+    $(call generate_generic_boot_image_certificate,$(1),$(boot_signature),boot,$(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)) $(newline) \
     $(call generate_generic_boot_image_certificate,$(kernel),$(kernel_signature),generic_kernel,$(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)) $(newline) \
-    $(if $(INTERNAL_BOOT_HAS_RAMDISK), \
-      cat $(INTERNAL_GENERIC_RAMDISK_BOOT_SIGNATURE) >> $(kernel_signature) $(newline)))
-  $(MKBOOTIMG) --kernel $(kernel) $(INTERNAL_BOOTIMAGE_ARGS) \
-    $(if $(BOARD_GKI_SIGNING_KEY_PATH),--boot_signature "$(kernel_signature)",$(INTERNAL_MKBOOTIMG_VERSION_ARGS)) \
-    $(BOARD_MKBOOTIMG_ARGS) --output $(1)
+    cat $(kernel_signature) >> $(boot_signature) $(newline) \
+    $(call assert-max-image-size,$(boot_signature),16 << 10) $(newline) \
+    truncate -s $$(( 16 << 10 )) $(boot_signature) $(newline) \
+    cat "$(boot_signature)" >> $(1))
   $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(call get-bootimage-partition-size,$(1),boot)))
   $(AVBTOOL) add_hash_footer \
           --image $(1) \
-          --partition_size $(call get-bootimage-partition-size,$(1),boot) \
+          $(call get-partition-size-argument,$(call get-bootimage-partition-size,$(1),boot)) \
           --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) \
           $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
 endef
 
-ifdef INTERNAL_BOOT_HAS_RAMDISK
-$(INSTALLED_BOOTIMAGE_TARGET): $(INTERNAL_GENERIC_RAMDISK_BOOT_SIGNATURE)
-endif
 $(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(AVBTOOL) $(INTERNAL_BOOTIMAGE_FILES) $(BOARD_AVB_BOOT_KEY_PATH) $(INTERNAL_GKI_CERTIFICATE_DEPS)
 	$(call pretty,"Target boot image: $@")
 	$(call build_boot_board_avb_enabled,$@)
@@ -1107,7 +1111,7 @@
 	cp $(INTERNAL_PREBUILT_BOOTIMAGE) $@
 	$(AVBTOOL) add_hash_footer \
 	    --image $@ \
-	    --partition_size $(BOARD_BOOTIMAGE_PARTITION_SIZE) \
+	    $(call get-partition-size-argument,$(BOARD_BOOTIMAGE_PARTITION_SIZE)) \
 	    --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) \
 	    $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
 else
@@ -1136,16 +1140,13 @@
 endif
 
 ifeq ($(BOARD_AVB_ENABLE),true)
-$(INSTALLED_INIT_BOOT_IMAGE_TARGET): $(INTERNAL_GENERIC_RAMDISK_BOOT_SIGNATURE)
 $(INSTALLED_INIT_BOOT_IMAGE_TARGET): $(AVBTOOL) $(BOARD_AVB_INIT_BOOT_KEY_PATH)
 	$(call pretty,"Target init_boot image: $@")
-	$(MKBOOTIMG) $(INTERNAL_INIT_BOOT_IMAGE_ARGS) \
-	  $(if $(BOARD_GKI_SIGNING_KEY_PATH),--boot_signature "$(INTERNAL_GENERIC_RAMDISK_BOOT_SIGNATURE)",$(INTERNAL_MKBOOTIMG_VERSION_ARGS)) \
-	  $(BOARD_MKBOOTIMG_INIT_ARGS) --output "$@"
+	$(MKBOOTIMG) $(INTERNAL_INIT_BOOT_IMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_INIT_ARGS) --output "$@"
 	$(call assert-max-image-size,$@,$(BOARD_INIT_BOOT_IMAGE_PARTITION_SIZE))
 	$(AVBTOOL) add_hash_footer \
            --image $@ \
-	   --partition_size $(BOARD_INIT_BOOT_IMAGE_PARTITION_SIZE) \
+	   $(call get-partition-size-argument,$(BOARD_INIT_BOOT_IMAGE_PARTITION_SIZE)) \
 	   --partition_name init_boot $(INTERNAL_AVB_INIT_BOOT_SIGNING_ARGS) \
 	   $(BOARD_AVB_INIT_BOOT_ADD_HASH_FOOTER_ARGS)
 else
@@ -1166,7 +1167,7 @@
 	cp $(INTERNAL_PREBUILT_INIT_BOOT_IMAGE) $@
 	$(AVBTOOL) add_hash_footer \
 	    --image $@ \
-	    --partition_size $(BOARD_INIT_BOOT_IMAGE_PARTITION_SIZE) \
+	    $(call get-partition-size-argument,$(BOARD_INIT_BOOT_IMAGE_PARTITION_SIZE)) \
 	    --partition_name boot $(INTERNAL_AVB_INIT_BOOT_SIGNING_ARGS) \
 	    $(BOARD_AVB_INIT_BOOT_ADD_HASH_FOOTER_ARGS)
 else
@@ -1179,6 +1180,7 @@
 endif # BOARD_PREBUILT_INIT_BOOT_IMAGE
 
 endif # BUILDING_INIT_BOOT_IMAGE is not true
+
 # -----------------------------------------------------------------
 # vendor boot image
 ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
@@ -1293,7 +1295,7 @@
 	$(call assert-max-image-size,$@,$(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE))
 	$(AVBTOOL) add_hash_footer \
            --image $@ \
-	   --partition_size $(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE) \
+	   $(call get-partition-size-argument,$(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE)) \
 	   --partition_name vendor_boot $(INTERNAL_AVB_VENDOR_BOOT_SIGNING_ARGS) \
 	   $(BOARD_AVB_VENDOR_BOOT_ADD_HASH_FOOTER_ARGS)
 else
@@ -1368,8 +1370,6 @@
 # TARGET_OUT_NOTICE_FILES now that the notice files are gathered from
 # the src subdirectory.
 target_notice_file_txt := $(TARGET_OUT_INTERMEDIATES)/NOTICE.txt
-tools_notice_file_txt := $(HOST_OUT_INTERMEDIATES)/NOTICE.txt
-tools_notice_file_html := $(HOST_OUT_INTERMEDIATES)/NOTICE.html
 kernel_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/kernel.txt
 winpthreads_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/winpthreads.txt
 
@@ -1432,6 +1432,11 @@
 target_odm_dlkm_notice_file_xml_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE_ODM_DLKM.xml.gz
 installed_odm_dlkm_notice_xml_gz := $(TARGET_OUT_ODM_DLKM)/etc/NOTICE.xml.gz
 
+target_system_dlkm_notice_file_txt := $(TARGET_OUT_INTERMEDIATES)/NOTICE_SYSTEM_DLKM.txt
+target_system_dlkm_notice_file_xml := $(TARGET_OUT_INTERMEDIATES)/NOTICE_SYSTEM_DLKM.xml
+target_system_dlkm_notice_file_xml_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE_SYSTEM_DLKM.xml.gz
+installed_system_dlkm_notice_xml_gz := $(TARGET_OUT_SYSTEM_DLKM)/etc/NOTICE.xml.gz
+
 # Notice files are copied to TARGET_OUT_NOTICE_FILES as a side-effect of their module
 # being built. A notice xml file must depend on all modules that could potentially
 # install a license file relevant to it.
@@ -1452,13 +1457,15 @@
 license_modules_odm := $(filter $(TARGET_OUT_ODM)/%,$(license_modules))
 license_modules_vendor_dlkm := $(filter $(TARGET_OUT_VENDOR_DLKM)/%,$(license_modules))
 license_modules_odm_dlkm := $(filter $(TARGET_OUT_ODM_DLKM)/%,$(license_modules))
+license_modules_system_dlkm := $(filter $(TARGET_OUT_SYSTEM_DLKM)/%,$(license_modules))
 license_modules_agg := $(license_modules_system) \
                        $(license_modules_vendor) \
                        $(license_modules_product) \
                        $(license_modules_system_ext) \
                        $(license_modules_odm) \
                        $(license_modules_vendor_dlkm) \
-                       $(license_modules_odm_dlkm)
+                       $(license_modules_odm_dlkm) \
+                       $(license_modules_system_dlkm)
 # targets used for debug symbols only and do not get copied to the device
 license_modules_symbols_only := $(filter $(PRODUCT_OUT)/apex/%,$(license_modules))
 
@@ -1550,6 +1557,13 @@
 	        $(TARGET_OUT_NOTICE_FILES), \
 	        $(license_modules_odm_dlkm), \
 	        $(exclude_target_dirs)))
+$(eval $(call combine-notice-files, xml_system_dlkm, \
+	        $(target_system_dlkm_notice_file_txt), \
+	        $(target_system_dlkm_notice_file_xml), \
+	        "Notices for files contained in the system_dlkm filesystem image in this directory:", \
+	        $(TARGET_OUT_NOTICE_FILES), \
+	        $(license_modules_system_dlkm), \
+	        $(exclude_target_dirs)))
 
 $(target_notice_file_xml_gz): $(target_notice_file_xml) | $(MINIGZIP)
 	$(hide) $(MINIGZIP) -9 < $< > $@
@@ -1565,6 +1579,8 @@
 	$(hide) $(MINIGZIP) -9 < $< > $@
 $(target_odm_dlkm_notice_file_xml_gz): $(target_odm_dlkm_notice_file_xml) | $(MINIGZIP)
 	$(hide) $(MINIGZIP) -9 < $< > $@
+$(target_system_dlkm_notice_file_xml_gz): $(target_system_dlkm_notice_file_xml) | $(MINIGZIP)
+	$(hide) $(MINIGZIP) -9 < $< > $@
 $(installed_notice_html_or_xml_gz): $(target_notice_file_xml_gz)
 	$(copy-file-to-target)
 $(installed_vendor_notice_xml_gz): $(target_vendor_notice_file_xml_gz)
@@ -1579,6 +1595,8 @@
 	$(copy-file-to-target)
 $(installed_odm_dlkm_notice_xml_gz): $(target_odm_dlkm_notice_file_xml_gz)
 	$(copy-file-to-target)
+$(installed_system_dlkm_notice_xml_gz): $(target_system_dlkm_notice_file_xml_gz)
+	$(copy-file-to-target)
 
 $(call declare-0p-target,$(target_notice_file_xml))
 $(call declare-0p-target,$(target_notice_file_xml_gz))
@@ -1594,6 +1612,8 @@
 $(call declare-0p-target,$(target_vendor_dlkm_notice_file_xml_gz))
 $(call declare-0p-target,$(target_odm_dlkm_notice_file_xml))
 $(call declare-0p-target,$(target_odm_dlkm_notice_file_xml_gz))
+$(call declare-0p-target,$(target_system_dlkm_notice_file_xml))
+$(call declare-0p-target,$(target_system_dlkm_notice_file_xml_gz))
 $(call declare-0p-target,$(installed_notice_html_or_xml_gz))
 $(call declare-0p-target,$(installed_vendor_notice_xml_gz))
 $(call declare-0p-target,$(installed_product_notice_xml_gz))
@@ -1601,6 +1621,7 @@
 $(call declare-0p-target,$(installed_odm_notice_xml_gz))
 $(call declare-0p-target,$(installed_vendor_dlkm_notice_xml_gz))
 $(call declare-0p-target,$(installed_odm_dlkm_notice_xml_gz))
+$(call declare-0p-target,$(installed_system_dlkm_notice_xml_gz))
 
 ALL_DEFAULT_INSTALLED_MODULES += $(installed_notice_html_or_xml_gz)
 ALL_DEFAULT_INSTALLED_MODULES += $(installed_vendor_notice_xml_gz)
@@ -1609,19 +1630,11 @@
 ALL_DEFAULT_INSTALLED_MODULES += $(installed_odm_notice_xml_gz)
 ALL_DEFAULT_INSTALLED_MODULES += $(installed_vendor_dlkm_notice_xml_gz)
 ALL_DEFAULT_INSTALLED_MODULES += $(installed_odm_dlkm_notice_xml_gz)
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_system_dlkm_notice_xml_gz)
 endif # PRODUCT_NOTICE_SPLIT
 
 ALL_DEFAULT_INSTALLED_MODULES += $(installed_notice_html_or_xml_gz)
 
-$(eval $(call combine-notice-files, html, \
-	        $(tools_notice_file_txt), \
-	        $(tools_notice_file_html), \
-	        "Notices for files contained in the tools directory:", \
-	        $(HOST_OUT_NOTICE_FILES), \
-	        $(ALL_DEFAULT_INSTALLED_MODULES) \
-	        $(winpthreads_notice_file), \
-	        $(exclude_target_dirs)))
-
 endif  # TARGET_BUILD_APPS
 
 # The kernel isn't really a module, so to get its module file in there, we
@@ -1674,6 +1687,8 @@
     $(MKE2FS_CONF) \
     $(MKEXTUSERIMG)
 
+$(call declare-1p-target,$(MKE2FS_CONF),system/extras)
+
 ifeq ($(TARGET_USERIMAGES_USE_F2FS),true)
 INTERNAL_USERIMAGES_DEPS += $(MKF2FSUSERIMG)
 endif
@@ -1686,8 +1701,9 @@
     $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE) \
     $(BOARD_VENDOR_DLKMIMAGE_FILE_SYSTEM_TYPE) \
     $(BOARD_ODM_DLKMIMAGE_FILE_SYSTEM_TYPE) \
+    $(BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE) \
   ,erofs),)
-INTERNAL_USERIMAGES_DEPS += $(MKEROFSUSERIMG)
+INTERNAL_USERIMAGES_DEPS += $(MKEROFS)
 BOARD_EROFS_COMPRESSOR ?= "lz4hc,9"
 endif
 
@@ -1699,6 +1715,7 @@
     $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE) \
     $(BOARD_VENDOR_DLKMIMAGE_FILE_SYSTEM_TYPE) \
     $(BOARD_ODM_DLKMIMAGE_FILE_SYSTEM_TYPE) \
+    $(BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE) \
   ,squashfs),)
 INTERNAL_USERIMAGES_DEPS += $(MKSQUASHFSUSERIMG)
 endif
@@ -1770,7 +1787,7 @@
 endef
 
 # $(1): the path of the output dictionary file
-# $(2): a subset of "system vendor cache userdata product system_ext oem odm vendor_dlkm odm_dlkm"
+# $(2): a subset of "system vendor cache userdata product system_ext oem odm vendor_dlkm odm_dlkm system_dlkm"
 # $(3): additional "key=value" pairs to append to the dictionary file.
 define generate-image-prop-dictionary
 $(if $(filter $(2),system),\
@@ -1814,6 +1831,9 @@
 $(if $(filter $(2),odm_dlkm),\
     $(call add-common-ro-flags-to-image-props,odm_dlkm,$(1))
 )
+$(if $(filter $(2),system_dlkm),\
+    $(call add-common-ro-flags-to-image-props,system_dlkm,$(1))
+)
 $(if $(filter $(2),oem),\
     $(if $(BOARD_OEMIMAGE_PARTITION_SIZE),$(hide) echo "oem_size=$(BOARD_OEMIMAGE_PARTITION_SIZE)" >> $(1))
     $(if $(BOARD_OEMIMAGE_JOURNAL_SIZE),$(hide) echo "oem_journal_size=$(BOARD_OEMIMAGE_JOURNAL_SIZE)" >> $(1))
@@ -1846,6 +1866,7 @@
 $(if $(PRODUCT_SYSTEM_EXT_VERITY_PARTITION),$(hide) echo "system_ext_verity_block_device=$(PRODUCT_SYSTEM_EXT_VERITY_PARTITION)" >> $(1))
 $(if $(PRODUCT_VENDOR_DLKM_VERITY_PARTITION),$(hide) echo "vendor_dlkm_verity_block_device=$(PRODUCT_VENDOR_DLKM_VERITY_PARTITION)" >> $(1))
 $(if $(PRODUCT_ODM_DLKM_VERITY_PARTITION),$(hide) echo "odm_dlkm_verity_block_device=$(PRODUCT_ODM_DLKM_VERITY_PARTITION)" >> $(1))
+$(if $(PRODUCT_SYSTEM_DLKM_VERITY_PARTITION),$(hide) echo "system_dlkm_verity_block_device=$(PRODUCT_SYSTEM_DLKM_VERITY_PARTITION)" >> $(1))
 $(if $(PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot=$(PRODUCT_SUPPORTS_VBOOT)" >> $(1))
 $(if $(PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_key=$(PRODUCT_VBOOT_SIGNING_KEY)" >> $(1))
 $(if $(PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_subkey=$(PRODUCT_VBOOT_SIGNING_SUBKEY)" >> $(1))
@@ -1910,6 +1931,14 @@
         $(hide) echo "avb_odm_dlkm_key_path=$(BOARD_AVB_ODM_DLKM_KEY_PATH)" >> $(1)
         $(hide) echo "avb_odm_dlkm_algorithm=$(BOARD_AVB_ODM_DLKM_ALGORITHM)" >> $(1)
         $(hide) echo "avb_odm_dlkm_rollback_index_location=$(BOARD_AVB_ODM_DLKM_ROLLBACK_INDEX_LOCATION)" >> $(1)))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_system_dlkm_hashtree_enable=$(BOARD_AVB_ENABLE)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),\
+    $(hide) echo "avb_system_dlkm_add_hashtree_footer_args=$(BOARD_AVB_SYSTEM_DLKM_ADD_HASHTREE_FOOTER_ARGS)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),\
+    $(if $(BOARD_AVB_SYSTEM_DLKM_KEY_PATH),\
+        $(hide) echo "avb_system_dlkm_key_path=$(BOARD_AVB_SYSTEM_DLKM_KEY_PATH)" >> $(1)
+        $(hide) echo "avb_system_dlkm_algorithm=$(BOARD_AVB_SYSTEM_DLKM_ALGORITHM)" >> $(1)
+        $(hide) echo "avb_system_dlkm_rollback_index_location=$(BOARD_AVB_SYSTEM_DLKM_ROLLBACK_INDEX_LOCATION)" >> $(1)))
 $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
     $(hide) echo "recovery_as_boot=true" >> $(1))
 $(if $(filter true,$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)),\
@@ -1953,6 +1982,9 @@
 ifdef BUILDING_ODM_DLKM_IMAGE
   PROP_DICTIONARY_IMAGES += odm_dlkm
 endif
+ifdef BUILDING_SYSTEM_DLKM_IMAGE
+  PROP_DICTIONARY_IMAGES += system_dlkm
+endif
 define generate-userimage-prop-dictionary
   $(call generate-image-prop-dictionary,$(1),$(PROP_DICTIONARY_IMAGES),$(2))
 endef
@@ -2355,8 +2387,8 @@
     $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(BOARD_RECOVERYIMAGE_PARTITION_SIZE))))
   $(if $(filter true,$(BOARD_AVB_ENABLE)), \
     $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)), \
-      $(AVBTOOL) add_hash_footer --image $(1) --partition_size $(call get-bootimage-partition-size,$(1),boot) --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS),\
-      $(AVBTOOL) add_hash_footer --image $(1) --partition_size $(BOARD_RECOVERYIMAGE_PARTITION_SIZE) --partition_name recovery $(INTERNAL_AVB_RECOVERY_SIGNING_ARGS) $(BOARD_AVB_RECOVERY_ADD_HASH_FOOTER_ARGS)))
+      $(AVBTOOL) add_hash_footer --image $(1) $(call get-partition-size-argument,$(call get-bootimage-partition-size,$(1),boot)) --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS),\
+      $(AVBTOOL) add_hash_footer --image $(1) $(call get-partition-size-argument,$(BOARD_RECOVERYIMAGE_PARTITION_SIZE)) --partition_name recovery $(INTERNAL_AVB_RECOVERY_SIGNING_ARGS) $(BOARD_AVB_RECOVERY_ADD_HASH_FOOTER_ARGS)))
 endef
 
 recoveryimage-deps := $(MKBOOTIMG) $(recovery_ramdisk) $(recovery_kernel)
@@ -2522,7 +2554,7 @@
 $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(call get-bootimage-partition-size,$(1),$(2))))
 $(AVBTOOL) add_hash_footer \
   --image $(1) \
-  --partition_size $(call get-bootimage-partition-size,$(1),$(2))\
+  $(call get-partition-size-argument,$(call get-bootimage-partition-size,$(1),$(2)))\
   --partition_name boot $(INTERNAL_AVB_BOOT_TEST_SIGNING_ARGS) \
   $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
 $(call assert-max-image-size,$(1),$(call get-bootimage-partition-size,$(1),$(2)))
@@ -2610,7 +2642,7 @@
 $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE)))
 $(AVBTOOL) add_hash_footer \
   --image $(1) \
-  --partition_size $(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE) \
+  $(call get-partition-size-argument,$(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE)) \
   --partition_name vendor_boot $(INTERNAL_AVB_VENDOR_BOOT_TEST_SIGNING_ARGS) \
   $(BOARD_AVB_VENDOR_BOOT_ADD_HASH_FOOTER_ARGS)
 $(call assert-max-image-size,$(1),$(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE))
@@ -2649,6 +2681,8 @@
 	$(hide) $(foreach line,$(ADDITIONAL_TEST_HARNESS_PROPERTIES), \
 	          echo "$(line)" >> $@;)
 
+$(call declare-1p-target,$(INTERNAL_TEST_HARNESS_RAMDISK_ADB_DEBUG_PROP_TARGET))
+
 INTERNAL_TEST_HARNESS_RAMDISK_FILES := $(filter $(TARGET_TEST_HARNESS_RAMDISK_OUT)/%, \
     $(INTERNAL_TEST_HARNESS_RAMDISK_ADB_DEBUG_PROP_TARGET) \
     $(ALL_GENERATED_SOURCES) \
@@ -2826,6 +2860,8 @@
 $(FSVERITY_APK_OUT): PRIVATE_FSVERITY := $(HOST_OUT_EXECUTABLES)/fsverity
 $(FSVERITY_APK_OUT): PRIVATE_AAPT2 := $(HOST_OUT_EXECUTABLES)/aapt2
 $(FSVERITY_APK_OUT): PRIVATE_MIN_SDK_VERSION := $(DEFAULT_APP_TARGET_SDK)
+$(FSVERITY_APK_OUT): PRIVATE_VERSION_CODE := $(PLATFORM_SDK_VERSION)
+$(FSVERITY_APK_OUT): PRIVATE_VERSION_NAME := $(APPS_DEFAULT_VERSION_NAME)
 $(FSVERITY_APK_OUT): PRIVATE_APKSIGNER := $(HOST_OUT_EXECUTABLES)/apksigner
 $(FSVERITY_APK_OUT): PRIVATE_MANIFEST := $(FSVERITY_APK_MANIFEST_PATH)
 $(FSVERITY_APK_OUT): PRIVATE_FRAMEWORK_RES := $(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk
@@ -2839,6 +2875,8 @@
     $(fsverity-metadata-targets)
 	$< --fsverity-path $(PRIVATE_FSVERITY) --aapt2-path $(PRIVATE_AAPT2) \
 	    --min-sdk-version $(PRIVATE_MIN_SDK_VERSION) \
+	    --version-code $(PRIVATE_VERSION_CODE) \
+	    --version-name $(PRIVATE_VERSION_NAME) \
 	    --apksigner-path $(PRIVATE_APKSIGNER) --apk-key-path $(PRIVATE_KEY) \
 	    --apk-manifest-path $(PRIVATE_MANIFEST) --framework-res $(PRIVATE_FRAMEWORK_RES) \
 	    --output $@ \
@@ -2876,7 +2914,7 @@
 # On devices with a system_dlkm partition,
 # - /system/lib/modules is a symlink to a directory that stores system DLKMs.
 # - The system_dlkm partition is mounted at /system_dlkm at runtime.
-ifdef BOARD_USES_SYSTEM_DLKM_PARTITION
+ifdef BOARD_USES_SYSTEM_DLKMIMAGE
   INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/lib/modules,/system_dlkm/lib/modules,system_dlkm.img)
 endif
 
@@ -2902,6 +2940,9 @@
 	$(HOST_OUT_EXECUTABLES)/conv_linker_config systemprovide --source $(LINKER_CONFIG_PATH_system_linker_config)\
 	  --output $@ --value "$(STUB_LIBRARIES)" --system "$(TARGET_OUT)"
 
+$(call declare-1p-target,$(SYSTEM_LINKER_CONFIG),)
+$(call declare-license-deps,$(SYSTEM_LINKER_CONFIG),$(INTERNAL_SYSTEMIMAGE_FILES) $(LINKER_CONFIG_PATH_system_linker_config))
+
 FULL_SYSTEMIMAGE_DEPS += $(SYSTEM_LINKER_CONFIG)
 
 # installed file list
@@ -3155,6 +3196,8 @@
 ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_SYSTEM_OTHER_ODEX_MARKER)
 $(INSTALLED_SYSTEM_OTHER_ODEX_MARKER):
 	$(hide) touch $@
+
+$(call declare-0p-target,$(INSTALLED_SYSTEM_OTHER_ODEX_MARKER))
 endif
 
 INTERNAL_SYSTEMOTHERIMAGE_FILES := \
@@ -3571,6 +3614,60 @@
 $(eval $(call copy-one-file,$(BOARD_PREBUILT_ODM_DLKMIMAGE),$(INSTALLED_ODM_DLKMIMAGE_TARGET)))
 endif
 
+# -----------------------------------------------------------------
+# system_dlkm partition image
+
+ifdef BUILDING_SYSTEM_DLKM_IMAGE
+
+INTERNAL_SYSTEM_DLKMIMAGE_FILES := \
+    $(filter $(TARGET_OUT_SYSTEM_DLKM)/%,\
+      $(ALL_DEFAULT_INSTALLED_MODULES))
+
+INSTALLED_FILES_FILE_SYSTEM_DLKM := $(PRODUCT_OUT)/installed-files-system_dlkm.txt
+INSTALLED_FILES_JSON_SYSTEM_DLKM := $(INSTALLED_FILES_FILE_SYSTEM_DLKM:.txt=.json)
+$(INSTALLED_FILES_FILE_SYSTEM_DLKM): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_SYSTEM_DLKM)
+$(INSTALLED_FILES_FILE_SYSTEM_DLKM): $(INTERNAL_SYSTEM_DLKMIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
+	@echo Installed file list: $@
+	mkdir -p $(dir $@)
+	if [ -d "$(BOARD_SYSTEM_DLKM_SRC)" ]; then rsync -rupE $(BOARD_SYSTEM_DLKM_SRC)/ $(TARGET_OUT_SYSTEM_DLKM); fi
+	rm -f $@
+	$(FILESLIST) $(TARGET_OUT_SYSTEM_DLKM) > $(@:.txt=.json)
+	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+
+system_dlkmimage_intermediates := \
+    $(call intermediates-dir-for,PACKAGING,system_dlkm)
+BUILT_SYSTEM_DLKMIMAGE_TARGET := $(PRODUCT_OUT)/system_dlkm.img
+define build-system_dlkmimage-target
+  $(call pretty,"Target system_dlkm fs image: $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)")
+  @mkdir -p $(TARGET_OUT_SYSTEM_DLKM)
+  @mkdir -p $(system_dlkmimage_intermediates) && rm -rf $(system_dlkmimage_intermediates)/system_dlkm_image_info.txt
+  $(call generate-image-prop-dictionary, $(system_dlkmimage_intermediates)/system_dlkm_image_info.txt, \
+	  system_dlkm, skip_fsck=true)
+  PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH \
+      $(BUILD_IMAGE) \
+          $(TARGET_OUT_SYSTEM_DLKM) $(system_dlkmimage_intermediates)/system_dlkm_image_info.txt \
+          $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET) $(TARGET_OUT)
+  $(call assert-max-image-size,$(INSTALLED_SYSTEM_DLKMIMAGE_TARGET),$(BOARD_SYSTEM_DLKMIMAGE_PARTITION_SIZE))
+endef
+
+# We just build this directly to the install location.
+INSTALLED_SYSTEM_DLKMIMAGE_TARGET := $(BUILT_SYSTEM_DLKMIMAGE_TARGET)
+$(INSTALLED_SYSTEM_DLKMIMAGE_TARGET): \
+    $(INTERNAL_USERIMAGES_DEPS) \
+    $(INTERNAL_SYSTEM_DLKMIMAGE_FILES) \
+    $(INSTALLED_FILES_FILE_SYSTEM_DLKM)
+	$(build-system_dlkmimage-target)
+
+.PHONY: system_dlkmimage-nodeps sdnod
+system_dlkmimage-nodeps sdnod: | $(INTERNAL_USERIMAGES_DEPS)
+	$(build-system_dlkmimage-target)
+
+sync: $(INTERNAL_SYSTEM_DLKMIMAGE_FILES)
+
+else ifdef BOARD_PREBUILT_SYSTEM_DLKMIMAGE
+INSTALLED_SYSTEM_DLKMIMAGE_TARGET := $(PRODUCT_OUT)/system_dlkm.img
+$(eval $(call copy-one-file,$(BOARD_PREBUILT_SYSTEM_DLKMIMAGE),$(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)))
+endif
 
 # -----------------------------------------------------------------
 # dtbo image
@@ -3582,7 +3679,7 @@
 	cp $(BOARD_PREBUILT_DTBOIMAGE) $@
 	$(AVBTOOL) add_hash_footer \
 	    --image $@ \
-	    --partition_size $(BOARD_DTBOIMG_PARTITION_SIZE) \
+	    $(call get-partition-size-argument,$(BOARD_DTBOIMG_PARTITION_SIZE)) \
 	    --partition_name dtbo $(INTERNAL_AVB_DTBO_SIGNING_ARGS) \
 	    $(BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS)
 else
@@ -3596,26 +3693,32 @@
 # Protected VM firmware image
 ifeq ($(BOARD_USES_PVMFWIMAGE),true)
 INSTALLED_PVMFWIMAGE_TARGET := $(PRODUCT_OUT)/pvmfw.img
+INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET := $(PRODUCT_OUT)/pvmfw_embedded.avbpubkey
 INTERNAL_PREBUILT_PVMFWIMAGE := packages/modules/Virtualization/pvmfw/pvmfw.img
+INTERNAL_PVMFW_EMBEDDED_AVBKEY := external/avb/test/data/testkey_rsa4096_pub.bin
 
 ifdef BOARD_PREBUILT_PVMFWIMAGE
-BUILT_PVMFWIMAGE_TARGET := $(BOARD_PREBUILT_PVMFWIMAGE)
-else ifeq ($(BUILDING_PVMFW_IMAGE),true)
-BUILT_PVMFWIMAGE_TARGET := $(INTERNAL_PREBUILT_PVMFWIMAGE)
+PREBUILT_PVMFWIMAGE_TARGET := $(BOARD_PREBUILT_PVMFWIMAGE)
+else
+PREBUILT_PVMFWIMAGE_TARGET := $(INTERNAL_PREBUILT_PVMFWIMAGE)
 endif
 
 ifeq ($(BOARD_AVB_ENABLE),true)
-$(INSTALLED_PVMFWIMAGE_TARGET): $(BUILT_PVMFWIMAGE_TARGET) $(AVBTOOL) $(BOARD_AVB_PVMFW_KEY_PATH)
-	cp $(BUILT_PVMFWIMAGE_TARGET) $@
+$(INSTALLED_PVMFWIMAGE_TARGET): $(PREBUILT_PVMFWIMAGE_TARGET) $(AVBTOOL) $(BOARD_AVB_PVMFW_KEY_PATH)
+	cp $< $@
 	$(AVBTOOL) add_hash_footer \
 	    --image $@ \
-	    --partition_size $(BOARD_PVMFWIMAGE_PARTITION_SIZE) \
+	    $(call get-partition-size-argument,$(BOARD_PVMFWIMAGE_PARTITION_SIZE)) \
 	    --partition_name pvmfw $(INTERNAL_AVB_PVMFW_SIGNING_ARGS) \
 	    $(BOARD_AVB_PVMFW_ADD_HASH_FOOTER_ARGS)
 else
-$(eval $(call copy-one-file,$(BUILT_PVMFWIMAGE_TARGET),$(INSTALLED_PVMFWIMAGE_TARGET)))
+$(eval $(call copy-one-file,$(PREBUILT_PVMFWIMAGE_TARGET),$(INSTALLED_PVMFWIMAGE_TARGET)))
 endif
 
+$(INSTALLED_PVMFWIMAGE_TARGET): $(INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET)
+
+$(eval $(call copy-one-file,$(INTERNAL_PVMFW_EMBEDDED_AVBKEY),$(INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET)))
+
 endif # BOARD_USES_PVMFWIMAGE
 
 # Returns a list of image targets corresponding to the given list of partitions. For example, it
@@ -3651,7 +3754,7 @@
           --image $(3) \
           --key $(BOARD_AVB_$(call to-upper,$(2))_KEY_PATH) \
           --algorithm $(BOARD_AVB_$(call to-upper,$(2))_ALGORITHM) \
-          --partition_size $(BOARD_AVB_$(call to-upper,$(2))_PARTITION_SIZE) \
+          $(call get-partition-size-argument,$(BOARD_AVB_$(call to-upper,$(2))_PARTITION_SIZE)) \
           --partition_name $(2) \
           $(INTERNAL_AVB_CUSTOMIMAGES_SIGNING_ARGS) \
           $(BOARD_AVB_$(call to-upper,$(2))_ADD_HASHTREE_FOOTER_ARGS)
@@ -3740,8 +3843,7 @@
     --prop com.android.build.system_ext.security_patch:$(PLATFORM_SECURITY_PATCH)
 
 BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS += \
-    --prop com.android.build.boot.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
-    --prop com.android.build.boot.os_version:$(PLATFORM_VERSION_LAST_STABLE)
+    --prop com.android.build.boot.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE)
 
 BOARD_AVB_INIT_BOOT_ADD_HASH_FOOTER_ARGS += \
     --prop com.android.build.init_boot.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
@@ -3769,6 +3871,10 @@
     --prop com.android.build.odm_dlkm.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
     --prop com.android.build.odm_dlkm.os_version:$(PLATFORM_VERSION_LAST_STABLE)
 
+BOARD_AVB_SYSTEM_DLKM_ADD_HASHTREE_FOOTER_ARGS += \
+    --prop com.android.build.system_dlkm.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE) \
+    --prop com.android.build.system_dlkm.os_version:$(PLATFORM_VERSION_LAST_STABLE)
+
 BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS += \
     --prop com.android.build.dtbo.fingerprint:$(BUILD_FINGERPRINT_FROM_FILE)
 
@@ -3778,6 +3884,14 @@
 # The following vendor- and odm-specific images needs explicit SPL set per board.
 # TODO(b/210875415) Is this security_patch property used? Should it be removed from
 # boot.img when there is no platform ramdisk included in it?
+ifdef BOOT_OS_VERSION
+BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS += \
+    --prop com.android.build.boot.os_version:$(BOOT_OS_VERSION)
+else
+BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS += \
+    --prop com.android.build.boot.os_version:$(PLATFORM_VERSION_LAST_STABLE)
+endif
+
 ifdef BOOT_SECURITY_PATCH
 BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS += \
     --prop com.android.build.boot.security_patch:$(BOOT_SECURITY_PATCH)
@@ -3811,18 +3925,16 @@
     --prop com.android.build.odm_dlkm.security_patch:$(ODM_DLKM_SECURITY_PATCH)
 endif
 
+ifdef SYSTEM_DLKM_SECURITY_PATCH
+BOARD_AVB_SYSTEM_DLKM_ADD_HASHTREE_FOOTER_ARGS += \
+    --prop com.android.build.system_dlkm.security_patch:$(SYSTEM_DLKM_SECURITY_PATCH)
+endif
+
 ifdef PVMFW_SECURITY_PATCH
 BOARD_AVB_PVMFW_ADD_HASH_FOOTER_ARGS += \
     --prop com.android.build.pvmfw.security_patch:$(PVMFW_SECURITY_PATCH)
 endif
 
-# For upgrading devices without a init_boot partition, the init_boot footer args
-# should fallback to boot partition footer.
-ifndef INSTALLED_INIT_BOOT_IMAGE_TARGET
-BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS += \
-    $(BOARD_AVB_INIT_BOOT_ADD_HASH_FOOTER_ARGS)
-endif
-
 BOOT_FOOTER_ARGS := BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS
 INIT_BOOT_FOOTER_ARGS := BOARD_AVB_INIT_BOOT_ADD_HASH_FOOTER_ARGS
 VENDOR_BOOT_FOOTER_ARGS := BOARD_AVB_VENDOR_BOOT_ADD_HASH_FOOTER_ARGS
@@ -3836,6 +3948,7 @@
 ODM_FOOTER_ARGS := BOARD_AVB_ODM_ADD_HASHTREE_FOOTER_ARGS
 VENDOR_DLKM_FOOTER_ARGS := BOARD_AVB_VENDOR_DLKM_ADD_HASHTREE_FOOTER_ARGS
 ODM_DLKM_FOOTER_ARGS := BOARD_AVB_ODM_DLKM_ADD_HASHTREE_FOOTER_ARGS
+SYSTEM_DLKM_FOOTER_ARGS := BOARD_AVB_SYSTEM_DLKM_ADD_HASHTREE_FOOTER_ARGS
 
 # Helper function that checks and sets required build variables for an AVB chained partition.
 # $(1): the partition to enable AVB chain, e.g., boot or system or vbmeta_system.
@@ -3944,6 +4057,10 @@
 $(eval $(call check-and-set-avb-args,odm_dlkm))
 endif
 
+ifdef INSTALLED_SYSTEM_DLKMIMAGE_TARGET
+$(eval $(call check-and-set-avb-args,system_dlkm))
+endif
+
 ifdef INSTALLED_DTBOIMAGE_TARGET
 $(eval $(call check-and-set-avb-args,dtbo))
 endif
@@ -4037,6 +4154,9 @@
   $(if $(BOARD_AVB_ODM_DLKM_KEY_PATH),\
     $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_ODM_DLKM_KEY_PATH) \
       --output $(1)/odm_dlkm.avbpubkey)
+  $(if $(BOARD_AVB_SYSTEM_DLKM_KEY_PATH),\
+    $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_SYSTEM_DLKM_KEY_PATH) \
+      --output $(1)/system_dlkm.avbpubkey)
   $(if $(BOARD_AVB_DTBO_KEY_PATH),\
     $(hide) $(AVBTOOL) extract_public_key --key $(BOARD_AVB_DTBO_KEY_PATH) \
       --output $(1)/dtbo.avbpubkey)
@@ -4125,6 +4245,7 @@
 	    $(INSTALLED_ODMIMAGE_TARGET) \
 	    $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) \
 	    $(INSTALLED_ODM_DLKMIMAGE_TARGET) \
+	    $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET) \
 	    $(INSTALLED_DTBOIMAGE_TARGET) \
 	    $(INSTALLED_PVMFWIMAGE_TARGET) \
 	    $(INSTALLED_CUSTOMIMAGES_TARGET) \
@@ -4156,11 +4277,12 @@
     $(INTERNAL_ODMIMAGE_FILES) \
     $(INTERNAL_VENDOR_DLKMIMAGE_FILES) \
     $(INTERNAL_ODM_DLKMIMAGE_FILES) \
+    $(INTERNAL_SYSTEM_DLKMIMAGE_FILES) \
 
 # -----------------------------------------------------------------
 # Check VINTF of build
 
-# Note: vendor_dlkm and odm_dlkm does not have VINTF files.
+# Note: vendor_dlkm, odm_dlkm, and system_dlkm do not have VINTF files.
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
 
 intermediates := $(call intermediates-dir-for,PACKAGING,check_vintf_all)
@@ -4554,11 +4676,11 @@
   mke2fs \
   mke2fs.conf \
   mkfs.erofs \
-  mkerofsimage.sh \
   mkf2fsuserimg.sh \
   mksquashfs \
   mksquashfsimage.sh \
   mkuserimg_mke2fs \
+  ota_extractor \
   ota_from_target_files \
   repack_bootimg \
   secilc \
@@ -4993,13 +5115,17 @@
 define filter-out-missing-odm_dlkm
 $(if $(INSTALLED_ODM_DLKMIMAGE_TARGET),$(1),$(filter-out odm_dlkm,$(1)))
 endef
-# Filter out vendor,vendor_dlkm,odm,odm_dlkm from the list for AOSP targets.
+define filter-out-missing-system_dlkm
+$(if $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET),$(1),$(filter-out system_dlkm,$(1)))
+endef
+# Filter out vendor,vendor_dlkm,odm,odm_dlkm,system_dlkm from the list for AOSP targets.
 # $(1): list
 define filter-out-missing-partitions
 $(call filter-out-missing-vendor,\
   $(call filter-out-missing-vendor_dlkm,\
     $(call filter-out-missing-odm,\
-      $(call filter-out-missing-odm_dlkm,$(1)))))
+      $(call filter-out-missing-odm_dlkm,\
+        $(call filter-out-missing-system_dlkm,$(1))))))
 endef
 
 # Information related to dynamic partitions and virtual A/B. This information
@@ -5049,6 +5175,11 @@
     echo "virtual_ab=true" >> $(1))
   $(if $(filter true,$(PRODUCT_VIRTUAL_AB_COMPRESSION)), \
     echo "virtual_ab_compression=true" >> $(1))
+# This value controls the compression algorithm used for VABC
+# valid options are defined in system/core/fs_mgr/libsnapshot/cow_writer.cpp
+# e.g. "none", "gz", "brotli"
+  $(if $(PRODUCT_VIRTUAL_AB_COMPRESSION_METHOD), \
+    echo "virtual_ab_compression_method=$(PRODUCT_VIRTUAL_AB_COMPRESSION_METHOD)" >> $(1))
   $(if $(filter true,$(PRODUCT_VIRTUAL_AB_OTA_RETROFIT)), \
     echo "virtual_ab_retrofit=true" >> $(1))
 endef
@@ -5143,6 +5274,12 @@
   $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_ODM_DLKMIMAGE_TARGET)
 endif
 
+ifdef BUILDING_SYSTEM_DLKM_IMAGE
+  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_SYSTEM_DLKMIMAGE_FILES)
+else ifdef BOARD_PREBUILT_SYSTEM_DLKMIMAGE
+  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)
+endif
+
 ifeq ($(BUILD_QEMU_IMAGES),true)
   MK_VBMETA_BOOT_KERNEL_CMDLINE_SH := device/generic/goldfish/tools/mk_vbmeta_boot_params.sh
   $(BUILT_TARGET_FILES_PACKAGE): $(MK_VBMETA_BOOT_KERNEL_CMDLINE_SH)
@@ -5161,6 +5298,7 @@
 	    $(INSTALLED_CACHEIMAGE_TARGET) \
 	    $(INSTALLED_DTBOIMAGE_TARGET) \
 	    $(INSTALLED_PVMFWIMAGE_TARGET) \
+	    $(INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET) \
 	    $(INSTALLED_CUSTOMIMAGES_TARGET) \
 	    $(INSTALLED_ANDROID_INFO_TXT_TARGET) \
 	    $(INSTALLED_KERNEL_TARGET) \
@@ -5177,6 +5315,7 @@
 	    $(PRODUCT_ODM_BASE_FS_PATH) \
 	    $(PRODUCT_VENDOR_DLKM_BASE_FS_PATH) \
 	    $(PRODUCT_ODM_DLKM_BASE_FS_PATH) \
+	    $(PRODUCT_SYSTEM_DLKM_BASE_FS_PATH) \
 	    $(LPMAKE) \
 	    $(SELINUX_FC) \
 	    $(INSTALLED_MISC_INFO_TARGET) \
@@ -5350,6 +5489,11 @@
 	$(hide) $(call package_files-copy-root, \
 	    $(TARGET_OUT_ODM_DLKM),$(zip_root)/ODM_DLKM)
 endif
+ifdef BUILDING_SYSTEM_DLKM_IMAGE
+	@# Contents of the system_dlkm image
+	$(hide) $(call package_files-copy-root, \
+	    $(TARGET_OUT_SYSTEM_DLKM),$(zip_root)/SYSTEM_DLKM)
+endif
 ifdef BUILDING_SYSTEM_OTHER_IMAGE
 	@# Contents of the system_other image
 	$(hide) $(call package_files-copy-root, \
@@ -5410,6 +5554,10 @@
 	$(hide) cp $(PRODUCT_ODM_DLKM_BASE_FS_PATH) \
 	  $(zip_root)/META/$(notdir $(PRODUCT_ODM_DLKM_BASE_FS_PATH))
 endif
+ifneq ($(PRODUCT_SYSTEM_DLKM_BASE_FS_PATH),)
+	$(hide) cp $(PRODUCT_SYSTEM_DLKM_BASE_FS_PATH) \
+	  $(zip_root)/META/$(notdir $(PRODUCT_SYSTEM_DLKM_BASE_FS_PATH))
+endif
 ifeq ($(TARGET_OTA_ALLOW_NON_AB),true)
 ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
 	$(hide) PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$$PATH MKBOOTIMG=$(MKBOOTIMG) \
@@ -5421,7 +5569,7 @@
 	$(hide) cp $(TOPDIR)system/update_engine/update_engine.conf $(zip_root)/META/update_engine_config.txt
 	$(hide) cp $(TOPDIR)external/zucchini/version_info.h $(zip_root)/META/zucchini_config.txt
 	$(hide) cp $(HOST_OUT_SHARED_LIBRARIES)/liblz4.so $(zip_root)/META/liblz4.so
-	$(hide) for part in $(strip $(AB_OTA_PARTITIONS)); do \
+	$(hide) for part in $(sort $(AB_OTA_PARTITIONS)); do \
 	  echo "$${part}" >> $(zip_root)/META/ab_partitions.txt; \
 	done
 	$(hide) for conf in $(strip $(AB_OTA_POSTINSTALL_CONFIG)); do \
@@ -5453,6 +5601,7 @@
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(INSTALLED_INIT_BOOT_IMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
 endif
+
 ifndef BOARD_PREBUILT_BOOTIMAGE
 ifneq (,$(strip $(INTERNAL_PREBUILT_BOOTIMAGE) $(filter true,$(BOARD_COPY_BOOT_IMAGE_TO_TARGET_FILES))))
 ifdef INSTALLED_BOOTIMAGE_TARGET
@@ -5476,16 +5625,18 @@
 	$(hide) mkdir -p $(zip_root)/IMAGES
 	$(hide) cp $(INSTALLED_ODM_DLKMIMAGE_TARGET) $(zip_root)/IMAGES/
 endif
+ifdef BOARD_PREBUILT_SYSTEM_DLKMIMAGE
+	$(hide) mkdir -p $(zip_root)/IMAGES
+	$(hide) cp $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET) $(zip_root)/IMAGES/
+endif
 ifdef BOARD_PREBUILT_DTBOIMAGE
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(INSTALLED_DTBOIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
 endif # BOARD_PREBUILT_DTBOIMAGE
-ifdef BOARD_PREBUILT_PVMFWIMAGE
+ifeq ($(BOARD_USES_PVMFWIMAGE),true)
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(INSTALLED_PVMFWIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
-else ifeq ($(BUILDING_PVMFW_IMAGE),true)
-	$(hide) mkdir -p $(zip_root)/IMAGES
-	$(hide) cp $(INSTALLED_PVMFWIMAGE_TARGET) $(zip_root)/IMAGES/
+	$(hide) cp $(INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET) $(zip_root)/PREBUILT_IMAGES/
 endif
 ifdef BOARD_PREBUILT_BOOTLOADER
 	$(hide) mkdir -p $(zip_root)/IMAGES
@@ -5524,6 +5675,9 @@
 ifdef BUILDING_ODM_DLKM_IMAGE
 	$(hide) $(call fs_config,$(zip_root)/ODM_DLKM,odm_dlkm/) > $(zip_root)/META/odm_dlkm_filesystem_config.txt
 endif
+ifdef BUILDING_SYSTEM_DLKM_IMAGE
+	$(hide) $(call fs_config,$(zip_root)/SYSTEM_DLKM,system_dlkm/) > $(zip_root)/META/system_dlkm_filesystem_config.txt
+endif
 	@# ROOT always contains the files for the root under normal boot.
 	$(hide) $(call fs_config,$(zip_root)/ROOT,) > $(zip_root)/META/root_filesystem_config.txt
 ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
@@ -5768,12 +5922,15 @@
 #
 ifeq (true,$(CLANG_COVERAGE))
   LLVM_PROFDATA := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/bin/llvm-profdata
+  LLVM_COV := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/bin/llvm-cov
   LIBCXX := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/lib64/libc++.so.1
-  PROFDATA_ZIP := $(PRODUCT_OUT)/llvm-profdata.zip
-  $(PROFDATA_ZIP): $(SOONG_ZIP)
-	$(hide) $(SOONG_ZIP) -d -o $@ -C $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION) -f $(LLVM_PROFDATA) -f $(LIBCXX)
+  # Use llvm-profdata.zip for backwards compatibility with tradefed code.
+  LLVM_COVERAGE_TOOLS_ZIP := $(PRODUCT_OUT)/llvm-profdata.zip
 
-  $(call dist-for-goals,droidcore-unbundled apps_only,$(PROFDATA_ZIP))
+  $(LLVM_COVERAGE_TOOLS_ZIP): $(SOONG_ZIP)
+	$(hide) $(SOONG_ZIP) -d -o $@ -C $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION) -f $(LLVM_PROFDATA) -f $(LIBCXX) -f $(LLVM_COV)
+
+  $(call dist-for-goals,droidcore-unbundled apps_only,$(LLVM_COVERAGE_TOOLS_ZIP))
 endif
 
 # -----------------------------------------------------------------
@@ -5858,6 +6015,7 @@
     $(INSTALLED_ODMIMAGE_TARGET) \
     $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) \
     $(INSTALLED_ODM_DLKMIMAGE_TARGET) \
+    $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET) \
     $(updater_dep)
 endif
 $(PROGUARD_USAGE_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,proguard_usage.zip)/filelist
@@ -6112,6 +6270,16 @@
 droidcore-unbundled: $(INSTALLED_QEMU_ODM_DLKMIMAGE)
 endif
 
+ifdef INSTALLED_SYSTEM_DLKMIMAGE_TARGET
+INSTALLED_QEMU_SYSTEM_DLKMIMAGE := $(PRODUCT_OUT)/system_dlkm-qemu.img
+$(INSTALLED_QEMU_SYSTEM_DLKMIMAGE): $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET) $(MK_QEMU_IMAGE_SH) $(SGDISK_HOST)
+	@echo Create system_dlkm-qemu.img
+	(export SGDISK=$(SGDISK_HOST); $(MK_QEMU_IMAGE_SH) $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET))
+
+system_dlkmimage: $(INSTALLED_QEMU_SYSTEM_DLKMIMAGE)
+droidcore-unbundled: $(INSTALLED_QEMU_SYSTEM_DLKMIMAGE)
+endif
+
 QEMU_VERIFIED_BOOT_PARAMS := $(PRODUCT_OUT)/VerifiedBootParams.textproto
 $(QEMU_VERIFIED_BOOT_PARAMS): $(INSTALLED_VBMETAIMAGE_TARGET) $(INSTALLED_SYSTEMIMAGE_TARGET) \
     $(MK_VBMETA_BOOT_KERNEL_CMDLINE_SH) $(AVBTOOL)
@@ -6202,7 +6370,6 @@
 
 deps := \
 	$(target_notice_file_txt) \
-	$(tools_notice_file_txt) \
 	$(OUT_DOCS)/offline-sdk-timestamp \
 	$(SDK_METADATA_FILES) \
 	$(SYMBOLS_ZIP) \
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 0befbfa..99acdab 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -36,65 +36,45 @@
 $(call add_soong_config_var,ANDROID,BOARD_BUILD_SYSTEM_ROOT_IMAGE)
 $(call add_soong_config_var,ANDROID,PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT)
 
-ifneq (,$(filter sdk win_sdk sdk_addon,$(MAKECMDGOALS)))
-  # The artifacts in the SDK zip are OK to build with prebuilt stubs enabled,
-  # even if prebuilt apexes are not enabled, because the system images in the
-  # SDK stub are not currently used (and will be removed: b/205008975).
-  MODULE_BUILD_FROM_SOURCE ?= false
-else ifeq (,$(findstring com.google.android.conscrypt,$(PRODUCT_PACKAGES)))
+# Default behavior for the tree wrt building modules or using prebuilts. This
+# can always be overridden by setting the environment variable
+# MODULE_BUILD_FROM_SOURCE.
+BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
+
+ifneq (,$(MODULE_BUILD_FROM_SOURCE))
+  # Keep an explicit setting.
+else ifeq (,$(filter sdk win_sdk sdk_addon,$(MAKECMDGOALS))$(findstring com.google.android.conscrypt,$(PRODUCT_PACKAGES)))
   # Prebuilt module SDKs require prebuilt modules to work, and currently
   # prebuilt modules are only provided for com.google.android.xxx. If we can't
   # find one of them in PRODUCT_PACKAGES then assume com.android.xxx are in use,
   # and disable prebuilt SDKs. In particular this applies to AOSP builds.
+  #
+  # However, sdk/win_sdk/sdk_addon builds might not include com.google.android.xxx
+  # packages, so for those we respect the default behavior.
   MODULE_BUILD_FROM_SOURCE := true
+else ifeq (,$(filter-out modules_% mainline_modules_%,$(TARGET_PRODUCT)))
+  # Always build from source in unbundled builds using the module targets.
+  MODULE_BUILD_FROM_SOURCE := true
+else
+  MODULE_BUILD_FROM_SOURCE := $(BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE)
 endif
 
-# TODO(b/172480615): Remove when platform uses ART Module prebuilts by default.
-ifeq (,$(filter art_module,$(SOONG_CONFIG_NAMESPACES)))
-  $(call add_soong_config_namespace,art_module)
-  SOONG_CONFIG_art_module += source_build
-endif
-ifneq (,$(SOONG_CONFIG_art_module_source_build))
+ifneq (,$(ART_MODULE_BUILD_FROM_SOURCE))
   # Keep an explicit setting.
 else ifneq (,$(findstring .android.art,$(TARGET_BUILD_APPS)))
   # Build ART modules from source if they are listed in TARGET_BUILD_APPS.
-  SOONG_CONFIG_art_module_source_build := true
+  ART_MODULE_BUILD_FROM_SOURCE := true
 else ifeq (,$(filter-out modules_% mainline_modules_%,$(TARGET_PRODUCT)))
   # Always build from source for the module targets. This ought to be covered by
   # the TARGET_BUILD_APPS check above, but there are test builds that don't set it.
-  SOONG_CONFIG_art_module_source_build := true
-else ifeq (true,$(MODULE_BUILD_FROM_SOURCE))
-  # Build from source if other Mainline modules are.
-  SOONG_CONFIG_art_module_source_build := true
-else ifneq (,$(filter true,$(NATIVE_COVERAGE) $(CLANG_COVERAGE)))
-  # Always build ART APEXes from source in coverage builds since the prebuilts
-  # aren't built with instrumentation.
-  # TODO(b/172480617): Find another solution for this.
-  SOONG_CONFIG_art_module_source_build := true
-else ifneq (,$(SANITIZE_TARGET)$(SANITIZE_HOST))
-  # Prebuilts aren't built with sanitizers either.
-  SOONG_CONFIG_art_module_source_build := true
-  MODULE_BUILD_FROM_SOURCE := true
-else ifeq (,$(filter x86 x86_64,$(HOST_CROSS_ARCH)))
-  # We currently only provide prebuilts for x86 on host. This skips prebuilts in
-  # cuttlefish builds for ARM servers.
-  SOONG_CONFIG_art_module_source_build := true
-else ifneq (,$(filter dex2oatds dex2oats,$(PRODUCT_HOST_PACKAGES)))
-  # Some products depend on host tools that aren't available as prebuilts.
-  SOONG_CONFIG_art_module_source_build := true
-else ifeq (,$(findstring com.google.android.art,$(PRODUCT_PACKAGES)))
-  # TODO(b/192006406): There is currently no good way to control which prebuilt
-  # APEX (com.google.android.art or com.android.art) gets picked for deapexing
-  # to provide dex jars for hiddenapi and dexpreopting. Instead the AOSP APEX is
-  # completely disabled, and we build from source for AOSP products.
-  SOONG_CONFIG_art_module_source_build := true
+  ART_MODULE_BUILD_FROM_SOURCE := true
 else
-  # This sets the default for building ART APEXes from source rather than
-  # prebuilts (in packages/modules/ArtPrebuilt and prebuilt/module_sdk/art) in
-  # all other platform builds.
-  SOONG_CONFIG_art_module_source_build ?= true
+  # Do the same as other modules by default.
+  ART_MODULE_BUILD_FROM_SOURCE := $(MODULE_BUILD_FROM_SOURCE)
 endif
 
+$(call soong_config_set,art_module,source_build,$(ART_MODULE_BUILD_FROM_SOURCE))
+
 # Apex build mode variables
 ifdef APEX_BUILD_FOR_PRE_S_DEVICES
 $(call add_soong_config_var_value,ANDROID,library_linking_strategy,prefer_static)
diff --git a/core/base_rules.mk b/core/base_rules.mk
index cec7792..e26f456 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -875,6 +875,16 @@
 endif  # LOCAL_UNINSTALLABLE_MODULE
 endif  # LOCAL_COMPATIBILITY_SUITE
 
+my_supported_variant :=
+ifeq ($(my_host_cross),true)
+  my_supported_variant := HOST_CROSS
+else
+  ifdef LOCAL_IS_HOST_MODULE
+    my_supported_variant := HOST
+  else
+    my_supported_variant := DEVICE
+  endif
+endif
 ###########################################################
 ## Add test module to ALL_DISABLED_PRESUBMIT_TESTS if LOCAL_PRESUBMIT_DISABLED is set to true.
 ###########################################################
@@ -981,6 +991,9 @@
 ALL_MODULES.$(my_register_name).SYSTEM_SHARED_LIBS := \
     $(ALL_MODULES.$(my_register_name).SYSTEM_SHARED_LIBS) $(LOCAL_SYSTEM_SHARED_LIBRARIES)
 
+ALL_MODULES.$(my_register_name).LOCAL_RUNTIME_LIBRARIES := \
+    $(ALL_MODULES.$(my_register_name).LOCAL_RUNTIME_LIBRARIES) $(LOCAL_RUNTIME_LIBRARIES)
+
 ifdef LOCAL_TEST_DATA
   # Export the list of targets that are handled as data inputs and required
   # by tests at runtime. The LOCAL_TEST_DATA format is generated from below
@@ -993,6 +1006,15 @@
         $(call word-colon,2,$(f))))
 endif
 
+ifdef LOCAL_TEST_DATA_BINS
+  ALL_MODULES.$(my_register_name).TEST_DATA_BINS := \
+    $(ALL_MODULES.$(my_register_name).TEST_DATA_BINS) $(LOCAL_TEST_DATA_BINS)
+endif
+
+ALL_MODULES.$(my_register_name).SUPPORTED_VARIANTS := \
+  $(ALL_MODULES.$(my_register_name).SUPPORTED_VARIANTS) \
+  $(filter-out $(ALL_MODULES.$(my_register_name).SUPPORTED_VARIANTS),$(my_supported_variant))
+
 ##########################################################################
 ## When compiling against the VNDK, add the .vendor or .product suffix to
 ## required modules.
diff --git a/core/binary.mk b/core/binary.mk
index cf47374..94e3a0f 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -32,6 +32,12 @@
   endif
 endif
 
+# Third party code has additional no-override flags.
+is_third_party :=
+ifneq ($(filter external/% hardware/% vendor/%,$(LOCAL_PATH)),)
+  is_third_party := true
+endif
+
 my_soong_problems :=
 
 # The following LOCAL_ variables will be modified in this file.
@@ -48,6 +54,10 @@
 my_cppflags := $(LOCAL_CPPFLAGS)
 my_cflags_no_override := $(GLOBAL_CLANG_CFLAGS_NO_OVERRIDE)
 my_cppflags_no_override := $(GLOBAL_CLANG_CPPFLAGS_NO_OVERRIDE)
+ifdef is_third_party
+    my_cflags_no_override += $(GLOBAL_CLANG_EXTERNAL_CFLAGS_NO_OVERRIDE)
+    my_cppflags_no_override += $(GLOBAL_CLANG_EXTERNAL_CFLAGS_NO_OVERRIDE)
+endif
 my_ldflags := $(LOCAL_LDFLAGS)
 my_ldlibs := $(LOCAL_LDLIBS)
 my_asflags := $(LOCAL_ASFLAGS)
diff --git a/core/board_config.mk b/core/board_config.mk
index 95cbe3d..97b258d 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -85,6 +85,8 @@
 _board_strip_readonly_list += BOARD_VENDOR_DLKMIMAGE_FILE_SYSTEM_TYPE
 _board_strip_readonly_list += BOARD_ODM_DLKMIMAGE_PARTITION_SIZE
 _board_strip_readonly_list += BOARD_ODM_DLKMIMAGE_FILE_SYSTEM_TYPE
+_board_strip_readonly_list += BOARD_SYSTEM_DLKMIMAGE_PARTITION_SIZE
+_board_strip_readonly_list += BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE
 _board_strip_readonly_list += BOARD_PVMFWIMAGE_PARTITION_SIZE
 
 # Logical partitions related variables.
@@ -93,6 +95,7 @@
 _board_strip_readonly_list += BOARD_ODMIMAGE_PARTITION_RESERVED_SIZE
 _board_strip_readonly_list += BOARD_VENDOR_DLKMIMAGE_PARTITION_RESERVED_SIZE
 _board_strip_readonly_list += BOARD_ODM_DLKMIMAGE_PARTITION_RESERVED_SIZE
+_board_strip_readonly_list += BOARD_SYSTEM_DLKMIMAGE_PARTITION_RESERVED_SIZE
 _board_strip_readonly_list += BOARD_PRODUCTIMAGE_PARTITION_RESERVED_SIZE
 _board_strip_readonly_list += BOARD_SYSTEM_EXTIMAGE_PARTITION_RESERVED_SIZE
 _board_strip_readonly_list += BOARD_SUPER_PARTITION_SIZE
@@ -171,6 +174,7 @@
   BUILD_BROKEN_DUP_SYSPROP \
   BUILD_BROKEN_ELF_PREBUILT_PRODUCT_COPY_FILES \
   BUILD_BROKEN_ENFORCE_SYSPROP_OWNER \
+  BUILD_BROKEN_INPUT_DIR_MODULES \
   BUILD_BROKEN_MISSING_REQUIRED_MODULES \
   BUILD_BROKEN_OUTSIDE_INCLUDE_DIRS \
   BUILD_BROKEN_PREBUILT_ELF_FILES \
@@ -239,6 +243,7 @@
     --mode=write -r --outdir $(OUT_DIR)/rbc \
     --boardlauncher=$(OUT_DIR)/rbc/boardlauncher.rbc \
     --input_variables=$(OUT_DIR)/rbc/make_vars_pre_board_config.mk \
+    --makefile_list=$(OUT_DIR)/.module_paths/configuration.list \
     $(board_config_mk))
   ifneq ($(.SHELLSTATUS),0)
     $(error board configuration converter failed: $(.SHELLSTATUS))
@@ -867,6 +872,40 @@
 endif
 .KATI_READONLY := BUILDING_ODM_DLKM_IMAGE
 
+###########################################
+# Now we can substitute with the real value of TARGET_COPY_OUT_SYSTEM_DLKM
+ifeq ($(TARGET_COPY_OUT_SYSTEM_DLKM),$(_system_dlkm_path_placeholder))
+  TARGET_COPY_OUT_SYSTEM_DLKM := $(TARGET_COPY_OUT_SYSTEM)/system_dlkm
+else ifeq ($(filter system_dlkm system/system_dlkm,$(TARGET_COPY_OUT_SYSTEM_DLKM)),)
+  $(error TARGET_COPY_OUT_SYSTEM_DLKM must be either 'system_dlkm' or 'system/system_dlkm', seeing '$(TARGET_COPY_OUT_SYSTEM_DLKM)'.)
+endif
+PRODUCT_COPY_FILES := $(subst $(_system_dlkm_path_placeholder),$(TARGET_COPY_OUT_SYSTEM_DLKM),$(PRODUCT_COPY_FILES))
+
+BOARD_USES_SYSTEM_DLKMIMAGE :=
+ifdef BOARD_PREBUILT_SYSTEM_DLKMIMAGE
+  BOARD_USES_SYSTEM_DLKMIMAGE := true
+endif
+ifdef BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE
+  BOARD_USES_SYSTEM_DLKMIMAGE := true
+endif
+$(call check_image_config,system_dlkm)
+
+BUILDING_SYSTEM_DLKM_IMAGE :=
+ifeq ($(PRODUCT_BUILD_SYSTEM_DLKM_IMAGE),)
+  ifdef BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE
+    BUILDING_SYSTEM_DLKM_IMAGE := true
+  endif
+else ifeq ($(PRODUCT_BUILD_SYSTEM_DLKM_IMAGE),true)
+  BUILDING_SYSTEM_DLKM_IMAGE := true
+  ifndef BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE
+    $(error PRODUCT_BUILD_SYSTEM_DLKM_IMAGE set to true, but BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE not defined)
+  endif
+endif
+ifdef BOARD_PREBUILT_SYSTEM_DLKMIMAGE
+  BUILDING_SYSTEM_DLKM_IMAGE :=
+endif
+.KATI_READONLY := BUILDING_SYSTEM_DLKM_IMAGE
+
 BOARD_USES_PVMFWIMAGE :=
 ifdef BOARD_PREBUILT_PVMFWIMAGE
   BOARD_USES_PVMFWIMAGE := true
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 415334f..57f9ef8 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -264,6 +264,8 @@
 LOCAL_RESOURCE_DIR:=
 LOCAL_RLIB_LIBRARIES:=
 LOCAL_RMTYPEDEFS:=
+LOCAL_ROTATION_MIN_SDK_VERSION:=
+LOCAL_RUNTIME_LIBRARIES:=
 LOCAL_RRO_THEME:=
 LOCAL_RTTI_FLAG:=
 LOCAL_SANITIZE:=
@@ -316,6 +318,7 @@
 LOCAL_TARGET_REQUIRED_MODULES:=
 LOCAL_TEST_CONFIG:=
 LOCAL_TEST_DATA:=
+LOCAL_TEST_DATA_BINS:=
 LOCAL_TEST_MAINLINE_MODULES:=
 LOCAL_TEST_MODULE_TO_PROGUARD_WITH:=
 LOCAL_TIDY:=
@@ -358,6 +361,7 @@
 LOCAL_PACK_MODULE_RELOCATIONS_$(TARGET_ARCH):=
 LOCAL_PREBUILT_JNI_LIBS_$(TARGET_ARCH):=
 LOCAL_REQUIRED_MODULES_$(TARGET_ARCH):=
+LOCAL_RUNTIME_LIBRARIES_$(TARGET_ARCH):=
 LOCAL_SHARED_LIBRARIES_$(TARGET_ARCH):=
 LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH):=
 LOCAL_SOONG_JNI_LIBS_SYMBOLS:=
@@ -382,6 +386,7 @@
 LOCAL_PACK_MODULE_RELOCATIONS_$(TARGET_2ND_ARCH):=
 LOCAL_PREBUILT_JNI_LIBS_$(TARGET_2ND_ARCH):=
 LOCAL_REQUIRED_MODULES_$(TARGET_2ND_ARCH):=
+LOCAL_RUNTIME_LIBRARIES_$(TARGET_2ND_ARCH):=
 LOCAL_SHARED_LIBRARIES_$(TARGET_2ND_ARCH):=
 LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH):=
 LOCAL_SRC_FILES_EXCLUDE_$(TARGET_2ND_ARCH):=
@@ -403,6 +408,7 @@
 LOCAL_HEADER_LIBRARIES_$(HOST_ARCH):=
 LOCAL_LDFLAGS_$(HOST_ARCH):=
 LOCAL_REQUIRED_MODULES_$(HOST_ARCH):=
+LOCAL_RUNTIME_LIBRARIES_$(HOST_ARCH):=
 LOCAL_SHARED_LIBRARIES_$(HOST_ARCH):=
 LOCAL_SRC_FILES_EXCLUDE_$(HOST_ARCH):=
 LOCAL_SRC_FILES_$(HOST_ARCH):=
@@ -422,6 +428,7 @@
 LOCAL_HEADER_LIBRARIES_$(HOST_2ND_ARCH):=
 LOCAL_LDFLAGS_$(HOST_2ND_ARCH):=
 LOCAL_REQUIRED_MODULES_$(HOST_2ND_ARCH):=
+LOCAL_RUNTIME_LIBRARIES_$(HOST_2ND_ARCH):=
 LOCAL_SHARED_LIBRARIES_$(HOST_2ND_ARCH):=
 LOCAL_SRC_FILES_EXCLUDE_$(HOST_2ND_ARCH):=
 LOCAL_SRC_FILES_$(HOST_2ND_ARCH):=
@@ -438,6 +445,7 @@
 LOCAL_LDFLAGS_$(HOST_OS):=
 LOCAL_LDLIBS_$(HOST_OS):=
 LOCAL_REQUIRED_MODULES_$(HOST_OS):=
+LOCAL_RUNTIME_LIBRARIES_$(HOST_OS):=
 LOCAL_SHARED_LIBRARIES_$(HOST_OS):=
 LOCAL_SRC_FILES_$(HOST_OS):=
 LOCAL_STATIC_LIBRARIES_$(HOST_OS):=
@@ -479,6 +487,8 @@
 LOCAL_MODULE_STEM_64:=
 LOCAL_MODULE_SYMLINKS_32:=
 LOCAL_MODULE_SYMLINKS_64:=
+LOCAL_RUNTIME_LIBRARIES_32:=
+LOCAL_RUNTIME_LIBRARIES_64:=
 LOCAL_SHARED_LIBRARIES_32:=
 LOCAL_SHARED_LIBRARIES_64:=
 LOCAL_SRC_FILES_32:=
diff --git a/core/combo/select.mk b/core/combo/select.mk
index 7617558..9c7e69e 100644
--- a/core/combo/select.mk
+++ b/core/combo/select.mk
@@ -35,7 +35,7 @@
   ,HOST_CROSS builds are not supported in Make)
 else
 
-$(combo_var_prefix)GLOBAL_ARFLAGS := crsPD -format=gnu
+$(combo_var_prefix)GLOBAL_ARFLAGS := crsPD --format=gnu
 
 $(combo_var_prefix)STATIC_LIB_SUFFIX := .a
 
diff --git a/core/config.mk b/core/config.mk
index 72b6ec8..5ef9211 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -597,7 +597,7 @@
 FS_GET_STATS := $(HOST_OUT_EXECUTABLES)/fs_get_stats$(HOST_EXECUTABLE_SUFFIX)
 MKEXTUSERIMG := $(HOST_OUT_EXECUTABLES)/mkuserimg_mke2fs
 MKE2FS_CONF := system/extras/ext4_utils/mke2fs.conf
-MKEROFSUSERIMG := $(HOST_OUT_EXECUTABLES)/mkerofsimage.sh
+MKEROFS := $(HOST_OUT_EXECUTABLES)/mkfs.erofs
 MKSQUASHFSUSERIMG := $(HOST_OUT_EXECUTABLES)/mksquashfsimage.sh
 MKF2FSUSERIMG := $(HOST_OUT_EXECUTABLES)/mkf2fsuserimg.sh
 SIMG2IMG := $(HOST_OUT_EXECUTABLES)/simg2img$(HOST_EXECUTABLE_SUFFIX)
@@ -816,7 +816,7 @@
 # is made which breaks compatibility with the previous platform sepolicy version,
 # not just on every increase in PLATFORM_SDK_VERSION.  The minor version should
 # be reset to 0 on every bump of the PLATFORM_SDK_VERSION.
-sepolicy_major_vers := 31
+sepolicy_major_vers := 32
 sepolicy_minor_vers := 0
 
 ifneq ($(sepolicy_major_vers), $(PLATFORM_SDK_VERSION))
@@ -928,6 +928,13 @@
 endif
 endif
 
+ifneq ($(BOARD_SYSTEM_DLKMIMAGE_PARTITION_SIZE),)
+ifneq ($(BOARD_SYSTEM_DLKMIMAGE_PARTITION_RESERVED_SIZE),)
+$(error Should not define BOARD_SYSTEM_DLKMIMAGE_PARTITION_SIZE and \
+    BOARD_SYSTEM_DLKMIMAGE_PARTITION_RESERVED_SIZE together)
+endif
+endif
+
 ifneq ($(BOARD_PRODUCTIMAGE_PARTITION_SIZE),)
 ifneq ($(BOARD_PRODUCTIMAGE_PARTITION_RESERVED_SIZE),)
 $(error Should not define BOARD_PRODUCTIMAGE_PARTITION_SIZE and \
@@ -963,7 +970,7 @@
 )
 
 # BOARD_*_PARTITION_LIST: a list of the following tokens
-valid_super_partition_list := system vendor product system_ext odm vendor_dlkm odm_dlkm
+valid_super_partition_list := system vendor product system_ext odm vendor_dlkm odm_dlkm system_dlkm
 $(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)), \
     $(if $(filter-out $(valid_super_partition_list),$(BOARD_$(group)_PARTITION_LIST)), \
         $(error BOARD_$(group)_PARTITION_LIST contains invalid partition name \
diff --git a/core/definitions.mk b/core/definitions.mk
index c981152..2d16fdf 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -577,6 +577,15 @@
 endef
 
 ###########################################################
+# License metadata targets corresponding to targets in $(1)
+###########################################################
+define corresponding-license-metadata
+$(strip $(eval _dir := $(call license-metadata-dir)) \
+$(foreach target, $(sort $(1)), $(_dir)/$(target).meta_lic) \
+)
+endef
+
+###########################################################
 ## License metadata build rule for my_register_name $(1)
 ###########################################################
 define license-metadata-rule
@@ -728,6 +737,22 @@
 endef
 
 ###########################################################
+## Declare that non-module targets copied from project $(1) and
+## optionally ending in $(2) have the following license
+## metadata:
+##
+## $(3) -- license kinds e.g. SPDX-license-identifier-Apache-2.0
+## $(4) -- license conditions e.g. notice by_exception_only
+## $(5) -- license text filenames (notices)
+## $(6) -- package name
+###########################################################
+define declare-copy-files-license-metadata
+$(strip \
+  $(foreach _pair,$(filter $(1)%$(2),$(PRODUCT_COPY_FILES)),$(eval $(call declare-license-metadata,$(PRODUCT_OUT)/$(call word-colon,2,$(_pair)),$(3),$(4),$(5),$(6),$(1)))) \
+)
+endef
+
+###########################################################
 ## Declare the license metadata for non-module container-type target $(1).
 ##
 ## Container-type targets are targets like .zip files that
@@ -765,6 +790,18 @@
 endef
 
 ###########################################################
+## Declare that non-module targets copied from project $(1) and
+## optionally ending in $(2) are non-copyrightable files.
+##
+## e.g. an information-only file merely listing other files.
+###########################################################
+define declare-0p-copy-files
+$(strip \
+  $(foreach _pair,$(filter $(1)%$(2),$(PRODUCT_COPY_FILES)),$(eval $(call declare-0p-target,$(PRODUCT_OUT)/$(call word-colon,2,$(_pair))))) \
+)
+endef
+
+###########################################################
 ## Declare non-module target $(1) to have a first-party license
 ## (Android Apache 2.0)
 ##
@@ -775,6 +812,15 @@
 endef
 
 ###########################################################
+## Declare that non-module targets copied from project $(1) and
+## optionally ending in $(2) are first-party licensed
+## (Android Apache 2.0)
+###########################################################
+define declare-1p-copy-files
+$(foreach _pair,$(filter $(1)%$(2),$(PRODUCT_COPY_FILES)),$(call declare-1p-target,$(PRODUCT_OUT)/$(call word-colon,2,$(_pair)),$(1)))
+endef
+
+###########################################################
 ## Declare non-module container-type target $(1) to have a
 ## first-party license (Android Apache 2.0).
 ##
@@ -828,6 +874,46 @@
 
 endef
 
+
+###########################################################
+# Returns the unique list of built license metadata files.
+###########################################################
+define all-license-metadata
+$(sort \
+  $(foreach t,$(ALL_NON_MODULES),$(if $(filter 0p,$(ALL_TARGETS.$(t).META_LIC)),, $(ALL_TARGETS.$(t).META_LIC))) \
+  $(foreach m,$(ALL_MODULES), $(ALL_MODULES.$(m).META_LIC)) \
+)
+endef
+
+###########################################################
+# Declares the rule to report all library names used in any notice files.
+###########################################################
+define report-all-notice-library-names-rule
+$(strip $(eval _all := $(call all-license-metadata)))
+
+.PHONY: reportallnoticelibrarynames
+reportallnoticelibrarynames: PRIVATE_LIST_FILE := $(call license-metadata-dir)/filelist
+reportallnoticelibrarynames: | $(COMPLIANCENOTICE_SHIPPEDLIBS)
+reportallnoticelibrarynames: $(_all)
+	@echo Reporting notice library names for at least $$(words $(_all)) license metadata files
+	$(hide) rm -f $$(PRIVATE_LIST_FILE)
+	$(hide) mkdir -p $$(dir $$(PRIVATE_LIST_FILE))
+	$(hide) find out -name '*meta_lic' -type f -printf '"%p"\n' >$$(PRIVATE_LIST_FILE)
+	$(COMPLIANCENOTICE_SHIPPEDLIBS) @$$(PRIVATE_LIST_FILE)
+endef
+
+###########################################################
+# Declares the rule to build all license metadata.
+###########################################################
+define build-all-license-metadata-rule
+$(strip $(eval _all := $(call all-license-metadata)))
+
+.PHONY: alllicensemetadata
+alllicensemetadata: $(_all)
+	@echo Building all $(words $(_all)) license metadata files
+endef
+
+
 ###########################################################
 ## Declares a license metadata build rule for ALL_MODULES
 ###########################################################
@@ -842,7 +928,9 @@
   ) \
   $(foreach t,$(sort $(ALL_NON_MODULES)),$(eval $(call non-module-license-metadata-rule,$(t)))) \
   $(foreach m,$(sort $(ALL_MODULES)),$(eval $(call license-metadata-rule,$(m)))) \
-  $(eval $(call report-missing-licenses-rule)))
+  $(eval $(call report-missing-licenses-rule)) \
+  $(eval $(call report-all-notice-library-names-rule)) \
+  $(eval $(call build-all-license-metadata-rule)))
 endef
 
 ###########################################################
@@ -2986,6 +3074,8 @@
 # $(3): full path to destination
 define symlink-file
 $(eval $(_symlink-file))
+$(eval $(call declare-license-metadata,$(3),,,,,,))
+$(eval $(call declare-license-deps,$(3),$(1)))
 endef
 
 define _symlink-file
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 8232907..5c5b565 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -259,6 +259,7 @@
 # TARGET_COPY_OUT_* are all relative to the staging directory, ie PRODUCT_OUT.
 # Define them here so they can be used in product config files.
 TARGET_COPY_OUT_SYSTEM := system
+TARGET_COPY_OUT_SYSTEM_DLKM := system_dlkm
 TARGET_COPY_OUT_SYSTEM_OTHER := system_other
 TARGET_COPY_OUT_DATA := data
 TARGET_COPY_OUT_ASAN := $(TARGET_COPY_OUT_DATA)/asan
@@ -278,6 +279,7 @@
 _odm_path_placeholder := ||ODM-PATH-PH||
 _vendor_dlkm_path_placeholder := ||VENDOR_DLKM-PATH-PH||
 _odm_dlkm_path_placeholder := ||ODM_DLKM-PATH-PH||
+_system_dlkm_path_placeholder := ||SYSTEM_DLKM-PATH-PH||
 TARGET_COPY_OUT_VENDOR := $(_vendor_path_placeholder)
 TARGET_COPY_OUT_VENDOR_RAMDISK := vendor_ramdisk
 TARGET_COPY_OUT_PRODUCT := $(_product_path_placeholder)
@@ -288,6 +290,7 @@
 TARGET_COPY_OUT_ODM := $(_odm_path_placeholder)
 TARGET_COPY_OUT_VENDOR_DLKM := $(_vendor_dlkm_path_placeholder)
 TARGET_COPY_OUT_ODM_DLKM := $(_odm_dlkm_path_placeholder)
+TARGET_COPY_OUT_SYSTEM_DLKM := $(_system_dlkm_path_placeholder)
 
 # Returns the non-sanitized version of the path provided in $1.
 define get_non_asan_path
@@ -323,7 +326,7 @@
 define dump-variables-rbc
 $(file >$(OUT_DIR)/dump-variables-rbc-temp.txt,$(subst $(space),$(newline),$(.VARIABLES)))\
 $(file >$(1),\
-$(foreach v, $(shell grep -he "^[A-Z][A-Z0-9_]*$$" $(OUT_DIR)/dump-variables-rbc-temp.txt | grep -vhE "^(SOONG_.*|LOCAL_PATH|TOPDIR|PRODUCT_COPY_OUT_.*)$$"),\
+$(foreach v, $(shell grep -he "^[A-Z][A-Z0-9_]*$$" $(OUT_DIR)/dump-variables-rbc-temp.txt | grep -vhE "^(SOONG_.*|LOCAL_PATH|TOPDIR|PRODUCT_COPY_OUT_.*|TRACE_BEGIN_SOONG)$$"),\
 $(v) := $(strip $($(v)))$(newline))\
 $(foreach ns,$(SOONG_CONFIG_NAMESPACES),\
 $(foreach v,$(SOONG_CONFIG_$(ns)),\
@@ -839,6 +842,36 @@
     $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_DLKM_APPS_PRIVILEGED \
     , odm_dlkm should not contain any executables, libraries, or apps)
 
+TARGET_OUT_SYSTEM_DLKM := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_SYSTEM_DLKM)
+
+# Unlike other partitions, system_dlkm should only contain kernel modules.
+TARGET_OUT_SYSTEM_DLKM_EXECUTABLES :=
+TARGET_OUT_SYSTEM_DLKM_OPTIONAL_EXECUTABLES :=
+TARGET_OUT_SYSTEM_DLKM_SHARED_LIBRARIES :=
+TARGET_OUT_SYSTEM_DLKM_RENDERSCRIPT_BITCODE :=
+TARGET_OUT_SYSTEM_DLKM_JAVA_LIBRARIES :=
+TARGET_OUT_SYSTEM_DLKM_APPS :=
+TARGET_OUT_SYSTEM_DLKM_APPS_PRIVILEGED :=
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SYSTEM_DLKM_EXECUTABLES :=
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SYSTEM_DLKM_SHARED_LIBRARIES :=
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SYSTEM_DLKM_RENDERSCRIPT_BITCODE :=
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SYSTEM_DLKM_APPS :=
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SYSTEM_DLKM_APPS_PRIVILEGED :=
+$(KATI_obsolete_var \
+    TARGET_OUT_SYSTEM_DLKM_EXECUTABLES \
+    TARGET_OUT_SYSTEM_DLKM_OPTIONAL_EXECUTABLES \
+    TARGET_OUT_SYSTEM_DLKM_SHARED_LIBRARIES \
+    TARGET_OUT_SYSTEM_DLKM_RENDERSCRIPT_BITCODE \
+    TARGET_OUT_SYSTEM_DLKM_JAVA_LIBRARIES \
+    TARGET_OUT_SYSTEM_DLKM_APPS \
+    TARGET_OUT_SYSTEM_DLKM_APPS_PRIVILEGED \
+    $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SYSTEM_DLKM_EXECUTABLES \
+    $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SYSTEM_DLKM_SHARED_LIBRARIES \
+    $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SYSTEM_DLKM_RENDERSCRIPT_BITCODE \
+    $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SYSTEM_DLKM_APPS \
+    $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SYSTEM_DLKM_APPS_PRIVILEGED \
+    , system_dlkm should not contain any executables, libraries, or apps)
+
 TARGET_OUT_PRODUCT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_PRODUCT)
 TARGET_OUT_PRODUCT_EXECUTABLES := $(TARGET_OUT_PRODUCT)/bin
 .KATI_READONLY := TARGET_OUT_PRODUCT
@@ -951,6 +984,9 @@
 TARGET_VENDOR_DEBUG_RAMDISK_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_VENDOR_DEBUG_RAMDISK)
 TARGET_TEST_HARNESS_RAMDISK_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_TEST_HARNESS_RAMDISK)
 
+TARGET_SYSTEM_DLKM_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_SYSTEM_DLKM)
+.KATI_READONLY := TARGET_SYSTEM_DLKM_OUT
+
 TARGET_VENDOR_RAMDISK_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_VENDOR_RAMDISK)
 
 TARGET_ROOT_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ROOT)
diff --git a/core/main.mk b/core/main.mk
index 56007e2..d5dc49f 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -142,11 +142,6 @@
 #
 # -----------------------------------------------------------------
 # Add the product-defined properties to the build properties.
-ifdef PRODUCT_SHIPPING_API_LEVEL
-ADDITIONAL_SYSTEM_PROPERTIES += \
-  ro.product.first_api_level=$(PRODUCT_SHIPPING_API_LEVEL)
-endif
-
 ifneq ($(BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED), true)
   ADDITIONAL_SYSTEM_PROPERTIES += $(PRODUCT_PROPERTY_OVERRIDES)
 else
@@ -240,7 +235,7 @@
 ADDITIONAL_VENDOR_PROPERTIES += ro.bionic.2nd_arch=$(TARGET_2ND_ARCH)
 ADDITIONAL_VENDOR_PROPERTIES += ro.bionic.2nd_cpu_variant=$(TARGET_2ND_CPU_VARIANT_RUNTIME)
 
-ADDITIONAL_VENDOR_PROPERTIES += persist.sys.dalvik.vm.lib.2=
+ADDITIONAL_VENDOR_PROPERTIES += persist.sys.dalvik.vm.lib.2=libart.so
 ADDITIONAL_VENDOR_PROPERTIES += dalvik.vm.isa.$(TARGET_ARCH).variant=$(DEX2OAT_TARGET_CPU_VARIANT_RUNTIME)
 ifneq ($(DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES),)
   ADDITIONAL_VENDOR_PROPERTIES += dalvik.vm.isa.$(TARGET_ARCH).features=$(DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES)
@@ -348,7 +343,7 @@
 ADDITIONAL_PRODUCT_PROPERTIES += ro.build.characteristics=$(TARGET_AAPT_CHARACTERISTICS)
 
 ifeq ($(AB_OTA_UPDATER),true)
-ADDITIONAL_PRODUCT_PROPERTIES += ro.product.ab_ota_partitions=$(subst $(space),$(comma),$(strip $(AB_OTA_PARTITIONS)))
+ADDITIONAL_PRODUCT_PROPERTIES += ro.product.ab_ota_partitions=$(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS)))
 endif
 
 # -----------------------------------------------------------------
@@ -1209,7 +1204,8 @@
         $(subst $(_odm_path_placeholder),$(TARGET_COPY_OUT_ODM),\
           $(subst $(_vendor_dlkm_path_placeholder),$(TARGET_COPY_OUT_VENDOR_DLKM),\
             $(subst $(_odm_dlkm_path_placeholder),$(TARGET_COPY_OUT_ODM_DLKM),\
-              $(foreach p,$(1),$(call append-path,$(PRODUCT_OUT),$(p)$(2)))))))))
+              $(subst $(_system_dlkm_path_placeholder),$(TARGET_COPY_OUT_SYSTEM_DLKM),\
+                $(foreach p,$(1),$(call append-path,$(PRODUCT_OUT),$(p)$(2))))))))))
 endef
 
 # Returns modules included automatically as a result of certain BoardConfig
@@ -1594,6 +1590,9 @@
 .PHONY: odm_dlkmimage
 odm_dlkmimage: $(INSTALLED_ODM_DLKMIMAGE_TARGET)
 
+.PHONY: system_dlkmimage
+system_dlkmimage: $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)
+
 .PHONY: systemotherimage
 systemotherimage: $(INSTALLED_SYSTEMOTHERIMAGE_TARGET)
 
@@ -1604,7 +1603,7 @@
 bootimage: $(INSTALLED_BOOTIMAGE_TARGET)
 
 .PHONY: initbootimage
-bootimage: $(INSTALLED_INIT_BOOT_IMAGE_TARGET)
+initbootimage: $(INSTALLED_INIT_BOOT_IMAGE_TARGET)
 
 ifeq (true,$(PRODUCT_EXPORT_BOOT_IMAGE_TO_DIST))
 $(call dist-for-goals, bootimage, $(INSTALLED_BOOTIMAGE_TARGET))
@@ -1653,6 +1652,7 @@
     $(INSTALLED_ODMIMAGE_TARGET) \
     $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) \
     $(INSTALLED_ODM_DLKMIMAGE_TARGET) \
+    $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET) \
     $(INSTALLED_SUPERIMAGE_EMPTY_TARGET) \
     $(INSTALLED_PRODUCTIMAGE_TARGET) \
     $(INSTALLED_SYSTEMOTHERIMAGE_TARGET) \
@@ -1668,6 +1668,8 @@
     $(INSTALLED_FILES_JSON_VENDOR_DLKM) \
     $(INSTALLED_FILES_FILE_ODM_DLKM) \
     $(INSTALLED_FILES_JSON_ODM_DLKM) \
+    $(INSTALLED_FILES_FILE_SYSTEM_DLKM) \
+    $(INSTALLED_FILES_JSON_SYSTEM_DLKM) \
     $(INSTALLED_FILES_FILE_PRODUCT) \
     $(INSTALLED_FILES_JSON_PRODUCT) \
     $(INSTALLED_FILES_FILE_SYSTEM_EXT) \
@@ -1818,6 +1820,8 @@
     $(INSTALLED_FILES_JSON_VENDOR_DLKM) \
     $(INSTALLED_FILES_FILE_ODM_DLKM) \
     $(INSTALLED_FILES_JSON_ODM_DLKM) \
+    $(INSTALLED_FILES_FILE_SYSTEM_DLKM) \
+    $(INSTALLED_FILES_JSON_SYSTEM_DLKM) \
     $(INSTALLED_FILES_FILE_PRODUCT) \
     $(INSTALLED_FILES_JSON_PRODUCT) \
     $(INSTALLED_FILES_FILE_SYSTEM_EXT) \
diff --git a/core/notice_files.mk b/core/notice_files.mk
index 4edbbb8..4ebbe2e 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -11,10 +11,6 @@
 
 ifneq (,$(strip $(LOCAL_LICENSE_PACKAGE_NAME)))
 license_package_name:=$(strip $(LOCAL_LICENSE_PACKAGE_NAME))
-else ifdef my_register_name
-license_package_name:=$(my_register_name)
-else
-license_package_name:=$(strip $(LOCAL_MODULE))
 endif
 
 ifneq (,$(strip $(LOCAL_LICENSE_INSTALL_MAP)))
diff --git a/core/product-graph.mk b/core/product-graph.mk
index d425b22..6d51db1 100644
--- a/core/product-graph.mk
+++ b/core/product-graph.mk
@@ -14,13 +14,10 @@
 # limitations under the License.
 #
 
-# the foreach and the if remove the single space entries that creep in because of the evals
+# the sort also acts as a strip to remove the single space entries that creep in because of the evals
 define gather-all-products
-$(sort $(foreach p, \
-	$(eval _all_products_visited := )
-  $(call all-products-inner, $(PARENT_PRODUCT_FILES)) \
-	, $(if $(strip $(p)),$(strip $(p)),)) \
-)
+$(eval _all_products_visited := )\
+$(sort $(call all-products-inner, $(PARENT_PRODUCT_FILES)))
 endef
 
 define all-products-inner
@@ -72,7 +69,7 @@
 $(hide) echo \"$(1)\" [ \
 label=\"$(dir $(1))\\n$(notdir $(1))\\n\\n$(subst $(close_parenthesis),,$(subst $(open_parethesis),,$(call get-product-var,$(1),PRODUCT_MODEL)))\\n$(call get-product-var,$(1),PRODUCT_DEVICE)\" \
 style=\"filled\" fillcolor=\"$(strip $(call node-color,$(1)))\" \
-colorscheme=\"svg\" fontcolor=\"darkblue\" href=\"products/$(1).html\" \
+colorscheme=\"svg\" fontcolor=\"darkblue\" \
 ] >> $(2)
 
 endef
@@ -95,66 +92,7 @@
 	false
 endif
 
-# Evaluates to the name of the product file
-# $(1) product file
-define product-debug-filename
-$(OUT_DIR)/products/$(strip $(1)).html
-endef
-
-# Makes a rule for the product debug info
-# $(1) product file
-define transform-product-debug
-$(OUT_DIR)/products/$(strip $(1)).txt: $(this_makefile)
-	@echo Product debug info file: $$@
-	$(hide) rm -f $$@
-	$(hide) mkdir -p $$(dir $$@)
-	$(hide) echo 'FILE=$(strip $(1))' >> $$@
-	$(hide) echo 'PRODUCT_NAME=$(call get-product-var,$(1),PRODUCT_NAME)' >> $$@
-	$(hide) echo 'PRODUCT_MODEL=$(call get-product-var,$(1),PRODUCT_MODEL)' >> $$@
-	$(hide) echo 'PRODUCT_LOCALES=$(call get-product-var,$(1),PRODUCT_LOCALES)' >> $$@
-	$(hide) echo 'PRODUCT_AAPT_CONFIG=$(call get-product-var,$(1),PRODUCT_AAPT_CONFIG)' >> $$@
-	$(hide) echo 'PRODUCT_AAPT_PREF_CONFIG=$(call get-product-var,$(1),PRODUCT_AAPT_PREF_CONFIG)' >> $$@
-	$(hide) echo 'PRODUCT_PACKAGES=$(call get-product-var,$(1),PRODUCT_PACKAGES)' >> $$@
-	$(hide) echo 'PRODUCT_DEVICE=$(call get-product-var,$(1),PRODUCT_DEVICE)' >> $$@
-	$(hide) echo 'PRODUCT_MANUFACTURER=$(call get-product-var,$(1),PRODUCT_MANUFACTURER)' >> $$@
-	$(hide) echo 'PRODUCT_PROPERTY_OVERRIDES=$(call get-product-var,$(1),PRODUCT_PROPERTY_OVERRIDES)' >> $$@
-	$(hide) echo 'PRODUCT_DEFAULT_PROPERTY_OVERRIDES=$(call get-product-var,$(1),PRODUCT_DEFAULT_PROPERTY_OVERRIDES)' >> $$@
-	$(hide) echo 'PRODUCT_SYSTEM_DEFAULT_PROPERTIES=$(call get-product-var,$(1),PRODUCT_SYSTEM_DEFAULT_PROPERTIES)' >> $$@
-	$(hide) echo 'PRODUCT_PRODUCT_PROPERTIES=$(call get-product-var,$(1),PRODUCT_PRODUCT_PROPERTIES)' >> $$@
-	$(hide) echo 'PRODUCT_SYSTEM_EXT_PROPERTIES=$(call get-product-var,$(1),PRODUCT_SYSTEM_EXT_PROPERTIES)' >> $$@
-	$(hide) echo 'PRODUCT_ODM_PROPERTIES=$(call get-product-var,$(1),PRODUCT_ODM_PROPERTIES)' >> $$@
-	$(hide) echo 'PRODUCT_CHARACTERISTICS=$(call get-product-var,$(1),PRODUCT_CHARACTERISTICS)' >> $$@
-	$(hide) echo 'PRODUCT_COPY_FILES=$(call get-product-var,$(1),PRODUCT_COPY_FILES)' >> $$@
-	$(hide) echo 'PRODUCT_OTA_PUBLIC_KEYS=$(call get-product-var,$(1),PRODUCT_OTA_PUBLIC_KEYS)' >> $$@
-	$(hide) echo 'PRODUCT_EXTRA_OTA_KEYS=$(call get-product-var,$(1),PRODUCT_EXTRA_OTA_KEYS)' >> $$@
-	$(hide) echo 'PRODUCT_EXTRA_RECOVERY_KEYS=$(call get-product-var,$(1),PRODUCT_EXTRA_RECOVERY_KEYS)' >> $$@
-	$(hide) echo 'PRODUCT_PACKAGE_OVERLAYS=$(call get-product-var,$(1),PRODUCT_PACKAGE_OVERLAYS)' >> $$@
-	$(hide) echo 'DEVICE_PACKAGE_OVERLAYS=$(call get-product-var,$(1),DEVICE_PACKAGE_OVERLAYS)' >> $$@
-	$(hide) echo 'PRODUCT_SDK_ADDON_NAME=$(call get-product-var,$(1),PRODUCT_SDK_ADDON_NAME)' >> $$@
-	$(hide) echo 'PRODUCT_SDK_ADDON_COPY_FILES=$(call get-product-var,$(1),PRODUCT_SDK_ADDON_COPY_FILES)' >> $$@
-	$(hide) echo 'PRODUCT_SDK_ADDON_COPY_MODULES=$(call get-product-var,$(1),PRODUCT_SDK_ADDON_COPY_MODULES)' >> $$@
-	$(hide) echo 'PRODUCT_SDK_ADDON_DOC_MODULES=$(call get-product-var,$(1),PRODUCT_SDK_ADDON_DOC_MODULES)' >> $$@
-	$(hide) echo 'PRODUCT_DEFAULT_WIFI_CHANNELS=$(call get-product-var,$(1),PRODUCT_DEFAULT_WIFI_CHANNELS)' >> $$@
-	$(hide) echo 'PRODUCT_DEFAULT_DEV_CERTIFICATE=$(call get-product-var,$(1),PRODUCT_DEFAULT_DEV_CERTIFICATE)' >> $$@
-	$(hide) echo 'PRODUCT_MAINLINE_SEPOLICY_DEV_CERTIFICATES=$(call get-product-var,$(1),PRODUCT_MAINLINE_SEPOLICY_DEV_CERTIFICATES)' >> $$@
-	$(hide) echo 'PRODUCT_RESTRICT_VENDOR_FILES=$(call get-product-var,$(1),PRODUCT_RESTRICT_VENDOR_FILES)' >> $$@
-	$(hide) echo 'PRODUCT_VENDOR_KERNEL_HEADERS=$(call get-product-var,$(1),PRODUCT_VENDOR_KERNEL_HEADERS)' >> $$@
-
-$(call product-debug-filename, $(p)): \
-			$(OUT_DIR)/products/$(strip $(1)).txt \
-			build/make/tools/product_debug.py \
-			$(this_makefile)
-	@echo Product debug html file: $$@
-	$(hide) mkdir -p $$(dir $$@)
-	$(hide) cat $$< | build/make/tools/product_debug.py > $$@
-endef
-
 ifeq (,$(RBC_PRODUCT_CONFIG)$(RBC_NO_PRODUCT_GRAPH)$(RBC_BOARD_CONFIG))
-product_debug_files:=
-$(foreach p,$(all_products), \
-			$(eval $(call transform-product-debug, $(p))) \
-			$(eval product_debug_files += $(call product-debug-filename, $(p))) \
-   )
 
 .PHONY: product-graph
 product-graph: $(products_graph)
diff --git a/core/product.mk b/core/product.mk
index 7192226..032ca6b 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -14,98 +14,6 @@
 # limitations under the License.
 #
 
-#
-# Functions for including AndroidProducts.mk files
-# PRODUCT_MAKEFILES is set up in AndroidProducts.mks.
-# Format of PRODUCT_MAKEFILES:
-# <product_name>:<path_to_the_product_makefile>
-# If the <product_name> is the same as the base file name (without dir
-# and the .mk suffix) of the product makefile, "<product_name>:" can be
-# omitted.
-
-#
-# Returns the list of all AndroidProducts.mk files.
-# $(call ) isn't necessary.
-#
-define _find-android-products-files
-$(file <$(OUT_DIR)/.module_paths/AndroidProducts.mk.list) \
-  $(SRC_TARGET_DIR)/product/AndroidProducts.mk
-endef
-
-#
-# For entries returned by get-product-makefiles, decode an entry to a short
-# product name. These either may be in the form of <name>:path/to/file.mk or
-# path/to/<name>.mk
-# $(1): The entry to decode
-#
-# Returns two words:
-#   <name> <file>
-#
-define _decode-product-name
-$(strip \
-  $(eval _cpm_words := $(subst :,$(space),$(1))) \
-  $(if $(word 2,$(_cpm_words)), \
-    $(wordlist 1,2,$(_cpm_words)), \
-    $(basename $(notdir $(1))) $(1)))
-endef
-
-#
-# Validates the new common lunch choices -- ensures that they're in an
-# appropriate form, and are paired with definitions of their products.
-# $(1): The new list of COMMON_LUNCH_CHOICES
-# $(2): The new list of PRODUCT_MAKEFILES
-#
-define _validate-common-lunch-choices
-$(strip $(foreach choice,$(1),\
-  $(eval _parts := $(subst -,$(space),$(choice))) \
-  $(if $(call math_lt,$(words $(_parts)),2), \
-    $(error $(LOCAL_DIR): $(choice): Invalid lunch choice)) \
-  $(if $(call math_gt_or_eq,$(words $(_parts)),4), \
-    $(error $(LOCAL_DIR): $(choice): Invalid lunch choice)) \
-  $(if $(filter-out eng userdebug user,$(word 2,$(_parts))), \
-    $(error $(LOCAL_DIR): $(choice): Invalid variant: $(word 2,$(_parts)))) \
-  $(if $(filter-out $(foreach p,$(2),$(call _decode-product-name,$(p))),$(word 1,$(_parts))), \
-    $(error $(LOCAL_DIR): $(word 1,$(_parts)): Product not defined in this file)) \
-  ))
-endef
-
-#
-# Returns the sorted concatenation of PRODUCT_MAKEFILES
-# variables set in the given AndroidProducts.mk files.
-# $(1): the list of AndroidProducts.mk files.
-#
-# As a side-effect, COMMON_LUNCH_CHOICES will be set to a
-# union of all of the COMMON_LUNCH_CHOICES definitions within
-# each AndroidProducts.mk file.
-#
-define get-product-makefiles
-$(sort \
-  $(eval _COMMON_LUNCH_CHOICES :=) \
-  $(foreach f,$(1), \
-    $(eval PRODUCT_MAKEFILES :=) \
-    $(eval COMMON_LUNCH_CHOICES :=) \
-    $(eval LOCAL_DIR := $(patsubst %/,%,$(dir $(f)))) \
-    $(eval include $(f)) \
-    $(call _validate-common-lunch-choices,$(COMMON_LUNCH_CHOICES),$(PRODUCT_MAKEFILES)) \
-    $(eval _COMMON_LUNCH_CHOICES += $(COMMON_LUNCH_CHOICES)) \
-    $(PRODUCT_MAKEFILES) \
-   ) \
-  $(eval PRODUCT_MAKEFILES :=) \
-  $(eval LOCAL_DIR :=) \
-  $(eval COMMON_LUNCH_CHOICES := $(sort $(_COMMON_LUNCH_CHOICES))) \
-  $(eval _COMMON_LUNCH_CHOICES :=) \
- )
-endef
-
-#
-# Returns the sorted concatenation of all PRODUCT_MAKEFILES
-# variables set in all AndroidProducts.mk files.
-# $(call ) isn't necessary.
-#
-define get-all-product-makefiles
-$(call get-product-makefiles,$(_find-android-products-files))
-endef
-
 # Variables that are meant to hold only a single value.
 # - The value set in the current makefile takes precedence over inherited values
 # - If multiple inherited makefiles set the var, the first-inherited value wins
@@ -268,6 +176,7 @@
 _product_single_value_vars += PRODUCT_ODM_VERITY_PARTITION
 _product_single_value_vars += PRODUCT_VENDOR_DLKM_VERITY_PARTITION
 _product_single_value_vars += PRODUCT_ODM_DLKM_VERITY_PARTITION
+_product_single_value_vars += PRODUCT_SYSTEM_DLKM_VERITY_PARTITION
 _product_single_value_vars += PRODUCT_SYSTEM_SERVER_DEBUG_INFO
 _product_single_value_vars += PRODUCT_OTHER_JAVA_DEBUG_INFO
 
@@ -298,6 +207,7 @@
 _product_single_value_vars += PRODUCT_ODM_BASE_FS_PATH
 _product_single_value_vars += PRODUCT_VENDOR_DLKM_BASE_FS_PATH
 _product_single_value_vars += PRODUCT_ODM_DLKM_BASE_FS_PATH
+_product_single_value_vars += PRODUCT_SYSTEM_DLKM_BASE_FS_PATH
 
 # The first API level this product shipped with
 _product_single_value_vars += PRODUCT_SHIPPING_API_LEVEL
@@ -391,6 +301,7 @@
 _product_single_value_vars += PRODUCT_BUILD_ODM_IMAGE
 _product_single_value_vars += PRODUCT_BUILD_VENDOR_DLKM_IMAGE
 _product_single_value_vars += PRODUCT_BUILD_ODM_DLKM_IMAGE
+_product_single_value_vars += PRODUCT_BUILD_SYSTEM_DLKM_IMAGE
 _product_single_value_vars += PRODUCT_BUILD_CACHE_IMAGE
 _product_single_value_vars += PRODUCT_BUILD_RAMDISK_IMAGE
 _product_single_value_vars += PRODUCT_BUILD_USERDATA_IMAGE
@@ -489,17 +400,20 @@
 # See e.g. product-graph.mk for an example of this.
 #
 define inherit-product
-  $(if $(findstring ../,$(1)),\
-    $(eval np := $(call normalize-paths,$(1))),\
-    $(eval np := $(strip $(1))))\
-  $(foreach v,$(_product_var_list), \
-      $(eval $(v) := $($(v)) $(INHERIT_TAG)$(np))) \
-  $(eval current_mk := $(strip $(word 1,$(_include_stack)))) \
-  $(eval inherit_var := PRODUCTS.$(current_mk).INHERITS_FROM) \
-  $(eval $(inherit_var) := $(sort $($(inherit_var)) $(np))) \
-  $(eval PARENT_PRODUCT_FILES := $(sort $(PARENT_PRODUCT_FILES) $(current_mk))) \
-  $(call dump-inherit,$(strip $(word 1,$(_include_stack))),$(1)) \
-  $(call dump-config-vals,$(current_mk),inherit)
+  $(eval _inherit_product_wildcard := $(wildcard $(1)))\
+  $(if $(_inherit_product_wildcard),,$(error $(1) does not exist.))\
+  $(foreach part,$(_inherit_product_wildcard),\
+    $(if $(findstring ../,$(part)),\
+      $(eval np := $(call normalize-paths,$(part))),\
+      $(eval np := $(strip $(part))))\
+    $(foreach v,$(_product_var_list), \
+        $(eval $(v) := $($(v)) $(INHERIT_TAG)$(np))) \
+    $(eval current_mk := $(strip $(word 1,$(_include_stack)))) \
+    $(eval inherit_var := PRODUCTS.$(current_mk).INHERITS_FROM) \
+    $(eval $(inherit_var) := $(sort $($(inherit_var)) $(np))) \
+    $(eval PARENT_PRODUCT_FILES := $(sort $(PARENT_PRODUCT_FILES) $(current_mk))) \
+    $(call dump-inherit,$(strip $(word 1,$(_include_stack))),$(1)) \
+    $(call dump-config-vals,$(current_mk),inherit))
 endef
 
 # Specifies a number of path prefixes, relative to PRODUCT_OUT, where the
diff --git a/core/product_config.mk b/core/product_config.mk
index 6fae73e..be4aded 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -112,8 +112,7 @@
 
 # Return empty unless the board is QCOM
 define is-vendor-board-qcom
-$(if $(strip $(TARGET_BOARD_PLATFORM) $(QCOM_BOARD_PLATFORMS)),\
-  $(filter $(TARGET_BOARD_PLATFORM),$(QCOM_BOARD_PLATFORMS)),\
+$(if $(strip $(TARGET_BOARD_PLATFORM) $(QCOM_BOARD_PLATFORMS)),$(filter $(TARGET_BOARD_PLATFORM),$(QCOM_BOARD_PLATFORMS)),\
   $(error both TARGET_BOARD_PLATFORM=$(TARGET_BOARD_PLATFORM) and QCOM_BOARD_PLATFORMS=$(QCOM_BOARD_PLATFORMS)))
 endef
 
@@ -147,32 +146,73 @@
 include $(BUILD_SYSTEM)/product.mk
 include $(BUILD_SYSTEM)/device.mk
 
-# Read in all of the product definitions specified by the AndroidProducts.mk
-# files in the tree.
-all_product_configs := $(get-all-product-makefiles)
+# Read all product definitions.
+#
+# Products are defined in AndroidProducts.mk files:
+android_products_makefiles := $(file <$(OUT_DIR)/.module_paths/AndroidProducts.mk.list) \
+  $(SRC_TARGET_DIR)/product/AndroidProducts.mk
 
-all_named_products :=
+# An AndroidProducts.mk file sets the following variables:
+#   PRODUCT_MAKEFILES specifies product makefiles. Each item in this list
+#     is either a <product>:path/to/file.mk, or just path/to/<product>.mk
+#   COMMON_LUNCH_CHOICES specifies <product>-<variant> values to be shown
+#     in the `lunch` menu
+#   STARLARK_OPT_IN_PRODUCTS specifies products to use Starlark-based
+#     product configuration by default
 
-# Find the product config makefile for the current product.
-# all_product_configs consists items like:
-# <product_name>:<path_to_the_product_makefile>
-# or just <path_to_the_product_makefile> in case the product name is the
-# same as the base filename of the product config makefile.
-current_product_makefile :=
-all_product_makefiles :=
-$(foreach f, $(all_product_configs),\
-    $(eval _cpm_words := $(call _decode-product-name,$(f)))\
-    $(eval _cpm_word1 := $(word 1,$(_cpm_words)))\
-    $(eval _cpm_word2 := $(word 2,$(_cpm_words)))\
-    $(eval all_product_makefiles += $(_cpm_word2))\
-    $(eval all_named_products += $(_cpm_word1))\
-    $(if $(filter $(TARGET_PRODUCT),$(_cpm_word1)),\
-        $(eval current_product_makefile += $(_cpm_word2)),))
-_cpm_words :=
-_cpm_word1 :=
-_cpm_word2 :=
-current_product_makefile := $(strip $(current_product_makefile))
-all_product_makefiles := $(strip $(all_product_makefiles))
+# Builds a list of first/second elements of each pair:
+#   $(call _first,a:A b:B,:) returns 'a b'
+#   $(call _second,a-A b-B,-) returns 'A B'
+_first=$(filter-out $(2)%,$(subst $(2),$(space)$(2),$(1)))
+_second=$(filter-out %$(2),$(subst $(2),$(2)$(space),$(1)))
+
+# Returns <product>:<path> pair from a PRODUCT_MAKEFILE item.
+# If an item is <product>:path/to/file.mk, return it as is,
+# otherwise assume that an item is path/to/<product>.mk and
+# return <product>:path/to/<product>.mk
+_product-spec=$(strip $(if $(findstring :,$(1)),$(1),$(basename $(notdir $(1))):$(1)))
+
+# Reads the given AndroidProducts.mk file and sets the following variables:
+#  ap_product_paths -- the list of <product>:<path> pairs
+#  ap_common_lunch_choices -- the list of <product>-<build variant> items
+#  ap_products_using_starlark_config -- the list of products using starlark config
+# In addition, validates COMMON_LUNCH_CHOICES and STARLARK_OPT_IN_PRODUCTS values
+define _read-ap-file
+  $(eval PRODUCT_MAKEFILES :=) \
+  $(eval COMMON_LUNCH_CHOICES :=) \
+  $(eval STARLARK_OPT_IN_PRODUCTS := ) \
+  $(eval ap_product_paths :=) \
+  $(eval LOCAL_DIR := $(patsubst %/,%,$(dir $(f)))) \
+  $(eval include $(f)) \
+  $(foreach p, $(PRODUCT_MAKEFILES),$(eval ap_product_paths += $(call _product-spec,$(p)))) \
+  $(eval ap_common_lunch_choices  := $(COMMON_LUNCH_CHOICES)) \
+  $(eval ap_products_using_starlark_config := $(STARLARK_OPT_IN_PRODUCTS)) \
+  $(eval _products := $(call _first,$(ap_product_paths),:)) \
+  $(eval _bad := $(filter-out $(_products),$(call _first,$(ap_common_lunch_choices),-))) \
+  $(if $(_bad),$(error COMMON_LUNCH_CHOICES contains product(s) not defined in this file: $(_bad))) \
+  $(eval _bad := $(filter-out %-eng %-userdebug %-user,$(ap_common_lunch_choices))) \
+  $(if $(_bad),$(error invalid variant in COMMON_LUNCH_CHOICES: $(_bad)))
+  $(eval _bad := $(filter-out $(_products),$(ap_products_using_starlark_config))) \
+  $(if $(_bad),$(error STARLARK_OPT_IN_PRODUCTS contains product(s) not defined in this file: $(_bad)))
+endef
+
+# Build cumulative lists of all product specs/lunch choices/Starlark-based products.
+product_paths :=
+common_lunch_choices :=
+products_using_starlark_config :=
+$(foreach f,$(android_products_makefiles), \
+    $(call _read-ap-file,$(f)) \
+    $(eval product_paths += $(ap_product_paths)) \
+    $(eval common_lunch_choices += $(ap_common_lunch_choices)) \
+    $(eval products_using_starlark_config += $(ap_products_using_starlark_config)) \
+)
+
+# Dedup, extract product names, etc.
+product_paths :=$(sort $(product_paths))
+all_named_products := $(call _first,$(product_paths),:)
+all_product_makefiles := $(call _second,$(product_paths),:)
+current_product_makefile := $(call _second,$(filter $(TARGET_PRODUCT):%,$(product_paths)),:)
+COMMON_LUNCH_CHOICES := $(sort $(common_lunch_choices))
 
 load_all_product_makefiles :=
 ifneq (,$(filter product-graph, $(MAKECMDGOALS)))
@@ -196,11 +236,10 @@
 $(call import-products, $(all_product_makefiles))
 else
 # Import just the current product.
-ifndef current_product_makefile
-$(error Can not locate config makefile for product "$(TARGET_PRODUCT)")
-endif
-ifneq (1,$(words $(current_product_makefile)))
-$(error Product "$(TARGET_PRODUCT)" ambiguous: matches $(current_product_makefile))
+$(if $(current_product_makefile),,$(error Can not locate config makefile for product "$(TARGET_PRODUCT)"))
+ifneq (,$(filter $(TARGET_PRODUCT),$(products_using_starlark_config)))
+  RBC_PRODUCT_CONFIG := true
+  RBC_BOARD_CONFIG := true
 endif
 
 ifndef RBC_PRODUCT_CONFIG
@@ -566,6 +605,7 @@
     ODM \
     VENDOR_DLKM \
     ODM_DLKM \
+    SYSTEM_DLKM \
     CACHE \
     RAMDISK \
     USERDATA \
diff --git a/core/product_config.rbc b/core/product_config.rbc
index 1ccffcc..8f27c99 100644
--- a/core/product_config.rbc
+++ b/core/product_config.rbc
@@ -158,17 +158,17 @@
 
         # Run this one, obtaining its configuration and child PCMs.
         if _options.trace_modules:
-            print("%d:" % n)
+            print("#%d: %s" % (n, name))
 
         # Run PCM.
         handle = __h_new()
         pcm(globals, handle)
 
         # Now we know everything about this PCM, record it in 'configs'.
-        children = __h_inherited_modules(handle)
+        children = handle.inherited_modules
         if _options.trace_modules:
-            print("   ", "    ".join(children.keys()))
-        configs[name] = (pcm, __h_cfg(handle), children.keys(), False)
+            print("#   ", "    ".join(children.keys()))
+        configs[name] = (pcm, handle.cfg, children.keys(), False)
         pcm_count = pcm_count + 1
 
         if len(children) == 0:
@@ -191,9 +191,9 @@
         fail("Ran %d modules but postfix tree has only %d entries" % (pcm_count, len(config_postfix)))
 
     if _options.trace_modules:
-        print("\n---Postfix---")
+        print("\n#---Postfix---")
         for x in config_postfix:
-            print("   ", x)
+            print("#   ", x)
 
     # Traverse the tree from the bottom, evaluating inherited values
     for pcm_name in config_postfix:
@@ -235,7 +235,7 @@
     input_variables_init(globals_base, h_base)
     input_variables_init(globals, h)
     board_config_init(globals, h)
-    return (globals, _dictionary_difference(h[0], h_base[0]), globals_base)
+    return (globals, _dictionary_difference(h.cfg, h_base.cfg), globals_base)
 
 
 def _substitute_inherited(configs, pcm_name, cfg):
@@ -392,11 +392,11 @@
 #   default value list (initially empty, modified by inheriting)
 def __h_new():
     """Constructs a handle which is passed to PCM."""
-    return (dict(), dict(), list())
-
-def __h_inherited_modules(handle):
-    """Returns PCM's inherited modules dict."""
-    return handle[1]
+    return struct(
+        cfg = dict(),
+        inherited_modules = dict(),
+        default_list_value = list()
+    )
 
 def __h_cfg(handle):
     """Returns PCM's product configuration attributes dict.
@@ -404,7 +404,7 @@
     This function is also exported as rblf.cfg, and every PCM
     calls it at the beginning.
     """
-    return handle[0]
+    return handle.cfg
 
 def _setdefault(handle, attr):
     """If attribute has not been set, assigns default value to it.
@@ -413,9 +413,9 @@
     Only list attributes are initialized this way. The default
     value is kept in the PCM's handle. Calling inherit() updates it.
     """
-    cfg = handle[0]
+    cfg = handle.cfg
     if cfg.get(attr) == None:
-        cfg[attr] = list(handle[2])
+        cfg[attr] = list(handle.default_list_value)
     return cfg[attr]
 
 def _inherit(handle, pcm_name, pcm):
@@ -424,12 +424,11 @@
     This function is exported as rblf.inherit, PCM calls it when
     a module is inherited.
     """
-    cfg, inherited, default_lv = handle
-    inherited[pcm_name] = pcm
-    default_lv.append(_indirect(pcm_name))
+    handle.inherited_modules[pcm_name] = pcm
+    handle.default_list_value.append(_indirect(pcm_name))
 
     # Add inherited module reference to all configuration values
-    for attr, val in cfg.items():
+    for attr, val in handle.cfg.items():
         if type(val) == "list":
             val.append(_indirect(pcm_name))
 
@@ -467,14 +466,21 @@
     #TODO(asmundak)
     pass
 
+def _add_product_dex_preopt_module_config(handle, modules, config):
+    """Equivalent to add-product-dex-preopt-module-config from build/make/core/product.mk."""
+    modules = __words(modules)
+    config = _mkstrip(config).replace(" ", "|@SP@|")
+    _setdefault(handle, "PRODUCT_DEX_PREOPT_MODULE_CONFIGS")
+    handle.cfg["PRODUCT_DEX_PREOPT_MODULE_CONFIGS"] += [m + "=" + config for m in modules]
+
 def _file_wildcard_exists(file_pattern):
     """Return True if there are files matching given bash pattern."""
     return len(rblf_wildcard(file_pattern)) > 0
 
 def _find_and_copy(pattern, from_dir, to_dir):
     """Return a copy list for the files matching the pattern."""
-    return sorted(["%s/%s:%s/%s" % (
-        from_dir, f, to_dir, f) for f in rblf_find_files(from_dir, pattern, only_files=1)])
+    return sorted([("%s/%s:%s/%s" % (from_dir, f, to_dir, f))
+        .replace("//", "/") for f in rblf_find_files(from_dir, pattern, only_files=1)])
 
 def _findstring(needle, haystack):
     """Equivalent to GNU make's $(findstring)."""
@@ -719,6 +725,7 @@
     soong_config_set = _soong_config_set,
     soong_config_get = _soong_config_get,
     abspath = _abspath,
+    add_product_dex_preopt_module_config = _add_product_dex_preopt_module_config,
     addprefix = _addprefix,
     addsuffix = _addsuffix,
     board_platform_in = _board_platform_in,
diff --git a/core/proguard.flags b/core/proguard.flags
index 50049cb..185275e 100644
--- a/core/proguard.flags
+++ b/core/proguard.flags
@@ -15,35 +15,24 @@
 @**.VisibleForTesting *;
 }
 
-# Understand the @Keep support annotation.
--keep class android.support.annotation.Keep
--keep class androidx.annotation.Keep
+# Understand the common @Keep annotation from various Android packages:
+#  * android.support.annotation
+#  * androidx.annotation
+#  * com.android.internal.annotations
+-keep class **android**.annotation*.Keep
 
--keep @android.support.annotation.Keep class * {*;}
--keep @androidx.annotation.Keep class * {*;}
+-keep @**android**.annotation*.Keep class * { *; }
 
 -keepclasseswithmembers class * {
-    @android.support.annotation.Keep <methods>;
+    @**android**.annotation*.Keep <methods>;
 }
 
 -keepclasseswithmembers class * {
-    @androidx.annotation.Keep <methods>;
+    @**android**.annotation*.Keep <fields>;
 }
 
 -keepclasseswithmembers class * {
-    @android.support.annotation.Keep <fields>;
-}
-
--keepclasseswithmembers class * {
-    @androidx.annotation.Keep <fields>;
-}
-
--keepclasseswithmembers class * {
-    @android.support.annotation.Keep <init>(...);
-}
-
--keepclasseswithmembers class * {
-    @androidx.annotation.Keep <init>(...);
+    @**android**.annotation*.Keep <init>(...);
 }
 
 -include proguard_basic_keeps.flags
diff --git a/core/proguard_basic_keeps.flags b/core/proguard_basic_keeps.flags
index 28ec2d0..30c2341 100644
--- a/core/proguard_basic_keeps.flags
+++ b/core/proguard_basic_keeps.flags
@@ -9,7 +9,7 @@
 }
 
 # For native methods, see http://proguard.sourceforge.net/manual/examples.html#native
--keepclasseswithmembernames class * {
+-keepclasseswithmembernames,includedescriptorclasses class * {
     native <methods>;
 }
 
diff --git a/core/robolectric_test_config_template.xml b/core/robolectric_test_config_template.xml
index e62175f..483b957 100644
--- a/core/robolectric_test_config_template.xml
+++ b/core/robolectric_test_config_template.xml
@@ -18,7 +18,7 @@
     <option name="test-suite-tag" value="robolectric" />
     <option name="test-suite-tag" value="robolectric-tests" />
 
-    <option name="java-folder" value="prebuilts/jdk/jdk9/linux-x86/" />
+    <option name="java-folder" value="prebuilts/jdk/jdk11/linux-x86/" />
     <option name="exclude-paths" value="java" />
     <option name="use-robolectric-resources" value="true" />
 
diff --git a/core/soong_config.mk b/core/soong_config.mk
index a8071a3..fd957c3 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -28,6 +28,7 @@
 $(call add_json_str,  Platform_sdk_codename,             $(PLATFORM_VERSION_CODENAME))
 $(call add_json_bool, Platform_sdk_final,                $(filter REL,$(PLATFORM_VERSION_CODENAME)))
 $(call add_json_val,  Platform_sdk_extension_version,    $(PLATFORM_SDK_EXTENSION_VERSION))
+$(call add_json_val,  Platform_base_sdk_extension_version, $(PLATFORM_BASE_SDK_EXTENSION_VERSION))
 $(call add_json_csv,  Platform_version_active_codenames, $(PLATFORM_VERSION_ALL_CODENAMES))
 $(call add_json_str,  Platform_security_patch,           $(PLATFORM_SECURITY_PATCH))
 $(call add_json_str,  Platform_preview_sdk_version,      $(PLATFORM_PREVIEW_SDK_VERSION))
@@ -118,6 +119,7 @@
 
 $(call add_json_bool, GcovCoverage,                      $(filter true,$(NATIVE_COVERAGE)))
 $(call add_json_bool, ClangCoverage,                     $(filter true,$(CLANG_COVERAGE)))
+$(call add_json_bool, ClangCoverageContinuousMode,       $(filter true,$(CLANG_COVERAGE_CONTINUOUS_MODE)))
 $(call add_json_list, NativeCoveragePaths,               $(NATIVE_COVERAGE_PATHS))
 $(call add_json_list, NativeCoverageExcludePaths,        $(NATIVE_COVERAGE_EXCLUDE_PATHS))
 
@@ -174,6 +176,7 @@
 $(call add_json_str,  OdmPath,                           $(TARGET_COPY_OUT_ODM))
 $(call add_json_str,  VendorDlkmPath,                    $(TARGET_COPY_OUT_VENDOR_DLKM))
 $(call add_json_str,  OdmDlkmPath,                       $(TARGET_COPY_OUT_ODM_DLKM))
+$(call add_json_str,  SystemDlkmPath,                    $(TARGET_COPY_OUT_SYSTEM_DLKM))
 $(call add_json_str,  ProductPath,                       $(TARGET_COPY_OUT_PRODUCT))
 $(call add_json_str,  SystemExtPath,                     $(TARGET_COPY_OUT_SYSTEM_EXT))
 $(call add_json_bool, MinimizeJavaDebugInfo,             $(filter true,$(PRODUCT_MINIMIZE_JAVA_DEBUG_INFO)))
@@ -199,11 +202,14 @@
 $(call add_json_list, BoardOdmSepolicyDirs,              $(BOARD_ODM_SEPOLICY_DIRS))
 $(call add_json_list, BoardVendorDlkmSepolicyDirs,       $(BOARD_VENDOR_DLKM_SEPOLICY_DIRS))
 $(call add_json_list, BoardOdmDlkmSepolicyDirs,          $(BOARD_ODM_DLKM_SEPOLICY_DIRS))
+$(call add_json_list, BoardSystemDlkmSepolicyDirs,       $(BOARD_SYSTEM_DLKM_SEPOLICY_DIRS))
 # TODO: BOARD_PLAT_* dirs only kept for compatibility reasons. Will be a hard error on API level 31
 $(call add_json_list, SystemExtPublicSepolicyDirs,       $(SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS) $(BOARD_PLAT_PUBLIC_SEPOLICY_DIR))
 $(call add_json_list, SystemExtPrivateSepolicyDirs,      $(SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS) $(BOARD_PLAT_PRIVATE_SEPOLICY_DIR))
 $(call add_json_list, BoardSepolicyM4Defs,               $(BOARD_SEPOLICY_M4DEFS))
 $(call add_json_str,  BoardSepolicyVers,                 $(BOARD_SEPOLICY_VERS))
+$(call add_json_str,  SystemExtSepolicyPrebuiltApiDir,   $(BOARD_SYSTEM_EXT_PREBUILT_DIR))
+$(call add_json_str,  ProductSepolicyPrebuiltApiDir,     $(BOARD_PRODUCT_PREBUILT_DIR))
 
 $(call add_json_str,  PlatformSepolicyVersion,           $(PLATFORM_SEPOLICY_VERSION))
 $(call add_json_str,  TotSepolicyVersion,                $(TOT_SEPOLICY_VERSION))
@@ -265,6 +271,7 @@
 $(call add_json_bool, BuildBrokenEnforceSyspropOwner,     $(filter true,$(BUILD_BROKEN_ENFORCE_SYSPROP_OWNER)))
 $(call add_json_bool, BuildBrokenTrebleSyspropNeverallow, $(filter true,$(BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW)))
 $(call add_json_bool, BuildBrokenVendorPropertyNamespace, $(filter true,$(BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE)))
+$(call add_json_list, BuildBrokenInputDirModules, $(BUILD_BROKEN_INPUT_DIR_MODULES))
 
 $(call add_json_bool, BuildDebugfsRestrictionsEnabled, $(filter true,$(PRODUCT_SET_DEBUGFS_RESTRICTIONS)))
 
diff --git a/core/sysprop.mk b/core/sysprop.mk
index 1d38f8c..b9c05fe 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -128,6 +128,8 @@
 	        cat $(file) >> $$@;\
 	    fi;)
 	$(hide) echo "# end of file" >> $$@
+
+$(call declare-0p-target,$(2))
 endef
 
 # -----------------------------------------------------------------
@@ -262,6 +264,7 @@
 	        BOARD_BUILD_SYSTEM_ROOT_IMAGE="$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)" \
 	        BOARD_USE_VBMETA_DIGTEST_IN_FINGERPRINT="$(BOARD_USE_VBMETA_DIGTEST_IN_FINGERPRINT)" \
 	        PLATFORM_VERSION="$(PLATFORM_VERSION)" \
+	        PLATFORM_DISPLAY_VERSION="$(PLATFORM_DISPLAY_VERSION)" \
 	        PLATFORM_VERSION_LAST_STABLE="$(PLATFORM_VERSION_LAST_STABLE)" \
 	        PLATFORM_SECURITY_PATCH="$(PLATFORM_SECURITY_PATCH)" \
 	        PLATFORM_BASE_OS="$(PLATFORM_BASE_OS)" \
@@ -270,6 +273,7 @@
 	        PLATFORM_PREVIEW_SDK_FINGERPRINT="$$(cat $(API_FINGERPRINT))" \
 	        PLATFORM_VERSION_CODENAME="$(PLATFORM_VERSION_CODENAME)" \
 	        PLATFORM_VERSION_ALL_CODENAMES="$(PLATFORM_VERSION_ALL_CODENAMES)" \
+	        PLATFORM_VERSION_KNOWN_CODENAMES="$(PLATFORM_VERSION_KNOWN_CODENAMES)" \
 	        PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION="$(PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION)" \
 	        BUILD_VERSION_TAGS="$(BUILD_VERSION_TAGS)" \
 	        $(if $(OEM_THUMBPRINT_PROPERTIES),BUILD_THUMBPRINT="$(BUILD_THUMBPRINT_FROM_FILE)") \
@@ -306,10 +310,6 @@
     PRODUCT_VENDOR_PROPERTIES
 endif
 
-_blacklist_names_ := \
-    $(PRODUCT_SYSTEM_PROPERTY_BLACKLIST) \
-    ro.product.first_api_level
-
 INSTALLED_BUILD_PROP_TARGET := $(TARGET_OUT)/build.prop
 
 $(eval $(call build-properties,\
@@ -317,7 +317,7 @@
     $(INSTALLED_BUILD_PROP_TARGET),\
     $(_prop_files_),\
     $(_prop_vars_),\
-    $(_blacklist_names_),\
+    $(PRODUCT_SYSTEM_PROPERTY_BLACKLIST),\
     $(empty),\
     $(empty)))
 
@@ -464,6 +464,20 @@
     $(empty),\
     $(empty)))
 
+# ----------------------------------------------------------------
+# system_dlkm/build.prop
+#
+
+INSTALLED_SYSTEM_DLKM_BUILD_PROP_TARGET := $(TARGET_OUT_SYSTEM_DLKM)/etc/build.prop
+$(eval $(call build-properties,\
+    system_dlkm,\
+    $(INSTALLED_SYSTEM_DLKM_BUILD_PROP_TARGET),\
+    $(empty),\
+    $(empty),\
+    $(empty),\
+    $(empty),\
+    $(empty)))
+
 # -----------------------------------------------------------------
 # system_ext/etc/build.prop
 #
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index aeeb403..8097535 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -1,4 +1,5 @@
 # Print a list of the modules that could be built
+# Currently, runtime_dependencies only includes the runtime libs information for cc binaries.
 
 MODULE_INFO_JSON := $(PRODUCT_OUT)/module-info.json
 
@@ -24,6 +25,9 @@
 			'"test_mainline_modules": [$(foreach w,$(sort $(ALL_MODULES.$(m).TEST_MAINLINE_MODULES)),"$(w)", )], ' \
 			'"is_unit_test": "$(ALL_MODULES.$(m).IS_UNIT_TEST)", ' \
 			'"data": [$(foreach w,$(sort $(ALL_MODULES.$(m).TEST_DATA)),"$(w)", )], ' \
+			'"runtime_dependencies": [$(foreach w,$(sort $(ALL_MODULES.$(m).LOCAL_RUNTIME_LIBRARIES)),"$(w)", )], ' \
+			'"data_dependencies": [$(foreach w,$(sort $(ALL_MODULES.$(m).TEST_DATA_BINS)),"$(w)", )], ' \
+			'"supported_variants": [$(foreach w,$(sort $(ALL_MODULES.$(m).SUPPORTED_VARIANTS)),"$(w)", )], ' \
 			'},\n' \
 	 ) | sed -e 's/, *\]/]/g' -e 's/, *\}/ }/g' -e '$$s/,$$//' >> $@
 	$(hide) echo '}' >> $@
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 051de62..038b9c4 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -19,6 +19,7 @@
 #
 # Guarantees that the following are defined:
 #     PLATFORM_VERSION
+#     PLATFORM_DISPLAY_VERSION
 #     PLATFORM_SDK_VERSION
 #     PLATFORM_VERSION_CODENAME
 #     DEFAULT_APP_TARGET_SDK
@@ -54,6 +55,11 @@
 # release build.  If this is a final release build, it is simply "REL".
 PLATFORM_VERSION_CODENAME.TP1A := Tiramisu
 
+# This is the user-visible version.  In a final release build it should
+# be empty to use PLATFORM_VERSION as the user-visible version.  For
+# a preview release it can be set to a user-friendly value like `12 Preview 1`.
+PLATFORM_DISPLAY_VERSION :=
+
 ifndef PLATFORM_SDK_VERSION
   # This is the canonical definition of the SDK version, which defines
   # the set of APIs and functionality available in the platform.  It
@@ -67,7 +73,7 @@
   # When you increment the PLATFORM_SDK_VERSION please ensure you also
   # clear out the following text file of all older PLATFORM_VERSION's:
   # cts/tests/tests/os/assets/platform_versions.txt
-  PLATFORM_SDK_VERSION := 31
+  PLATFORM_SDK_VERSION := 32
 endif
 .KATI_READONLY := PLATFORM_SDK_VERSION
 
@@ -79,13 +85,20 @@
 PLATFORM_BASE_SDK_EXTENSION_VERSION := 1
 .KATI_READONLY := PLATFORM_BASE_SDK_EXTENSION_VERSION
 
+# These are all the known codenames starting from Q.
+PLATFORM_VERSION_KNOWN_CODENAMES := Q R S Sv2 Tiramisu
+# Convert from space separated list to comma separated
+PLATFORM_VERSION_KNOWN_CODENAMES := \
+  $(call normalize-comma-list,$(PLATFORM_VERSION_KNOWN_CODENAMES))
+.KATI_READONLY := PLATFORM_VERSION_KNOWN_CODENAMES
+
 ifndef PLATFORM_SECURITY_PATCH
     #  Used to indicate the security patch that has been applied to the device.
     #  It must signify that the build includes all security patches issued up through the designated Android Public Security Bulletin.
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-      PLATFORM_SECURITY_PATCH := 2022-01-05
+    PLATFORM_SECURITY_PATCH := 2022-03-05
 endif
 .KATI_READONLY := PLATFORM_SECURITY_PATCH
 
diff --git a/core/version_util.mk b/core/version_util.mk
index b7c4e48..3a0d4b5 100644
--- a/core/version_util.mk
+++ b/core/version_util.mk
@@ -90,6 +90,15 @@
   PLATFORM_VERSION_CODENAME \
   PLATFORM_VERSION_ALL_CODENAMES
 
+ifneq (REL,$(PLATFORM_VERSION_CODENAME))
+  codenames := \
+    $(subst $(comma),$(space),$(strip $(PLATFORM_VERSION_KNOWN_CODENAMES)))
+  ifeq ($(filter $(PLATFORM_VERSION_CODENAME),$(codenames)),)
+    $(error '$(PLATFORM_VERSION_CODENAME)' is not in '$(codenames)'. \
+        Add PLATFORM_VERSION_CODENAME to PLATFORM_VERSION_KNOWN_CODENAMES)
+  endif
+endif
+
 ifndef PLATFORM_VERSION
   ifeq (REL,$(PLATFORM_VERSION_CODENAME))
       PLATFORM_VERSION := $(PLATFORM_VERSION_LAST_STABLE)
@@ -99,6 +108,10 @@
 endif
 .KATI_READONLY := PLATFORM_VERSION
 
+ifndef PLATFORM_DISPLAY_VERSION
+  PLATFORM_DISPLAY_VERSION := $(PLATFORM_VERSION)
+endif
+.KATI_READONLY := PLATFORM_DISPLAY_VERSION
 
 ifeq (REL,$(PLATFORM_VERSION_CODENAME))
   PLATFORM_PREVIEW_SDK_VERSION := 0
diff --git a/envsetup.sh b/envsetup.sh
index a23bbad..87e6e0a 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -625,7 +625,7 @@
         return
     fi
 
-    echo "Lunch menu... pick a combo:"
+    echo "Lunch menu .. Here are the common combinations:"
 
     local i=1
     local choice
@@ -647,12 +647,16 @@
         return 1
     fi
 
+    local used_lunch_menu=0
+
     if [ "$1" ]; then
         answer=$1
     else
         print_lunch_menu
-        echo -n "Which would you like? [aosp_arm-eng] "
+        echo "Which would you like? [aosp_arm-eng]"
+        echo -n "Pick from common choices above (e.g. 13) or specify your own (e.g. aosp_barbet-eng): "
         read answer
+        used_lunch_menu=1
     fi
 
     local selection=
@@ -717,6 +721,11 @@
     fi
     export TARGET_BUILD_TYPE=release
 
+    if [ $used_lunch_menu -eq 1 ]; then
+      echo
+      echo "Hint: next time you can simply run 'lunch $selection'"
+    fi
+
     [[ -n "${ANDROID_QUIET_BUILD:-}" ]] || echo
 
     set_stuff_for_environment
diff --git a/help.sh b/help.sh
index 06a9056..e51adc1 100755
--- a/help.sh
+++ b/help.sh
@@ -52,6 +52,8 @@
                             Stands for "VendorDlkm, NO Dependencies"
     odnod                   Quickly rebuild the odm_dlkm image from built packages
                             Stands for "OdmDlkm, NO Dependencies"
+    sdnod                   Quickly rebuild the system_dlkm image from built packages
+                            Stands for "SystemDlkm, NO Dependencies"
 
 
 So, for example, you could run:
diff --git a/target/board/ndk/BoardConfig.mk b/target/board/ndk/BoardConfig.mk
new file mode 100644
index 0000000..da8b5f3
--- /dev/null
+++ b/target/board/ndk/BoardConfig.mk
@@ -0,0 +1,21 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+TARGET_ARCH_SUITE := ndk
+TARGET_USES_64_BIT_BINDER := true
+
+MALLOC_SVELTE := true
+
+USE_SAFESTACK := false
diff --git a/target/board/ndk/README.md b/target/board/ndk/README.md
new file mode 100644
index 0000000..d8f3a16
--- /dev/null
+++ b/target/board/ndk/README.md
@@ -0,0 +1,2 @@
+This device is suitable for a Soong-only build that builds for all the architectures
+needed for the NDK.
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index 7d9d90e..ee702e5 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -61,6 +61,7 @@
     $(LOCAL_DIR)/mainline_system_x86.mk \
     $(LOCAL_DIR)/mainline_system_x86_64.mk \
     $(LOCAL_DIR)/mainline_system_x86_arm.mk \
+    $(LOCAL_DIR)/ndk.mk \
     $(LOCAL_DIR)/sdk_arm64.mk \
     $(LOCAL_DIR)/sdk.mk \
     $(LOCAL_DIR)/sdk_phone_arm64.mk \
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 3d299fb..694d057 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -78,6 +78,7 @@
     device_config \
     dmctl \
     dnsmasq \
+    dmesgd \
     DownloadProvider \
     dpm \
     dump.erofs \
@@ -294,7 +295,7 @@
 # HWASAN runtime for SANITIZE_TARGET=hwaddress builds
 ifneq (,$(filter hwaddress,$(SANITIZE_TARGET)))
   PRODUCT_PACKAGES += \
-   libclang_rt.hwasan-aarch64-android.bootstrap
+   libclang_rt.hwasan.bootstrap
 endif
 
 # Jacoco agent JARS to be built and installed, if any.
diff --git a/target/product/core_no_zygote.mk b/target/product/core_no_zygote.mk
new file mode 100644
index 0000000..205a897
--- /dev/null
+++ b/target/product/core_no_zygote.mk
@@ -0,0 +1,30 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Inherit from this product for devices that do not include a zygote using:
+# $(call inherit-product, $(SRC_TARGET_DIR)/product/core_no_zygote.mk)
+# The inheritance for this must come before the inheritance chain that leads
+# to core_minimal.mk.
+
+# Copy the no-zygote startup script
+PRODUCT_COPY_FILES += system/core/rootdir/init.no_zygote.rc:system/etc/init/hw/init.no_zygote.rc
+
+# Set the zygote property to select the no-zygote script.
+# This line must be parsed before the one in core_minimal.mk
+PRODUCT_VENDOR_PROPERTIES += ro.zygote=no_zygote
+
+TARGET_SUPPORTS_32_BIT_APPS := false
+TARGET_SUPPORTS_64_BIT_APPS := false
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 3223002..851a2cb 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -63,7 +63,7 @@
     com.android.scheduling:framework-scheduling \
     com.android.sdkext:framework-sdkextensions \
     com.android.tethering:framework-connectivity \
-    com.android.tethering:framework-connectivity-tiramisu \
+    com.android.tethering:framework-connectivity-t \
     com.android.tethering:framework-tethering \
     com.android.wifi:framework-wifi
 
diff --git a/target/product/generic_ramdisk.mk b/target/product/generic_ramdisk.mk
index 80d34be..fb0370e 100644
--- a/target/product/generic_ramdisk.mk
+++ b/target/product/generic_ramdisk.mk
@@ -22,6 +22,10 @@
 # Ramdisk
 PRODUCT_PACKAGES += \
     init_first_stage \
+    e2fsck.ramdisk \
+    fsck.f2fs.ramdisk \
+    tune2fs.ramdisk \
+    snapuserd.ramdisk \
 
 # Debug ramdisk
 PRODUCT_PACKAGES += \
diff --git a/target/product/gsi/32.txt b/target/product/gsi/32.txt
new file mode 100644
index 0000000..971ec92
--- /dev/null
+++ b/target/product/gsi/32.txt
@@ -0,0 +1,223 @@
+LLNDK: libEGL.so
+LLNDK: libGLESv1_CM.so
+LLNDK: libGLESv2.so
+LLNDK: libGLESv3.so
+LLNDK: libRS.so
+LLNDK: libandroid_net.so
+LLNDK: libbinder_ndk.so
+LLNDK: libc.so
+LLNDK: libcgrouprc.so
+LLNDK: libdl.so
+LLNDK: libft2.so
+LLNDK: liblog.so
+LLNDK: libm.so
+LLNDK: libmediandk.so
+LLNDK: libnativewindow.so
+LLNDK: libneuralnetworks.so
+LLNDK: libselinux.so
+LLNDK: libsync.so
+LLNDK: libvndksupport.so
+LLNDK: libvulkan.so
+VNDK-SP: android.hardware.common-V2-ndk_platform.so
+VNDK-SP: android.hardware.common.fmq-V1-ndk_platform.so
+VNDK-SP: android.hardware.graphics.common-V2-ndk_platform.so
+VNDK-SP: android.hardware.graphics.common@1.0.so
+VNDK-SP: android.hardware.graphics.common@1.1.so
+VNDK-SP: android.hardware.graphics.common@1.2.so
+VNDK-SP: android.hardware.graphics.mapper@2.0.so
+VNDK-SP: android.hardware.graphics.mapper@2.1.so
+VNDK-SP: android.hardware.graphics.mapper@3.0.so
+VNDK-SP: android.hardware.graphics.mapper@4.0.so
+VNDK-SP: android.hardware.renderscript@1.0.so
+VNDK-SP: android.hidl.memory.token@1.0.so
+VNDK-SP: android.hidl.memory@1.0-impl.so
+VNDK-SP: android.hidl.memory@1.0.so
+VNDK-SP: android.hidl.safe_union@1.0.so
+VNDK-SP: libRSCpuRef.so
+VNDK-SP: libRSDriver.so
+VNDK-SP: libRS_internal.so
+VNDK-SP: libbacktrace.so
+VNDK-SP: libbase.so
+VNDK-SP: libbcinfo.so
+VNDK-SP: libblas.so
+VNDK-SP: libc++.so
+VNDK-SP: libcompiler_rt.so
+VNDK-SP: libcutils.so
+VNDK-SP: libdmabufheap.so
+VNDK-SP: libgralloctypes.so
+VNDK-SP: libhardware.so
+VNDK-SP: libhidlbase.so
+VNDK-SP: libhidlmemory.so
+VNDK-SP: libion.so
+VNDK-SP: libjsoncpp.so
+VNDK-SP: liblzma.so
+VNDK-SP: libprocessgroup.so
+VNDK-SP: libunwindstack.so
+VNDK-SP: libutils.so
+VNDK-SP: libutilscallstack.so
+VNDK-SP: libz.so
+VNDK-core: android.hardware.audio.common@2.0.so
+VNDK-core: android.hardware.authsecret-V1-ndk_platform.so
+VNDK-core: android.hardware.automotive.occupant_awareness-V1-ndk_platform.so
+VNDK-core: android.hardware.configstore-utils.so
+VNDK-core: android.hardware.configstore@1.0.so
+VNDK-core: android.hardware.configstore@1.1.so
+VNDK-core: android.hardware.confirmationui-support-lib.so
+VNDK-core: android.hardware.gnss-V1-ndk_platform.so
+VNDK-core: android.hardware.graphics.allocator@2.0.so
+VNDK-core: android.hardware.graphics.allocator@3.0.so
+VNDK-core: android.hardware.graphics.allocator@4.0.so
+VNDK-core: android.hardware.graphics.bufferqueue@1.0.so
+VNDK-core: android.hardware.graphics.bufferqueue@2.0.so
+VNDK-core: android.hardware.health.storage-V1-ndk_platform.so
+VNDK-core: android.hardware.identity-V3-ndk_platform.so
+VNDK-core: android.hardware.keymaster-V3-ndk_platform.so
+VNDK-core: android.hardware.light-V1-ndk_platform.so
+VNDK-core: android.hardware.media.bufferpool@2.0.so
+VNDK-core: android.hardware.media.omx@1.0.so
+VNDK-core: android.hardware.media@1.0.so
+VNDK-core: android.hardware.memtrack-V1-ndk_platform.so
+VNDK-core: android.hardware.memtrack@1.0.so
+VNDK-core: android.hardware.oemlock-V1-ndk_platform.so
+VNDK-core: android.hardware.power-V2-ndk_platform.so
+VNDK-core: android.hardware.power.stats-V1-ndk_platform.so
+VNDK-core: android.hardware.rebootescrow-V1-ndk_platform.so
+VNDK-core: android.hardware.security.keymint-V1-ndk_platform.so
+VNDK-core: android.hardware.security.secureclock-V1-ndk_platform.so
+VNDK-core: android.hardware.security.sharedsecret-V1-ndk_platform.so
+VNDK-core: android.hardware.soundtrigger@2.0-core.so
+VNDK-core: android.hardware.soundtrigger@2.0.so
+VNDK-core: android.hardware.vibrator-V2-ndk_platform.so
+VNDK-core: android.hardware.weaver-V1-ndk_platform.so
+VNDK-core: android.hidl.token@1.0-utils.so
+VNDK-core: android.hidl.token@1.0.so
+VNDK-core: android.system.keystore2-V1-ndk_platform.so
+VNDK-core: android.system.suspend@1.0.so
+VNDK-core: libaudioroute.so
+VNDK-core: libaudioutils.so
+VNDK-core: libbinder.so
+VNDK-core: libbufferqueueconverter.so
+VNDK-core: libcamera_metadata.so
+VNDK-core: libcap.so
+VNDK-core: libcn-cbor.so
+VNDK-core: libcodec2.so
+VNDK-core: libcrypto.so
+VNDK-core: libcrypto_utils.so
+VNDK-core: libcurl.so
+VNDK-core: libdiskconfig.so
+VNDK-core: libdumpstateutil.so
+VNDK-core: libevent.so
+VNDK-core: libexif.so
+VNDK-core: libexpat.so
+VNDK-core: libfmq.so
+VNDK-core: libgatekeeper.so
+VNDK-core: libgui.so
+VNDK-core: libhardware_legacy.so
+VNDK-core: libhidlallocatorutils.so
+VNDK-core: libjpeg.so
+VNDK-core: libldacBT_abr.so
+VNDK-core: libldacBT_enc.so
+VNDK-core: liblz4.so
+VNDK-core: libmedia_helper.so
+VNDK-core: libmedia_omx.so
+VNDK-core: libmemtrack.so
+VNDK-core: libminijail.so
+VNDK-core: libmkbootimg_abi_check.so
+VNDK-core: libnetutils.so
+VNDK-core: libnl.so
+VNDK-core: libpcre2.so
+VNDK-core: libpiex.so
+VNDK-core: libpng.so
+VNDK-core: libpower.so
+VNDK-core: libprocinfo.so
+VNDK-core: libradio_metadata.so
+VNDK-core: libspeexresampler.so
+VNDK-core: libsqlite.so
+VNDK-core: libssl.so
+VNDK-core: libstagefright_bufferpool@2.0.so
+VNDK-core: libstagefright_bufferqueue_helper.so
+VNDK-core: libstagefright_foundation.so
+VNDK-core: libstagefright_omx.so
+VNDK-core: libstagefright_omx_utils.so
+VNDK-core: libstagefright_xmlparser.so
+VNDK-core: libsysutils.so
+VNDK-core: libtinyalsa.so
+VNDK-core: libtinyxml2.so
+VNDK-core: libui.so
+VNDK-core: libusbhost.so
+VNDK-core: libwifi-system-iface.so
+VNDK-core: libxml2.so
+VNDK-core: libyuv.so
+VNDK-core: libziparchive.so
+VNDK-private: libbacktrace.so
+VNDK-private: libblas.so
+VNDK-private: libcompiler_rt.so
+VNDK-private: libft2.so
+VNDK-private: libgui.so
+VNDK-product: android.hardware.audio.common@2.0.so
+VNDK-product: android.hardware.configstore@1.0.so
+VNDK-product: android.hardware.configstore@1.1.so
+VNDK-product: android.hardware.graphics.allocator@2.0.so
+VNDK-product: android.hardware.graphics.allocator@3.0.so
+VNDK-product: android.hardware.graphics.allocator@4.0.so
+VNDK-product: android.hardware.graphics.bufferqueue@1.0.so
+VNDK-product: android.hardware.graphics.bufferqueue@2.0.so
+VNDK-product: android.hardware.graphics.common@1.0.so
+VNDK-product: android.hardware.graphics.common@1.1.so
+VNDK-product: android.hardware.graphics.common@1.2.so
+VNDK-product: android.hardware.graphics.mapper@2.0.so
+VNDK-product: android.hardware.graphics.mapper@2.1.so
+VNDK-product: android.hardware.graphics.mapper@3.0.so
+VNDK-product: android.hardware.graphics.mapper@4.0.so
+VNDK-product: android.hardware.media.bufferpool@2.0.so
+VNDK-product: android.hardware.media.omx@1.0.so
+VNDK-product: android.hardware.media@1.0.so
+VNDK-product: android.hardware.memtrack@1.0.so
+VNDK-product: android.hardware.renderscript@1.0.so
+VNDK-product: android.hardware.soundtrigger@2.0.so
+VNDK-product: android.hidl.memory.token@1.0.so
+VNDK-product: android.hidl.memory@1.0.so
+VNDK-product: android.hidl.safe_union@1.0.so
+VNDK-product: android.hidl.token@1.0.so
+VNDK-product: android.system.suspend@1.0.so
+VNDK-product: libaudioutils.so
+VNDK-product: libbacktrace.so
+VNDK-product: libbase.so
+VNDK-product: libc++.so
+VNDK-product: libcamera_metadata.so
+VNDK-product: libcap.so
+VNDK-product: libcompiler_rt.so
+VNDK-product: libcrypto.so
+VNDK-product: libcurl.so
+VNDK-product: libcutils.so
+VNDK-product: libevent.so
+VNDK-product: libexpat.so
+VNDK-product: libfmq.so
+VNDK-product: libhidlbase.so
+VNDK-product: libhidlmemory.so
+VNDK-product: libion.so
+VNDK-product: libjpeg.so
+VNDK-product: libjsoncpp.so
+VNDK-product: libldacBT_abr.so
+VNDK-product: libldacBT_enc.so
+VNDK-product: liblz4.so
+VNDK-product: liblzma.so
+VNDK-product: libminijail.so
+VNDK-product: libnl.so
+VNDK-product: libpcre2.so
+VNDK-product: libpiex.so
+VNDK-product: libpng.so
+VNDK-product: libprocessgroup.so
+VNDK-product: libprocinfo.so
+VNDK-product: libspeexresampler.so
+VNDK-product: libssl.so
+VNDK-product: libtinyalsa.so
+VNDK-product: libtinyxml2.so
+VNDK-product: libunwindstack.so
+VNDK-product: libutils.so
+VNDK-product: libutilscallstack.so
+VNDK-product: libwifi-system-iface.so
+VNDK-product: libxml2.so
+VNDK-product: libyuv.so
+VNDK-product: libz.so
+VNDK-product: libziparchive.so
diff --git a/target/product/gsi/Android.bp b/target/product/gsi/Android.bp
index 88472eb..a8af9c4 100644
--- a/target/product/gsi/Android.bp
+++ b/target/product/gsi/Android.bp
@@ -14,11 +14,7 @@
 
 package {
     // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "build_make_license"
-    // to get the below license kinds:
-    //   legacy_restricted
-    default_applicable_licenses: ["build_make_license"],
+    default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
 filegroup {
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index 167ffcf..85e551d 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -73,8 +73,9 @@
 # Script to update the latest VNDK lib list
 include $(CLEAR_VARS)
 LOCAL_MODULE := update-vndk-list.sh
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := EXECUTABLES
 LOCAL_MODULE_STEM := $(LOCAL_MODULE)
 LOCAL_IS_HOST_MODULE := true
@@ -170,8 +171,9 @@
 
 include $(CLEAR_VARS)
 LOCAL_MODULE := vndk_package
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 # Filter LLNDK libs moved to APEX to avoid pulling them into /system/LIB
 LOCAL_REQUIRED_MODULES := \
     $(filter-out $(LLNDK_MOVED_TO_APEX_LIBRARIES),$(LLNDK_LIBRARIES))
@@ -195,8 +197,9 @@
 	_vndk_versions += $(BOARD_VNDK_VERSION)
 endif
 LOCAL_MODULE := vndk_apex_snapshot_package
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_REQUIRED_MODULES := $(foreach vndk_ver,$(_vndk_versions),com.android.vndk.v$(vndk_ver))
 include $(BUILD_PHONY_PACKAGE)
 
@@ -209,8 +212,9 @@
 
 include $(CLEAR_VARS)
 LOCAL_MODULE := gsi_skip_mount.cfg
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_STEM := skip_mount.cfg
 LOCAL_SRC_FILES := $(LOCAL_MODULE)
 LOCAL_MODULE_CLASS := ETC
@@ -234,8 +238,9 @@
 
 include $(CLEAR_VARS)
 LOCAL_MODULE := init.gsi.rc
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_SRC_FILES := $(LOCAL_MODULE)
 LOCAL_MODULE_CLASS := ETC
 LOCAL_SYSTEM_EXT_MODULE := true
@@ -246,8 +251,9 @@
 
 include $(CLEAR_VARS)
 LOCAL_MODULE := init.vndk-nodef.rc
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_SRC_FILES := $(LOCAL_MODULE)
 LOCAL_MODULE_CLASS := ETC
 LOCAL_SYSTEM_EXT_MODULE := true
diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt
index 3cad6f1..f9c1f3d 100644
--- a/target/product/gsi/current.txt
+++ b/target/product/gsi/current.txt
@@ -82,6 +82,7 @@
 VNDK-core: android.hardware.media@1.0.so
 VNDK-core: android.hardware.memtrack-V1-ndk.so
 VNDK-core: android.hardware.memtrack@1.0.so
+VNDK-core: android.hardware.nfc-V1-ndk.so
 VNDK-core: android.hardware.oemlock-V1-ndk.so
 VNDK-core: android.hardware.power-V2-ndk.so
 VNDK-core: android.hardware.power.stats-V1-ndk.so
diff --git a/target/product/ndk.mk b/target/product/ndk.mk
new file mode 100644
index 0000000..1dfd0db
--- /dev/null
+++ b/target/product/ndk.mk
@@ -0,0 +1,21 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This device is suitable for soong-only build that builds for all the architectures
+# needed for the ndk. It is not going to work for normal `lunch <foo> && m` workflows.
+
+PRODUCT_NAME := ndk
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := ndk
diff --git a/target/product/security/Android.bp b/target/product/security/Android.bp
index 99f7742..1e26d59 100644
--- a/target/product/security/Android.bp
+++ b/target/product/security/Android.bp
@@ -1,11 +1,7 @@
 // AOSP test certificate
 package {
     // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "build_make_license"
-    // to get the below license kinds:
-    //   legacy_restricted
-    default_applicable_licenses: ["build_make_license"],
+    default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
 android_app_certificate {
diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk
index 9daa3bf..ad25a92 100644
--- a/target/product/security/Android.mk
+++ b/target/product/security/Android.mk
@@ -5,8 +5,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := verity_key
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_SRC_FILES := $(LOCAL_MODULE)
 LOCAL_MODULE_CLASS := ETC
 LOCAL_MODULE_PATH := $(TARGET_ROOT_OUT)
@@ -26,8 +27,9 @@
 ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
   include $(CLEAR_VARS)
   LOCAL_MODULE := verity_key_ramdisk
-  LOCAL_LICENSE_KINDS := legacy_restricted
-  LOCAL_LICENSE_CONDITIONS := restricted
+  LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+  LOCAL_LICENSE_CONDITIONS := notice
+  LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
   LOCAL_MODULE_CLASS := ETC
   LOCAL_SRC_FILES := verity_key
   LOCAL_MODULE_STEM := verity_key
@@ -41,8 +43,9 @@
   ifneq ($(filter eng userdebug,$(TARGET_BUILD_VARIANT)),)
     include $(CLEAR_VARS)
     LOCAL_MODULE := adb_keys
-    LOCAL_LICENSE_KINDS := legacy_restricted
-    LOCAL_LICENSE_CONDITIONS := restricted
+    LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+    LOCAL_LICENSE_CONDITIONS := notice
+    LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
     LOCAL_MODULE_CLASS := ETC
     LOCAL_MODULE_PATH := $(TARGET_ROOT_OUT)
     LOCAL_PREBUILT_MODULE_FILE := $(PRODUCT_ADB_KEYS)
@@ -57,8 +60,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := otacerts
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := ETC
 LOCAL_MODULE_STEM := otacerts.zip
 LOCAL_MODULE_PATH := $(TARGET_OUT_ETC)/security
@@ -81,8 +85,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := otacerts.recovery
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := ETC
 LOCAL_MODULE_STEM := otacerts.zip
 LOCAL_MODULE_PATH := $(TARGET_RECOVERY_ROOT_OUT)/system/etc/security
diff --git a/target/product/telephony_vendor.mk b/target/product/telephony_vendor.mk
index 86dbcc9..94887cf 100644
--- a/target/product/telephony_vendor.mk
+++ b/target/product/telephony_vendor.mk
@@ -20,5 +20,3 @@
 # /vendor packages
 PRODUCT_PACKAGES := \
     rild \
-
-PRODUCT_COPY_FILES := \
diff --git a/target/product/virtual_ab_ota/android_t_baseline.mk b/target/product/virtual_ab_ota/android_t_baseline.mk
new file mode 100644
index 0000000..f2639b4
--- /dev/null
+++ b/target/product/virtual_ab_ota/android_t_baseline.mk
@@ -0,0 +1,41 @@
+#
+# Copyright (C) 2022 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file enables baseline features, such as io_uring,
+# userspace merge, etc. But sets compression method to none.
+# This .mk file also removes snapuserd from vendor ramdisk,
+# as T launching devices will have init_boot which has snapuserd
+# in generic ramdisk.
+# T launching devices should include this .mk file, and configure
+# compression algorithm by setting
+# PRODUCT_VIRTUAL_AB_COMPRESSION_METHOD to gz or brotli. Complete
+# set of supported algorithms can be found in
+# system/core/fs_mgr/libsnapshot/cow_writer.cpp
+
+PRODUCT_VIRTUAL_AB_OTA := true
+
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.enabled=true
+
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.enabled=true
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.userspace.snapshots.enabled=true
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.io_uring.enabled=true
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.xor.enabled=true
+
+PRODUCT_VIRTUAL_AB_COMPRESSION := true
+PRODUCT_VIRTUAL_AB_COMPRESSION_METHOD ?= none
+PRODUCT_PACKAGES += \
+    snapuserd \
+    snapuserd.recovery \
+
diff --git a/target/product/virtual_ab_ota/compression.mk b/target/product/virtual_ab_ota/compression.mk
index 88c58b8..d5bd2a5 100644
--- a/target/product/virtual_ab_ota/compression.mk
+++ b/target/product/virtual_ab_ota/compression.mk
@@ -18,6 +18,7 @@
 
 PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.enabled=true
 PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.userspace.snapshots.enabled=true
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.io_uring.enabled=true
 PRODUCT_VIRTUAL_AB_COMPRESSION := true
 PRODUCT_PACKAGES += \
     snapuserd.vendor_ramdisk \
diff --git a/tools/Android.bp b/tools/Android.bp
index 2f3b393..6601c60 100644
--- a/tools/Android.bp
+++ b/tools/Android.bp
@@ -14,15 +14,7 @@
 
 package {
     // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "build_make_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    //   SPDX-license-identifier-BSD
-    //   SPDX-license-identifier-CC-BY
-    //   SPDX-license-identifier-GPL
-    //   SPDX-license-identifier-MIT
-    default_applicable_licenses: ["build_make_license"],
+    default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
 python_binary_host {
diff --git a/tools/acp/Android.bp b/tools/acp/Android.bp
index 78738b0..47b23b2 100644
--- a/tools/acp/Android.bp
+++ b/tools/acp/Android.bp
@@ -4,11 +4,7 @@
 
 package {
     // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "build_make_license"
-    // to get the below license kinds:
-    //   legacy_restricted
-    default_applicable_licenses: ["build_make_license"],
+    default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
 cc_binary_host {
diff --git a/tools/atree/Android.bp b/tools/atree/Android.bp
index 7906d8b..fdae3e0 100644
--- a/tools/atree/Android.bp
+++ b/tools/atree/Android.bp
@@ -4,11 +4,7 @@
 
 package {
     // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "build_make_license"
-    // to get the below license kinds:
-    //   legacy_restricted
-    default_applicable_licenses: ["build_make_license"],
+    default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
 cc_binary_host {
diff --git a/tools/buildinfo.sh b/tools/buildinfo.sh
index a349cba..536a381 100755
--- a/tools/buildinfo.sh
+++ b/tools/buildinfo.sh
@@ -16,8 +16,10 @@
 echo "ro.build.version.preview_sdk_fingerprint=$PLATFORM_PREVIEW_SDK_FINGERPRINT"
 echo "ro.build.version.codename=$PLATFORM_VERSION_CODENAME"
 echo "ro.build.version.all_codenames=$PLATFORM_VERSION_ALL_CODENAMES"
+echo "ro.build.version.known_codenames=$PLATFORM_VERSION_KNOWN_CODENAMES"
 echo "ro.build.version.release=$PLATFORM_VERSION_LAST_STABLE"
 echo "ro.build.version.release_or_codename=$PLATFORM_VERSION"
+echo "ro.build.version.release_or_preview_display=$PLATFORM_DISPLAY_VERSION"
 echo "ro.build.version.security_patch=$PLATFORM_SECURITY_PATCH"
 echo "ro.build.version.base_os=$PLATFORM_BASE_OS"
 echo "ro.build.version.min_supported_target_sdk=$PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION"
diff --git a/tools/compliance/Android.bp b/tools/compliance/Android.bp
index bbeb76f..ec0f2f9 100644
--- a/tools/compliance/Android.bp
+++ b/tools/compliance/Android.bp
@@ -19,30 +19,84 @@
 
 blueprint_go_binary {
     name: "checkshare",
-    srcs: ["cmd/checkshare.go"],
+    srcs: ["cmd/checkshare/checkshare.go"],
     deps: ["compliance-module"],
-    testSrcs: ["cmd/checkshare_test.go"],
+    testSrcs: ["cmd/checkshare/checkshare_test.go"],
+}
+
+blueprint_go_binary {
+    name: "compliancenotice_bom",
+    srcs: ["cmd/bom/bom.go"],
+    deps: ["compliance-module"],
+    testSrcs: ["cmd/bom/bom_test.go"],
+}
+
+blueprint_go_binary {
+    name: "compliancenotice_shippedlibs",
+    srcs: ["cmd/shippedlibs/shippedlibs.go"],
+    deps: [
+        "compliance-module",
+        "soong-response",
+    ],
+    testSrcs: ["cmd/shippedlibs/shippedlibs_test.go"],
 }
 
 blueprint_go_binary {
     name: "listshare",
-    srcs: ["cmd/listshare.go"],
+    srcs: ["cmd/listshare/listshare.go"],
     deps: ["compliance-module"],
-    testSrcs: ["cmd/listshare_test.go"],
+    testSrcs: ["cmd/listshare/listshare_test.go"],
 }
 
 blueprint_go_binary {
     name: "dumpgraph",
-    srcs: ["cmd/dumpgraph.go"],
+    srcs: ["cmd/dumpgraph/dumpgraph.go"],
     deps: ["compliance-module"],
-    testSrcs: ["cmd/dumpgraph_test.go"],
+    testSrcs: ["cmd/dumpgraph/dumpgraph_test.go"],
 }
 
 blueprint_go_binary {
     name: "dumpresolutions",
-    srcs: ["cmd/dumpresolutions.go"],
+    srcs: ["cmd/dumpresolutions/dumpresolutions.go"],
     deps: ["compliance-module"],
-    testSrcs: ["cmd/dumpresolutions_test.go"],
+    testSrcs: ["cmd/dumpresolutions/dumpresolutions_test.go"],
+}
+
+blueprint_go_binary {
+    name: "htmlnotice",
+    srcs: ["cmd/htmlnotice/htmlnotice.go"],
+    deps: [
+        "compliance-module",
+        "blueprint-deptools",
+    ],
+    testSrcs: ["cmd/htmlnotice/htmlnotice_test.go"],
+}
+
+blueprint_go_binary {
+    name: "rtrace",
+    srcs: ["cmd/rtrace/rtrace.go"],
+    deps: ["compliance-module"],
+    testSrcs: ["cmd/rtrace/rtrace_test.go"],
+}
+
+blueprint_go_binary {
+    name: "textnotice",
+    srcs: ["cmd/textnotice/textnotice.go"],
+    deps: [
+        "compliance-module",
+        "blueprint-deptools",
+    ],
+    testSrcs: ["cmd/textnotice/textnotice_test.go"],
+}
+
+blueprint_go_binary {
+    name: "xmlnotice",
+    srcs: ["cmd/xmlnotice/xmlnotice.go"],
+    deps: [
+        "compliance-module",
+        "blueprint-deptools",
+    ],
+    testSrcs: ["cmd/xmlnotice/xmlnotice_test.go"],
 }
 
 bootstrap_go_package {
@@ -52,14 +106,15 @@
         "conditionset.go",
         "doc.go",
         "graph.go",
-        "policy/policy.go",
-        "policy/resolve.go",
-        "policy/resolvenotices.go",
-        "policy/resolveshare.go",
-        "policy/resolveprivacy.go",
-        "policy/shareprivacyconflicts.go",
-        "policy/shipped.go",
-        "policy/walk.go",
+        "noticeindex.go",
+        "policy_policy.go",
+        "policy_resolve.go",
+        "policy_resolvenotices.go",
+        "policy_resolveshare.go",
+        "policy_resolveprivacy.go",
+        "policy_shareprivacyconflicts.go",
+        "policy_shipped.go",
+        "policy_walk.go",
         "readgraph.go",
         "resolution.go",
         "resolutionset.go",
@@ -68,14 +123,14 @@
         "condition_test.go",
         "conditionset_test.go",
         "readgraph_test.go",
-        "policy/policy_test.go",
-        "policy/resolve_test.go",
-        "policy/resolvenotices_test.go",
-        "policy/resolveshare_test.go",
-        "policy/resolveprivacy_test.go",
-        "policy/shareprivacyconflicts_test.go",
-        "policy/shipped_test.go",
-        "policy/walk_test.go",
+        "policy_policy_test.go",
+        "policy_resolve_test.go",
+        "policy_resolvenotices_test.go",
+        "policy_resolveshare_test.go",
+        "policy_resolveprivacy_test.go",
+        "policy_shareprivacyconflicts_test.go",
+        "policy_shipped_test.go",
+        "policy_walk_test.go",
         "resolutionset_test.go",
         "test_util.go",
     ],
@@ -84,5 +139,5 @@
         "golang-protobuf-encoding-prototext",
         "license_metadata_proto",
     ],
-    pkgPath: "compliance",
+    pkgPath: "android/soong/tools/compliance",
 }
diff --git a/tools/compliance/cmd/bom/bom.go b/tools/compliance/cmd/bom/bom.go
new file mode 100644
index 0000000..5363a59
--- /dev/null
+++ b/tools/compliance/cmd/bom/bom.go
@@ -0,0 +1,167 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bytes"
+	"flag"
+	"fmt"
+	"io"
+	"io/fs"
+	"os"
+	"path/filepath"
+	"strings"
+
+	"android/soong/tools/compliance"
+)
+
+var (
+	outputFile  = flag.String("o", "-", "Where to write the bill of materials. (default stdout)")
+	stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
+
+	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
+	failNoLicenses    = fmt.Errorf("No licenses found")
+)
+
+type context struct {
+	stdout      io.Writer
+	stderr      io.Writer
+	rootFS      fs.FS
+	stripPrefix []string
+}
+
+func (ctx context) strip(installPath string) string {
+	for _, prefix := range ctx.stripPrefix {
+		if strings.HasPrefix(installPath, prefix) {
+			p := strings.TrimPrefix(installPath, prefix)
+			if 0 == len(p) {
+				continue
+			}
+			return p
+		}
+	}
+	return installPath
+}
+
+func init() {
+	flag.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs a bill of materials. i.e. the list of installed paths.
+
+Options:
+`, filepath.Base(os.Args[0]))
+		flag.PrintDefaults()
+	}
+}
+
+// newMultiString creates a flag that allows multiple values in an array.
+func newMultiString(name, usage string) *multiString {
+	var f multiString
+	flag.Var(&f, name, usage)
+	return &f
+}
+
+// multiString implements the flag `Value` interface for multiple strings.
+type multiString []string
+
+func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
+func main() {
+	flag.Parse()
+
+	// Must specify at least one root target.
+	if flag.NArg() == 0 {
+		flag.Usage()
+		os.Exit(2)
+	}
+
+	if len(*outputFile) == 0 {
+		flag.Usage()
+		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+		os.Exit(2)
+	} else {
+		dir, err := filepath.Abs(filepath.Dir(*outputFile))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+		fi, err := os.Stat(dir)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+			os.Exit(1)
+		}
+		if !fi.IsDir() {
+			fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+			os.Exit(1)
+		}
+	}
+
+	var ofile io.Writer
+	ofile = os.Stdout
+	if *outputFile != "-" {
+		ofile = &bytes.Buffer{}
+	}
+
+	ctx := &context{ofile, os.Stderr, os.DirFS("."), *stripPrefix}
+
+	err := billOfMaterials(ctx, flag.Args()...)
+	if err != nil {
+		if err == failNoneRequested {
+			flag.Usage()
+		}
+		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
+		os.Exit(1)
+	}
+	if *outputFile != "-" {
+		err := os.WriteFile(*outputFile, ofile.(*bytes.Buffer).Bytes(), 0666)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write output to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+	}
+	os.Exit(0)
+}
+
+// billOfMaterials implements the bom utility.
+func billOfMaterials(ctx *context, files ...string) error {
+	// Must be at least one root file.
+	if len(files) < 1 {
+		return failNoneRequested
+	}
+
+	// Read the license graph from the license metadata files (*.meta_lic).
+	licenseGraph, err := compliance.ReadLicenseGraph(ctx.rootFS, ctx.stderr, files)
+	if err != nil {
+		return fmt.Errorf("Unable to read license metadata file(s) %q: %v\n", files, err)
+	}
+	if licenseGraph == nil {
+		return failNoLicenses
+	}
+
+	// rs contains all notice resolutions.
+	rs := compliance.ResolveNotices(licenseGraph)
+
+	ni, err := compliance.IndexLicenseTexts(ctx.rootFS, licenseGraph, rs)
+	if err != nil {
+		return fmt.Errorf("Unable to read license text file(s) for %q: %v\n", files, err)
+	}
+
+	for path := range ni.InstallPaths() {
+		fmt.Fprintln(ctx.stdout, ctx.strip(path))
+	}
+	return nil
+}
diff --git a/tools/compliance/cmd/bom/bom_test.go b/tools/compliance/cmd/bom/bom_test.go
new file mode 100644
index 0000000..4a9889f
--- /dev/null
+++ b/tools/compliance/cmd/bom/bom_test.go
@@ -0,0 +1,319 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bufio"
+	"bytes"
+	"fmt"
+	"os"
+	"strings"
+	"testing"
+)
+
+func TestMain(m *testing.M) {
+	// Change into the parent directory before running the tests
+	// so they can find the testdata directory.
+	if err := os.Chdir(".."); err != nil {
+		fmt.Printf("failed to change to testdata directory: %s\n", err)
+		os.Exit(1)
+	}
+	os.Exit(m.Run())
+}
+
+func Test(t *testing.T) {
+	tests := []struct {
+		condition   string
+		name        string
+		roots       []string
+		stripPrefix string
+		expectedOut []string
+	}{
+		{
+			condition:   "firstparty",
+			name:        "apex",
+			roots:       []string{"highest.apex.meta_lic"},
+			stripPrefix: "out/target/product/fictional",
+			expectedOut: []string{
+				"/system/apex/highest.apex",
+				"/system/apex/highest.apex/bin/bin1",
+				"/system/apex/highest.apex/bin/bin2",
+				"/system/apex/highest.apex/lib/liba.so",
+				"/system/apex/highest.apex/lib/libb.so",
+			},
+		},
+		{
+			condition:   "firstparty",
+			name:        "container",
+			roots:       []string{"container.zip.meta_lic"},
+			stripPrefix: "out/target/product/fictional/data/",
+			expectedOut: []string{
+				"container.zip",
+				"container.zip/bin1",
+				"container.zip/bin2",
+				"container.zip/liba.so",
+				"container.zip/libb.so",
+			},
+		},
+		{
+			condition:   "firstparty",
+			name:        "application",
+			roots:       []string{"application.meta_lic"},
+			stripPrefix: "out/target/product/fictional/bin/",
+			expectedOut: []string{"application"},
+		},
+		{
+			condition:   "firstparty",
+			name:        "binary",
+			roots:       []string{"bin/bin1.meta_lic"},
+			stripPrefix: "out/target/product/fictional/system/",
+			expectedOut: []string{"bin/bin1"},
+		},
+		{
+			condition:   "firstparty",
+			name:        "library",
+			roots:       []string{"lib/libd.so.meta_lic"},
+			stripPrefix: "out/target/product/fictional/system/",
+			expectedOut: []string{"lib/libd.so"},
+		},
+		{
+			condition: "notice",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []string{
+				"out/target/product/fictional/system/apex/highest.apex",
+				"out/target/product/fictional/system/apex/highest.apex/bin/bin1",
+				"out/target/product/fictional/system/apex/highest.apex/bin/bin2",
+				"out/target/product/fictional/system/apex/highest.apex/lib/liba.so",
+				"out/target/product/fictional/system/apex/highest.apex/lib/libb.so",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []string{
+				"out/target/product/fictional/data/container.zip",
+				"out/target/product/fictional/data/container.zip/bin1",
+				"out/target/product/fictional/data/container.zip/bin2",
+				"out/target/product/fictional/data/container.zip/liba.so",
+				"out/target/product/fictional/data/container.zip/libb.so",
+			},
+		},
+		{
+			condition:   "notice",
+			name:        "application",
+			roots:       []string{"application.meta_lic"},
+			expectedOut: []string{"out/target/product/fictional/bin/application"},
+		},
+		{
+			condition:   "notice",
+			name:        "binary",
+			roots:       []string{"bin/bin1.meta_lic"},
+			expectedOut: []string{"out/target/product/fictional/system/bin/bin1"},
+		},
+		{
+			condition:   "notice",
+			name:        "library",
+			roots:       []string{"lib/libd.so.meta_lic"},
+			expectedOut: []string{"out/target/product/fictional/system/lib/libd.so"},
+		},
+		{
+			condition:   "reciprocal",
+			name:        "apex",
+			roots:       []string{"highest.apex.meta_lic"},
+			stripPrefix: "out/target/product/fictional/system/apex/",
+			expectedOut: []string{
+				"highest.apex",
+				"highest.apex/bin/bin1",
+				"highest.apex/bin/bin2",
+				"highest.apex/lib/liba.so",
+				"highest.apex/lib/libb.so",
+			},
+		},
+		{
+			condition:   "reciprocal",
+			name:        "container",
+			roots:       []string{"container.zip.meta_lic"},
+			stripPrefix: "out/target/product/fictional/data/",
+			expectedOut: []string{
+				"container.zip",
+				"container.zip/bin1",
+				"container.zip/bin2",
+				"container.zip/liba.so",
+				"container.zip/libb.so",
+			},
+		},
+		{
+			condition:   "reciprocal",
+			name:        "application",
+			roots:       []string{"application.meta_lic"},
+			stripPrefix: "out/target/product/fictional/bin/",
+			expectedOut: []string{"application"},
+		},
+		{
+			condition:   "reciprocal",
+			name:        "binary",
+			roots:       []string{"bin/bin1.meta_lic"},
+			stripPrefix: "out/target/product/fictional/system/",
+			expectedOut: []string{"bin/bin1"},
+		},
+		{
+			condition:   "reciprocal",
+			name:        "library",
+			roots:       []string{"lib/libd.so.meta_lic"},
+			stripPrefix: "out/target/product/fictional/system/",
+			expectedOut: []string{"lib/libd.so"},
+		},
+		{
+			condition:   "restricted",
+			name:        "apex",
+			roots:       []string{"highest.apex.meta_lic"},
+			stripPrefix: "out/target/product/fictional/system/apex/",
+			expectedOut: []string{
+				"highest.apex",
+				"highest.apex/bin/bin1",
+				"highest.apex/bin/bin2",
+				"highest.apex/lib/liba.so",
+				"highest.apex/lib/libb.so",
+			},
+		},
+		{
+			condition:   "restricted",
+			name:        "container",
+			roots:       []string{"container.zip.meta_lic"},
+			stripPrefix: "out/target/product/fictional/data/",
+			expectedOut: []string{
+				"container.zip",
+				"container.zip/bin1",
+				"container.zip/bin2",
+				"container.zip/liba.so",
+				"container.zip/libb.so",
+			},
+		},
+		{
+			condition:   "restricted",
+			name:        "application",
+			roots:       []string{"application.meta_lic"},
+			stripPrefix: "out/target/product/fictional/bin/",
+			expectedOut: []string{"application"},
+		},
+		{
+			condition:   "restricted",
+			name:        "binary",
+			roots:       []string{"bin/bin1.meta_lic"},
+			stripPrefix: "out/target/product/fictional/system/",
+			expectedOut: []string{"bin/bin1"},
+		},
+		{
+			condition:   "restricted",
+			name:        "library",
+			roots:       []string{"lib/libd.so.meta_lic"},
+			stripPrefix: "out/target/product/fictional/system/",
+			expectedOut: []string{"lib/libd.so"},
+		},
+		{
+			condition:   "proprietary",
+			name:        "apex",
+			roots:       []string{"highest.apex.meta_lic"},
+			stripPrefix: "out/target/product/fictional/system/apex/",
+			expectedOut: []string{
+				"highest.apex",
+				"highest.apex/bin/bin1",
+				"highest.apex/bin/bin2",
+				"highest.apex/lib/liba.so",
+				"highest.apex/lib/libb.so",
+			},
+		},
+		{
+			condition:   "proprietary",
+			name:        "container",
+			roots:       []string{"container.zip.meta_lic"},
+			stripPrefix: "out/target/product/fictional/data/",
+			expectedOut: []string{
+				"container.zip",
+				"container.zip/bin1",
+				"container.zip/bin2",
+				"container.zip/liba.so",
+				"container.zip/libb.so",
+			},
+		},
+		{
+			condition:   "proprietary",
+			name:        "application",
+			roots:       []string{"application.meta_lic"},
+			stripPrefix: "out/target/product/fictional/bin/",
+			expectedOut: []string{"application"},
+		},
+		{
+			condition:   "proprietary",
+			name:        "binary",
+			roots:       []string{"bin/bin1.meta_lic"},
+			stripPrefix: "out/target/product/fictional/system/",
+			expectedOut: []string{"bin/bin1"},
+		},
+		{
+			condition:   "proprietary",
+			name:        "library",
+			roots:       []string{"lib/libd.so.meta_lic"},
+			stripPrefix: "out/target/product/fictional/system/",
+			expectedOut: []string{"lib/libd.so"},
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.condition+" "+tt.name, func(t *testing.T) {
+			stdout := &bytes.Buffer{}
+			stderr := &bytes.Buffer{}
+
+			rootFiles := make([]string, 0, len(tt.roots))
+			for _, r := range tt.roots {
+				rootFiles = append(rootFiles, "testdata/"+tt.condition+"/"+r)
+			}
+
+			ctx := context{stdout, stderr, os.DirFS("."), []string{tt.stripPrefix}}
+
+			err := billOfMaterials(&ctx, rootFiles...)
+			if err != nil {
+				t.Fatalf("bom: error = %v, stderr = %v", err, stderr)
+				return
+			}
+			if stderr.Len() > 0 {
+				t.Errorf("bom: gotStderr = %v, want none", stderr)
+			}
+
+			t.Logf("got stdout: %s", stdout.String())
+
+			t.Logf("want stdout: %s", strings.Join(tt.expectedOut, "\n"))
+
+			out := bufio.NewScanner(stdout)
+			lineno := 0
+			for out.Scan() {
+				line := out.Text()
+				if strings.TrimLeft(line, " ") == "" {
+					continue
+				}
+				if len(tt.expectedOut) <= lineno {
+					t.Errorf("bom: unexpected output at line %d: got %q, want nothing (wanted %d lines)", lineno+1, line, len(tt.expectedOut))
+				} else if tt.expectedOut[lineno] != line {
+					t.Errorf("bom: unexpected output at line %d: got %q, want %q", lineno+1, line, tt.expectedOut[lineno])
+				}
+				lineno++
+			}
+			for ; lineno < len(tt.expectedOut); lineno++ {
+				t.Errorf("bom: missing output line %d: ended early, want %q", lineno+1, tt.expectedOut[lineno])
+			}
+		})
+	}
+}
diff --git a/tools/compliance/cmd/checkshare.go b/tools/compliance/cmd/checkshare/checkshare.go
similarity index 95%
rename from tools/compliance/cmd/checkshare.go
rename to tools/compliance/cmd/checkshare/checkshare.go
index 5114a28..752d14b 100644
--- a/tools/compliance/cmd/checkshare.go
+++ b/tools/compliance/cmd/checkshare/checkshare.go
@@ -15,13 +15,14 @@
 package main
 
 import (
-	"compliance"
 	"flag"
 	"fmt"
 	"io"
 	"os"
 	"path/filepath"
 	"sort"
+
+	"android/soong/tools/compliance"
 )
 
 func init() {
@@ -46,12 +47,11 @@
 }
 
 var (
-	failConflicts = fmt.Errorf("conflicts")
+	failConflicts     = fmt.Errorf("conflicts")
 	failNoneRequested = fmt.Errorf("\nNo metadata files requested")
-	failNoLicenses = fmt.Errorf("No licenses")
+	failNoLicenses    = fmt.Errorf("No licenses")
 )
 
-
 // byError orders conflicts by error string
 type byError []compliance.SourceSharePrivacyConflict
 
diff --git a/tools/compliance/cmd/checkshare_test.go b/tools/compliance/cmd/checkshare/checkshare_test.go
similarity index 95%
rename from tools/compliance/cmd/checkshare_test.go
rename to tools/compliance/cmd/checkshare/checkshare_test.go
index 5036aa5..c9b62e1 100644
--- a/tools/compliance/cmd/checkshare_test.go
+++ b/tools/compliance/cmd/checkshare/checkshare_test.go
@@ -17,10 +17,21 @@
 import (
 	"bytes"
 	"fmt"
+	"os"
 	"strings"
 	"testing"
 )
 
+func TestMain(m *testing.M) {
+	// Change into the parent directory before running the tests
+	// so they can find the testdata directory.
+	if err := os.Chdir(".."); err != nil {
+		fmt.Printf("failed to change to testdata directory: %s\n", err)
+		os.Exit(1)
+	}
+	os.Exit(m.Run())
+}
+
 type outcome struct {
 	target           string
 	privacyCondition string
@@ -248,7 +259,7 @@
 				if len(ts) < 1 {
 					continue
 				}
-				if 0 < len(actualStdout) {
+				if len(actualStdout) > 0 {
 					t.Errorf("checkshare: unexpected multiple output lines %q, want %q", actualStdout+"\n"+ts, tt.expectedStdout)
 				}
 				actualStdout = ts
diff --git a/tools/compliance/cmd/dumpgraph.go b/tools/compliance/cmd/dumpgraph/dumpgraph.go
similarity index 83%
rename from tools/compliance/cmd/dumpgraph.go
rename to tools/compliance/cmd/dumpgraph/dumpgraph.go
index 1ee63b2..fa16b1b 100644
--- a/tools/compliance/cmd/dumpgraph.go
+++ b/tools/compliance/cmd/dumpgraph/dumpgraph.go
@@ -15,7 +15,6 @@
 package main
 
 import (
-	"compliance"
 	"flag"
 	"fmt"
 	"io"
@@ -23,21 +22,36 @@
 	"path/filepath"
 	"sort"
 	"strings"
+
+	"android/soong/tools/compliance"
 )
 
 var (
 	graphViz        = flag.Bool("dot", false, "Whether to output graphviz (i.e. dot) format.")
 	labelConditions = flag.Bool("label_conditions", false, "Whether to label target nodes with conditions.")
-	stripPrefix     = flag.String("strip_prefix", "", "Prefix to remove from paths. i.e. path to root")
+	stripPrefix     = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
 
 	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
-	failNoLicenses = fmt.Errorf("No licenses found")
+	failNoLicenses    = fmt.Errorf("No licenses found")
 )
 
 type context struct {
 	graphViz        bool
 	labelConditions bool
-	stripPrefix     string
+	stripPrefix     []string
+}
+
+func (ctx context) strip(installPath string) string {
+	for _, prefix := range ctx.stripPrefix {
+		if strings.HasPrefix(installPath, prefix) {
+			p := strings.TrimPrefix(installPath, prefix)
+			if len(p) == 0 {
+				continue
+			}
+			return p
+		}
+	}
+	return installPath
 }
 
 func init() {
@@ -59,6 +73,19 @@
 	}
 }
 
+// newMultiString creates a flag that allows multiple values in an array.
+func newMultiString(name, usage string) *multiString {
+	var f multiString
+	flag.Var(&f, name, usage)
+	return &f
+}
+
+// multiString implements the flag `Value` interface for multiple strings.
+type multiString []string
+
+func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
 func main() {
 	flag.Parse()
 
@@ -106,7 +133,7 @@
 
 	// targetOut calculates the string to output for `target` separating conditions as needed using `sep`.
 	targetOut := func(target *compliance.TargetNode, sep string) string {
-		tOut := strings.TrimPrefix(target.Name(), ctx.stripPrefix)
+		tOut := ctx.strip(target.Name())
 		if ctx.labelConditions {
 			conditions := target.LicenseConditions().Names()
 			sort.Strings(conditions)
diff --git a/tools/compliance/cmd/dumpgraph_test.go b/tools/compliance/cmd/dumpgraph/dumpgraph_test.go
similarity index 96%
rename from tools/compliance/cmd/dumpgraph_test.go
rename to tools/compliance/cmd/dumpgraph/dumpgraph_test.go
index 3055022..67b2b40 100644
--- a/tools/compliance/cmd/dumpgraph_test.go
+++ b/tools/compliance/cmd/dumpgraph/dumpgraph_test.go
@@ -17,10 +17,21 @@
 import (
 	"bytes"
 	"fmt"
+	"os"
 	"strings"
 	"testing"
 )
 
+func TestMain(m *testing.M) {
+	// Change into the parent directory before running the tests
+	// so they can find the testdata directory.
+	if err := os.Chdir(".."); err != nil {
+		fmt.Printf("failed to change to parent directory: %s\n", err)
+		os.Exit(1)
+	}
+	os.Exit(m.Run())
+}
+
 func Test_plaintext(t *testing.T) {
 	tests := []struct {
 		condition   string
@@ -48,7 +59,7 @@
 			condition: "firstparty",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/firstparty/"},
+			ctx:       context{stripPrefix: []string{"testdata/firstparty/"}},
 			expectedOut: []string{
 				"bin/bin1.meta_lic lib/liba.so.meta_lic static",
 				"bin/bin1.meta_lic lib/libc.a.meta_lic static",
@@ -64,7 +75,7 @@
 			condition: "firstparty",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/firstparty/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/firstparty/"}, labelConditions: true},
 			expectedOut: []string{
 				"bin/bin1.meta_lic:notice lib/liba.so.meta_lic:notice static",
 				"bin/bin1.meta_lic:notice lib/libc.a.meta_lic:notice static",
@@ -135,7 +146,7 @@
 			condition: "notice",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/notice/"},
+			ctx:       context{stripPrefix: []string{"testdata/notice/"}},
 			expectedOut: []string{
 				"bin/bin1.meta_lic lib/liba.so.meta_lic static",
 				"bin/bin1.meta_lic lib/libc.a.meta_lic static",
@@ -151,7 +162,7 @@
 			condition: "notice",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/notice/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/notice/"}, labelConditions: true},
 			expectedOut: []string{
 				"bin/bin1.meta_lic:notice lib/liba.so.meta_lic:notice static",
 				"bin/bin1.meta_lic:notice lib/libc.a.meta_lic:notice static",
@@ -222,7 +233,7 @@
 			condition: "reciprocal",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/reciprocal/"},
+			ctx:       context{stripPrefix: []string{"testdata/reciprocal/"}},
 			expectedOut: []string{
 				"bin/bin1.meta_lic lib/liba.so.meta_lic static",
 				"bin/bin1.meta_lic lib/libc.a.meta_lic static",
@@ -238,7 +249,7 @@
 			condition: "reciprocal",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/reciprocal/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/reciprocal/"}, labelConditions: true},
 			expectedOut: []string{
 				"bin/bin1.meta_lic:notice lib/liba.so.meta_lic:reciprocal static",
 				"bin/bin1.meta_lic:notice lib/libc.a.meta_lic:reciprocal static",
@@ -309,7 +320,7 @@
 			condition: "restricted",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/restricted/"},
+			ctx:       context{stripPrefix: []string{"testdata/restricted/"}},
 			expectedOut: []string{
 				"bin/bin1.meta_lic lib/liba.so.meta_lic static",
 				"bin/bin1.meta_lic lib/libc.a.meta_lic static",
@@ -325,7 +336,7 @@
 			condition: "restricted",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/restricted/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/restricted/"}, labelConditions: true},
 			expectedOut: []string{
 				"bin/bin1.meta_lic:notice lib/liba.so.meta_lic:restricted_allows_dynamic_linking static",
 				"bin/bin1.meta_lic:notice lib/libc.a.meta_lic:reciprocal static",
@@ -396,7 +407,7 @@
 			condition: "proprietary",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/proprietary/"},
+			ctx:       context{stripPrefix: []string{"testdata/proprietary/"}},
 			expectedOut: []string{
 				"bin/bin1.meta_lic lib/liba.so.meta_lic static",
 				"bin/bin1.meta_lic lib/libc.a.meta_lic static",
@@ -412,7 +423,7 @@
 			condition: "proprietary",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/proprietary/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/proprietary/"}, labelConditions: true},
 			expectedOut: []string{
 				"bin/bin1.meta_lic:notice lib/liba.so.meta_lic:by_exception_only:proprietary static",
 				"bin/bin1.meta_lic:notice lib/libc.a.meta_lic:by_exception_only:proprietary static",
@@ -602,7 +613,7 @@
 			condition: "firstparty",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/firstparty/"},
+			ctx:       context{stripPrefix: []string{"testdata/firstparty/"}},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
 				matchTarget("bin/bin2.meta_lic"),
@@ -625,7 +636,7 @@
 			condition: "firstparty",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/firstparty/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/firstparty/"}, labelConditions: true},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic", "notice"),
 				matchTarget("bin/bin2.meta_lic", "notice"),
@@ -724,7 +735,7 @@
 			condition: "notice",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/notice/"},
+			ctx:       context{stripPrefix: []string{"testdata/notice/"}},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
 				matchTarget("bin/bin2.meta_lic"),
@@ -747,7 +758,7 @@
 			condition: "notice",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/notice/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/notice/"}, labelConditions: true},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic", "notice"),
 				matchTarget("bin/bin2.meta_lic", "notice"),
@@ -846,7 +857,7 @@
 			condition: "reciprocal",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/reciprocal/"},
+			ctx:       context{stripPrefix: []string{"testdata/reciprocal/"}},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
 				matchTarget("bin/bin2.meta_lic"),
@@ -869,7 +880,7 @@
 			condition: "reciprocal",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/reciprocal/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/reciprocal/"}, labelConditions: true},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic", "notice"),
 				matchTarget("bin/bin2.meta_lic", "notice"),
@@ -968,7 +979,7 @@
 			condition: "restricted",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/restricted/"},
+			ctx:       context{stripPrefix: []string{"testdata/restricted/"}},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
 				matchTarget("bin/bin2.meta_lic"),
@@ -991,7 +1002,7 @@
 			condition: "restricted",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/restricted/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/restricted/"}, labelConditions: true},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic", "notice"),
 				matchTarget("bin/bin2.meta_lic", "notice"),
@@ -1090,7 +1101,7 @@
 			condition: "proprietary",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/proprietary/"},
+			ctx:       context{stripPrefix: []string{"testdata/proprietary/"}},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
 				matchTarget("bin/bin2.meta_lic"),
@@ -1113,7 +1124,7 @@
 			condition: "proprietary",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/proprietary/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/proprietary/"}, labelConditions: true},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic", "notice"),
 				matchTarget("bin/bin2.meta_lic", "by_exception_only", "proprietary"),
@@ -1217,7 +1228,7 @@
 			outList := strings.Split(stdout.String(), "\n")
 			outLine := 0
 			if outList[outLine] != "strict digraph {" {
-				t.Errorf("dumpgraph: got 1st line %v, want strict digraph {")
+				t.Errorf("dumpgraph: got 1st line %v, want strict digraph {", outList[outLine])
 			}
 			outLine++
 			if strings.HasPrefix(strings.TrimLeft(outList[outLine], " \t"), "rankdir") {
diff --git a/tools/compliance/cmd/dumpresolutions.go b/tools/compliance/cmd/dumpresolutions/dumpresolutions.go
similarity index 92%
rename from tools/compliance/cmd/dumpresolutions.go
rename to tools/compliance/cmd/dumpresolutions/dumpresolutions.go
index 318cd91..9c5e972 100644
--- a/tools/compliance/cmd/dumpresolutions.go
+++ b/tools/compliance/cmd/dumpresolutions/dumpresolutions.go
@@ -15,7 +15,6 @@
 package main
 
 import (
-	"compliance"
 	"flag"
 	"fmt"
 	"io"
@@ -23,23 +22,38 @@
 	"path/filepath"
 	"sort"
 	"strings"
+
+	"android/soong/tools/compliance"
 )
 
 var (
 	conditions      = newMultiString("c", "License condition to resolve. (may be given multiple times)")
 	graphViz        = flag.Bool("dot", false, "Whether to output graphviz (i.e. dot) format.")
 	labelConditions = flag.Bool("label_conditions", false, "Whether to label target nodes with conditions.")
-	stripPrefix     = flag.String("strip_prefix", "", "Prefix to remove from paths. i.e. path to root")
+	stripPrefix     = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
 
 	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
-	failNoLicenses = fmt.Errorf("No licenses found")
+	failNoLicenses    = fmt.Errorf("No licenses found")
 )
 
 type context struct {
 	conditions      []compliance.LicenseCondition
 	graphViz        bool
 	labelConditions bool
-	stripPrefix     string
+	stripPrefix     []string
+}
+
+func (ctx context) strip(installPath string) string {
+	for _, prefix := range ctx.stripPrefix {
+		if strings.HasPrefix(installPath, prefix) {
+			p := strings.TrimPrefix(installPath, prefix)
+			if len(p) == 0 {
+				continue
+			}
+			return p
+		}
+	}
+	return installPath
 }
 
 func init() {
@@ -139,7 +153,7 @@
 
 	// targetOut calculates the string to output for `target` adding `sep`-separated conditions as needed.
 	targetOut := func(target *compliance.TargetNode, sep string) string {
-		tOut := strings.TrimPrefix(target.Name(), ctx.stripPrefix)
+		tOut := ctx.strip(target.Name())
 		if ctx.labelConditions {
 			conditions := target.LicenseConditions().Names()
 			if len(conditions) > 0 {
diff --git a/tools/compliance/cmd/dumpresolutions_test.go b/tools/compliance/cmd/dumpresolutions/dumpresolutions_test.go
similarity index 95%
rename from tools/compliance/cmd/dumpresolutions_test.go
rename to tools/compliance/cmd/dumpresolutions/dumpresolutions_test.go
index d904671..6fe1e8a 100644
--- a/tools/compliance/cmd/dumpresolutions_test.go
+++ b/tools/compliance/cmd/dumpresolutions/dumpresolutions_test.go
@@ -16,12 +16,24 @@
 
 import (
 	"bytes"
-	"compliance"
 	"fmt"
+	"os"
 	"strings"
 	"testing"
+
+	"android/soong/tools/compliance"
 )
 
+func TestMain(m *testing.M) {
+	// Change into the parent directory before running the tests
+	// so they can find the testdata directory.
+	if err := os.Chdir(".."); err != nil {
+		fmt.Printf("failed to change to parent directory: %s\n", err)
+		os.Exit(1)
+	}
+	os.Exit(m.Run())
+}
+
 func Test_plaintext(t *testing.T) {
 	tests := []struct {
 		condition   string
@@ -53,7 +65,7 @@
 			condition: "firstparty",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/firstparty/"},
+			ctx:       context{stripPrefix: []string{"testdata/firstparty/"}},
 			expectedOut: []string{
 				"bin/bin1.meta_lic bin/bin1.meta_lic notice",
 				"bin/bin1.meta_lic lib/liba.so.meta_lic notice",
@@ -75,7 +87,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  []compliance.LicenseCondition{compliance.NoticeCondition},
-				stripPrefix: "testdata/firstparty/",
+				stripPrefix: []string{"testdata/firstparty/"},
 			},
 			expectedOut: []string{
 				"bin/bin1.meta_lic bin/bin1.meta_lic notice",
@@ -97,8 +109,8 @@
 			name:      "apex_trimmed_share",
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
-				conditions: compliance.ImpliesShared.AsList(),
-				stripPrefix: "testdata/firstparty/",
+				conditions:  compliance.ImpliesShared.AsList(),
+				stripPrefix: []string{"testdata/firstparty/"},
 			},
 			expectedOut: []string{},
 		},
@@ -107,8 +119,8 @@
 			name:      "apex_trimmed_private",
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
-				conditions: compliance.ImpliesPrivate.AsList(),
-				stripPrefix: "testdata/firstparty/",
+				conditions:  compliance.ImpliesPrivate.AsList(),
+				stripPrefix: []string{"testdata/firstparty/"},
 			},
 			expectedOut: []string{},
 		},
@@ -117,8 +129,8 @@
 			name:      "apex_trimmed_share_private",
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
-				conditions:  append(compliance.ImpliesPrivate.AsList(),compliance.ImpliesShared.AsList()...),
-				stripPrefix: "testdata/firstparty/",
+				conditions:  append(compliance.ImpliesPrivate.AsList(), compliance.ImpliesShared.AsList()...),
+				stripPrefix: []string{"testdata/firstparty/"},
 			},
 			expectedOut: []string{},
 		},
@@ -126,7 +138,7 @@
 			condition: "firstparty",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/firstparty/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/firstparty/"}, labelConditions: true},
 			expectedOut: []string{
 				"bin/bin1.meta_lic:notice bin/bin1.meta_lic:notice notice",
 				"bin/bin1.meta_lic:notice lib/liba.so.meta_lic:notice notice",
@@ -211,7 +223,7 @@
 			condition: "notice",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/notice/"},
+			ctx:       context{stripPrefix: []string{"testdata/notice/"}},
 			expectedOut: []string{
 				"bin/bin1.meta_lic bin/bin1.meta_lic notice",
 				"bin/bin1.meta_lic lib/liba.so.meta_lic notice",
@@ -233,7 +245,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  []compliance.LicenseCondition{compliance.NoticeCondition},
-				stripPrefix: "testdata/notice/",
+				stripPrefix: []string{"testdata/notice/"},
 			},
 			expectedOut: []string{
 				"bin/bin1.meta_lic bin/bin1.meta_lic notice",
@@ -255,8 +267,8 @@
 			name:      "apex_trimmed_share",
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
-				conditions: compliance.ImpliesShared.AsList(),
-				stripPrefix: "testdata/notice/",
+				conditions:  compliance.ImpliesShared.AsList(),
+				stripPrefix: []string{"testdata/notice/"},
 			},
 			expectedOut: []string{},
 		},
@@ -265,8 +277,8 @@
 			name:      "apex_trimmed_private",
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
-				conditions: compliance.ImpliesPrivate.AsList(),
-				stripPrefix: "testdata/notice/",
+				conditions:  compliance.ImpliesPrivate.AsList(),
+				stripPrefix: []string{"testdata/notice/"},
 			},
 			expectedOut: []string{},
 		},
@@ -275,8 +287,8 @@
 			name:      "apex_trimmed_share_private",
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
-				conditions:  append(compliance.ImpliesShared.AsList(),compliance.ImpliesPrivate.AsList()...),
-				stripPrefix: "testdata/notice/",
+				conditions:  append(compliance.ImpliesShared.AsList(), compliance.ImpliesPrivate.AsList()...),
+				stripPrefix: []string{"testdata/notice/"},
 			},
 			expectedOut: []string{},
 		},
@@ -284,7 +296,7 @@
 			condition: "notice",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/notice/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/notice/"}, labelConditions: true},
 			expectedOut: []string{
 				"bin/bin1.meta_lic:notice bin/bin1.meta_lic:notice notice",
 				"bin/bin1.meta_lic:notice lib/liba.so.meta_lic:notice notice",
@@ -369,7 +381,7 @@
 			condition: "reciprocal",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/reciprocal/"},
+			ctx:       context{stripPrefix: []string{"testdata/reciprocal/"}},
 			expectedOut: []string{
 				"bin/bin1.meta_lic bin/bin1.meta_lic notice",
 				"bin/bin1.meta_lic lib/liba.so.meta_lic reciprocal",
@@ -391,7 +403,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  []compliance.LicenseCondition{compliance.NoticeCondition},
-				stripPrefix: "testdata/reciprocal/",
+				stripPrefix: []string{"testdata/reciprocal/"},
 			},
 			expectedOut: []string{
 				"bin/bin1.meta_lic bin/bin1.meta_lic notice",
@@ -408,8 +420,8 @@
 			name:      "apex_trimmed_share",
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
-				conditions: compliance.ImpliesShared.AsList(),
-				stripPrefix: "testdata/reciprocal/",
+				conditions:  compliance.ImpliesShared.AsList(),
+				stripPrefix: []string{"testdata/reciprocal/"},
 			},
 			expectedOut: []string{
 				"bin/bin1.meta_lic lib/liba.so.meta_lic reciprocal",
@@ -424,8 +436,8 @@
 			name:      "apex_trimmed_private",
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
-				conditions: compliance.ImpliesPrivate.AsList(),
-				stripPrefix: "testdata/reciprocal/",
+				conditions:  compliance.ImpliesPrivate.AsList(),
+				stripPrefix: []string{"testdata/reciprocal/"},
 			},
 			expectedOut: []string{},
 		},
@@ -434,8 +446,8 @@
 			name:      "apex_trimmed_share_private",
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
-				conditions: append(compliance.ImpliesShared.AsList(),compliance.ImpliesPrivate.AsList()...),
-				stripPrefix: "testdata/reciprocal/",
+				conditions:  append(compliance.ImpliesShared.AsList(), compliance.ImpliesPrivate.AsList()...),
+				stripPrefix: []string{"testdata/reciprocal/"},
 			},
 			expectedOut: []string{
 				"bin/bin1.meta_lic lib/liba.so.meta_lic reciprocal",
@@ -449,7 +461,7 @@
 			condition: "reciprocal",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/reciprocal/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/reciprocal/"}, labelConditions: true},
 			expectedOut: []string{
 				"bin/bin1.meta_lic:notice bin/bin1.meta_lic:notice notice",
 				"bin/bin1.meta_lic:notice lib/liba.so.meta_lic:reciprocal reciprocal",
@@ -535,7 +547,7 @@
 			condition: "restricted",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/restricted/"},
+			ctx:       context{stripPrefix: []string{"testdata/restricted/"}},
 			expectedOut: []string{
 				"bin/bin1.meta_lic bin/bin1.meta_lic notice:restricted_allows_dynamic_linking",
 				"bin/bin1.meta_lic lib/liba.so.meta_lic restricted_allows_dynamic_linking",
@@ -558,7 +570,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  []compliance.LicenseCondition{compliance.NoticeCondition},
-				stripPrefix: "testdata/restricted/",
+				stripPrefix: []string{"testdata/restricted/"},
 			},
 			expectedOut: []string{
 				"bin/bin1.meta_lic bin/bin1.meta_lic notice",
@@ -573,8 +585,8 @@
 			name:      "apex_trimmed_share",
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
-				conditions: compliance.ImpliesShared.AsList(),
-				stripPrefix: "testdata/restricted/",
+				conditions:  compliance.ImpliesShared.AsList(),
+				stripPrefix: []string{"testdata/restricted/"},
 			},
 			expectedOut: []string{
 				"bin/bin1.meta_lic bin/bin1.meta_lic restricted_allows_dynamic_linking",
@@ -597,8 +609,8 @@
 			name:      "apex_trimmed_private",
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
-				conditions: compliance.ImpliesPrivate.AsList(),
-				stripPrefix: "testdata/restricted/",
+				conditions:  compliance.ImpliesPrivate.AsList(),
+				stripPrefix: []string{"testdata/restricted/"},
 			},
 			expectedOut: []string{},
 		},
@@ -607,8 +619,8 @@
 			name:      "apex_trimmed_share_private",
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
-				conditions:  append(compliance.ImpliesShared.AsList(),compliance.ImpliesPrivate.AsList()...),
-				stripPrefix: "testdata/restricted/",
+				conditions:  append(compliance.ImpliesShared.AsList(), compliance.ImpliesPrivate.AsList()...),
+				stripPrefix: []string{"testdata/restricted/"},
 			},
 			expectedOut: []string{
 				"bin/bin1.meta_lic bin/bin1.meta_lic restricted_allows_dynamic_linking",
@@ -630,7 +642,7 @@
 			condition: "restricted",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/restricted/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/restricted/"}, labelConditions: true},
 			expectedOut: []string{
 				"bin/bin1.meta_lic:notice bin/bin1.meta_lic:notice notice:restricted_allows_dynamic_linking",
 				"bin/bin1.meta_lic:notice lib/liba.so.meta_lic:restricted_allows_dynamic_linking restricted_allows_dynamic_linking",
@@ -718,7 +730,7 @@
 			condition: "proprietary",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/proprietary/"},
+			ctx:       context{stripPrefix: []string{"testdata/proprietary/"}},
 			expectedOut: []string{
 				"bin/bin1.meta_lic bin/bin1.meta_lic notice",
 				"bin/bin1.meta_lic lib/liba.so.meta_lic proprietary:by_exception_only",
@@ -741,7 +753,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  []compliance.LicenseCondition{compliance.NoticeCondition},
-				stripPrefix: "testdata/proprietary/",
+				stripPrefix: []string{"testdata/proprietary/"},
 			},
 			expectedOut: []string{
 				"bin/bin1.meta_lic bin/bin1.meta_lic notice",
@@ -754,8 +766,8 @@
 			name:      "apex_trimmed_share",
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
-				conditions: compliance.ImpliesShared.AsList(),
-				stripPrefix: "testdata/proprietary/",
+				conditions:  compliance.ImpliesShared.AsList(),
+				stripPrefix: []string{"testdata/proprietary/"},
 			},
 			expectedOut: []string{
 				"bin/bin2.meta_lic bin/bin2.meta_lic restricted",
@@ -771,8 +783,8 @@
 			name:      "apex_trimmed_private",
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
-				conditions: compliance.ImpliesPrivate.AsList(),
-				stripPrefix: "testdata/proprietary/",
+				conditions:  compliance.ImpliesPrivate.AsList(),
+				stripPrefix: []string{"testdata/proprietary/"},
 			},
 			expectedOut: []string{
 				"bin/bin1.meta_lic lib/liba.so.meta_lic proprietary",
@@ -789,8 +801,8 @@
 			name:      "apex_trimmed_share_private",
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
-				conditions:  append(compliance.ImpliesShared.AsList(),compliance.ImpliesPrivate.AsList()...),
-				stripPrefix: "testdata/proprietary/",
+				conditions:  append(compliance.ImpliesShared.AsList(), compliance.ImpliesPrivate.AsList()...),
+				stripPrefix: []string{"testdata/proprietary/"},
 			},
 			expectedOut: []string{
 				"bin/bin1.meta_lic lib/liba.so.meta_lic proprietary",
@@ -810,7 +822,7 @@
 			condition: "proprietary",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/proprietary/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/proprietary/"}, labelConditions: true},
 			expectedOut: []string{
 				"bin/bin1.meta_lic:notice bin/bin1.meta_lic:notice notice",
 				"bin/bin1.meta_lic:notice lib/liba.so.meta_lic:proprietary:by_exception_only proprietary:by_exception_only",
@@ -907,7 +919,7 @@
 				for len(outList) > startLine && len(expectedList) > startLine && outList[startLine] == expectedList[startLine] {
 					startLine++
 				}
-				t.Errorf("listshare: gotStdout = %v, want %v, somewhere near line %d Stdout = %v, want %v",
+				t.Errorf("dumpresolutions: gotStdout = %v, want %v, somewhere near line %d Stdout = %v, want %v",
 					out, expected, startLine+1, outList[startLine], expectedList[startLine])
 			}
 		})
@@ -930,7 +942,7 @@
 }
 
 // newTestCondition constructs a test license condition in the license graph.
-func newTestCondition(lg *compliance.LicenseGraph, conditionName... string) compliance.LicenseConditionSet {
+func newTestCondition(lg *compliance.LicenseGraph, conditionName ...string) compliance.LicenseConditionSet {
 	cs := compliance.NewLicenseConditionSet()
 	for _, name := range conditionName {
 		cs = cs.Plus(compliance.RecognizedConditionNames[name])
@@ -1068,7 +1080,7 @@
 			condition: "firstparty",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/firstparty/"},
+			ctx:       context{stripPrefix: []string{"testdata/firstparty/"}},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
 				matchTarget("lib/liba.so.meta_lic"),
@@ -1132,7 +1144,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  []compliance.LicenseCondition{compliance.NoticeCondition},
-				stripPrefix: "testdata/firstparty/",
+				stripPrefix: []string{"testdata/firstparty/"},
 			},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
@@ -1197,7 +1209,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  compliance.ImpliesShared.AsList(),
-				stripPrefix: "testdata/firstparty/",
+				stripPrefix: []string{"testdata/firstparty/"},
 			},
 			expectedOut: []getMatcher{},
 		},
@@ -1207,7 +1219,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  compliance.ImpliesPrivate.AsList(),
-				stripPrefix: "testdata/firstparty/",
+				stripPrefix: []string{"testdata/firstparty/"},
 			},
 			expectedOut: []getMatcher{},
 		},
@@ -1217,7 +1229,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  compliance.ImpliesShared.Union(compliance.ImpliesPrivate).AsList(),
-				stripPrefix: "testdata/firstparty/",
+				stripPrefix: []string{"testdata/firstparty/"},
 			},
 			expectedOut: []getMatcher{},
 		},
@@ -1225,7 +1237,7 @@
 			condition: "firstparty",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/firstparty/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/firstparty/"}, labelConditions: true},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic", "notice"),
 				matchTarget("lib/liba.so.meta_lic", "notice"),
@@ -1460,7 +1472,7 @@
 			condition: "notice",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/notice/"},
+			ctx:       context{stripPrefix: []string{"testdata/notice/"}},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
 				matchTarget("lib/liba.so.meta_lic"),
@@ -1524,7 +1536,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  []compliance.LicenseCondition{compliance.NoticeCondition},
-				stripPrefix: "testdata/notice/",
+				stripPrefix: []string{"testdata/notice/"},
 			},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
@@ -1589,7 +1601,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  compliance.ImpliesShared.AsList(),
-				stripPrefix: "testdata/notice/",
+				stripPrefix: []string{"testdata/notice/"},
 			},
 			expectedOut: []getMatcher{},
 		},
@@ -1599,7 +1611,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  compliance.ImpliesPrivate.AsList(),
-				stripPrefix: "testdata/notice/",
+				stripPrefix: []string{"testdata/notice/"},
 			},
 			expectedOut: []getMatcher{},
 		},
@@ -1609,7 +1621,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  compliance.ImpliesShared.Union(compliance.ImpliesPrivate).AsList(),
-				stripPrefix: "testdata/notice/",
+				stripPrefix: []string{"testdata/notice/"},
 			},
 			expectedOut: []getMatcher{},
 		},
@@ -1617,7 +1629,7 @@
 			condition: "notice",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/notice/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/notice/"}, labelConditions: true},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic", "notice"),
 				matchTarget("lib/liba.so.meta_lic", "notice"),
@@ -1852,7 +1864,7 @@
 			condition: "reciprocal",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/reciprocal/"},
+			ctx:       context{stripPrefix: []string{"testdata/reciprocal/"}},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
 				matchTarget("lib/liba.so.meta_lic"),
@@ -1916,7 +1928,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  []compliance.LicenseCondition{compliance.NoticeCondition},
-				stripPrefix: "testdata/reciprocal/",
+				stripPrefix: []string{"testdata/reciprocal/"},
 			},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
@@ -1959,7 +1971,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  compliance.ImpliesShared.AsList(),
-				stripPrefix: "testdata/reciprocal/",
+				stripPrefix: []string{"testdata/reciprocal/"},
 			},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
@@ -1994,7 +2006,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  compliance.ImpliesPrivate.AsList(),
-				stripPrefix: "testdata/reciprocal/",
+				stripPrefix: []string{"testdata/reciprocal/"},
 			},
 			expectedOut: []getMatcher{},
 		},
@@ -2004,7 +2016,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  compliance.ImpliesShared.Union(compliance.ImpliesPrivate).AsList(),
-				stripPrefix: "testdata/reciprocal/",
+				stripPrefix: []string{"testdata/reciprocal/"},
 			},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
@@ -2037,7 +2049,7 @@
 			condition: "reciprocal",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/reciprocal/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/reciprocal/"}, labelConditions: true},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic", "notice"),
 				matchTarget("lib/liba.so.meta_lic", "reciprocal"),
@@ -2284,7 +2296,7 @@
 			condition: "restricted",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/restricted/"},
+			ctx:       context{stripPrefix: []string{"testdata/restricted/"}},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
 				matchTarget("lib/liba.so.meta_lic"),
@@ -2360,7 +2372,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  []compliance.LicenseCondition{compliance.NoticeCondition},
-				stripPrefix: "testdata/restricted/",
+				stripPrefix: []string{"testdata/restricted/"},
 			},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
@@ -2394,7 +2406,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  compliance.ImpliesShared.AsList(),
-				stripPrefix: "testdata/restricted/",
+				stripPrefix: []string{"testdata/restricted/"},
 			},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
@@ -2466,7 +2478,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  compliance.ImpliesPrivate.AsList(),
-				stripPrefix: "testdata/restricted/",
+				stripPrefix: []string{"testdata/restricted/"},
 			},
 			expectedOut: []getMatcher{},
 		},
@@ -2476,7 +2488,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  compliance.ImpliesShared.Union(compliance.ImpliesPrivate).AsList(),
-				stripPrefix: "testdata/restricted/",
+				stripPrefix: []string{"testdata/restricted/"},
 			},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
@@ -2546,7 +2558,7 @@
 			condition: "restricted",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/restricted/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/restricted/"}, labelConditions: true},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic", "notice"),
 				matchTarget("lib/liba.so.meta_lic", "restricted_allows_dynamic_linking"),
@@ -2824,7 +2836,7 @@
 			condition: "proprietary",
 			name:      "apex_trimmed",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/proprietary/"},
+			ctx:       context{stripPrefix: []string{"testdata/proprietary/"}},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
 				matchTarget("lib/liba.so.meta_lic"),
@@ -2902,7 +2914,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  []compliance.LicenseCondition{compliance.NoticeCondition},
-				stripPrefix: "testdata/proprietary/",
+				stripPrefix: []string{"testdata/proprietary/"},
 			},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
@@ -2927,7 +2939,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  compliance.ImpliesShared.AsList(),
-				stripPrefix: "testdata/proprietary/",
+				stripPrefix: []string{"testdata/proprietary/"},
 			},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin2.meta_lic"),
@@ -2965,7 +2977,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  compliance.ImpliesPrivate.AsList(),
-				stripPrefix: "testdata/proprietary/",
+				stripPrefix: []string{"testdata/proprietary/"},
 			},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
@@ -3009,7 +3021,7 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx: context{
 				conditions:  compliance.ImpliesShared.Union(compliance.ImpliesPrivate).AsList(),
-				stripPrefix: "testdata/proprietary/",
+				stripPrefix: []string{"testdata/proprietary/"},
 			},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic"),
@@ -3070,7 +3082,7 @@
 			condition: "proprietary",
 			name:      "apex_trimmed_labelled",
 			roots:     []string{"highest.apex.meta_lic"},
-			ctx:       context{stripPrefix: "testdata/proprietary/", labelConditions: true},
+			ctx:       context{stripPrefix: []string{"testdata/proprietary/"}, labelConditions: true},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic", "notice"),
 				matchTarget("lib/liba.so.meta_lic", "by_exception_only", "proprietary"),
@@ -3305,7 +3317,7 @@
 			outList := strings.Split(stdout.String(), "\n")
 			outLine := 0
 			if outList[outLine] != "strict digraph {" {
-				t.Errorf("dumpresolutions: got 1st line %v, want strict digraph {")
+				t.Errorf("dumpresolutions: got 1st line %v, want strict digraph {", outList[outLine])
 			}
 			outLine++
 			if strings.HasPrefix(strings.TrimLeft(outList[outLine], " \t"), "rankdir") {
diff --git a/tools/compliance/cmd/htmlnotice/htmlnotice.go b/tools/compliance/cmd/htmlnotice/htmlnotice.go
new file mode 100644
index 0000000..ffb0585
--- /dev/null
+++ b/tools/compliance/cmd/htmlnotice/htmlnotice.go
@@ -0,0 +1,259 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bytes"
+	"compress/gzip"
+	"flag"
+	"fmt"
+	"html"
+	"io"
+	"io/fs"
+	"os"
+	"path/filepath"
+	"strings"
+
+	"android/soong/tools/compliance"
+
+	"github.com/google/blueprint/deptools"
+)
+
+var (
+	outputFile  = flag.String("o", "-", "Where to write the NOTICE text file. (default stdout)")
+	depsFile    = flag.String("d", "", "Where to write the deps file")
+	includeTOC  = flag.Bool("toc", true, "Whether to include a table of contents.")
+	product     = flag.String("product", "", "The name of the product for which the notice is generated.")
+	stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
+	title       = flag.String("title", "", "The title of the notice file.")
+
+	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
+	failNoLicenses    = fmt.Errorf("No licenses found")
+)
+
+type context struct {
+	stdout      io.Writer
+	stderr      io.Writer
+	rootFS      fs.FS
+	includeTOC  bool
+	product     string
+	stripPrefix []string
+	title       string
+	deps        *[]string
+}
+
+func (ctx context) strip(installPath string) string {
+	for _, prefix := range ctx.stripPrefix {
+		if strings.HasPrefix(installPath, prefix) {
+			p := strings.TrimPrefix(installPath, prefix)
+			if 0 == len(p) {
+				p = ctx.product
+			}
+			if 0 == len(p) {
+				continue
+			}
+			return p
+		}
+	}
+	return installPath
+}
+
+func init() {
+	flag.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs an html NOTICE.html or gzipped NOTICE.html.gz file if the -o filename
+ends with ".gz".
+
+Options:
+`, filepath.Base(os.Args[0]))
+		flag.PrintDefaults()
+	}
+}
+
+// newMultiString creates a flag that allows multiple values in an array.
+func newMultiString(name, usage string) *multiString {
+	var f multiString
+	flag.Var(&f, name, usage)
+	return &f
+}
+
+// multiString implements the flag `Value` interface for multiple strings.
+type multiString []string
+
+func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
+func main() {
+	flag.Parse()
+
+	// Must specify at least one root target.
+	if flag.NArg() == 0 {
+		flag.Usage()
+		os.Exit(2)
+	}
+
+	if len(*outputFile) == 0 {
+		flag.Usage()
+		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+		os.Exit(2)
+	} else {
+		dir, err := filepath.Abs(filepath.Dir(*outputFile))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+		fi, err := os.Stat(dir)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+			os.Exit(1)
+		}
+		if !fi.IsDir() {
+			fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+			os.Exit(1)
+		}
+	}
+
+	var ofile io.Writer
+	var closer io.Closer
+	ofile = os.Stdout
+	var obuf *bytes.Buffer
+	if *outputFile != "-" {
+		obuf = &bytes.Buffer{}
+		ofile = obuf
+	}
+	if strings.HasSuffix(*outputFile, ".gz") {
+		ofile, _ = gzip.NewWriterLevel(obuf, gzip.BestCompression)
+		closer = ofile.(io.Closer)
+	}
+
+	var deps []string
+
+	ctx := &context{ofile, os.Stderr, os.DirFS("."), *includeTOC, *product, *stripPrefix, *title, &deps}
+
+	err := htmlNotice(ctx, flag.Args()...)
+	if err != nil {
+		if err == failNoneRequested {
+			flag.Usage()
+		}
+		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
+		os.Exit(1)
+	}
+	if closer != nil {
+		closer.Close()
+	}
+
+	if *outputFile != "-" {
+		err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write output to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+	}
+	if *depsFile != "" {
+		err := deptools.WriteDepFile(*depsFile, *outputFile, deps)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write deps to %q: %s\n", *depsFile, err)
+			os.Exit(1)
+		}
+	}
+	os.Exit(0)
+}
+
+// htmlNotice implements the htmlnotice utility.
+func htmlNotice(ctx *context, files ...string) error {
+	// Must be at least one root file.
+	if len(files) < 1 {
+		return failNoneRequested
+	}
+
+	// Read the license graph from the license metadata files (*.meta_lic).
+	licenseGraph, err := compliance.ReadLicenseGraph(ctx.rootFS, ctx.stderr, files)
+	if err != nil {
+		return fmt.Errorf("Unable to read license metadata file(s) %q: %v\n", files, err)
+	}
+	if licenseGraph == nil {
+		return failNoLicenses
+	}
+
+	// rs contains all notice resolutions.
+	rs := compliance.ResolveNotices(licenseGraph)
+
+	ni, err := compliance.IndexLicenseTexts(ctx.rootFS, licenseGraph, rs)
+	if err != nil {
+		return fmt.Errorf("Unable to read license text file(s) for %q: %v\n", files, err)
+	}
+
+	fmt.Fprintln(ctx.stdout, "<!DOCTYPE html>")
+	fmt.Fprintln(ctx.stdout, "<html><head>")
+	fmt.Fprintln(ctx.stdout, "<style type=\"text/css\">")
+	fmt.Fprintln(ctx.stdout, "body { padding: 2px; margin: 0; }")
+	fmt.Fprintln(ctx.stdout, "ul { list-style-type: none; margin: 0; padding: 0; }")
+	fmt.Fprintln(ctx.stdout, "li { padding-left: 1em; }")
+	fmt.Fprintln(ctx.stdout, ".file-list { margin-left: 1em; }")
+	fmt.Fprintln(ctx.stdout, "</style>")
+	if len(ctx.title) > 0 {
+		fmt.Fprintf(ctx.stdout, "<title>%s</title>\n", html.EscapeString(ctx.title))
+	} else if len(ctx.product) > 0 {
+		fmt.Fprintf(ctx.stdout, "<title>%s</title>\n", html.EscapeString(ctx.product))
+	}
+	fmt.Fprintln(ctx.stdout, "</head>")
+	fmt.Fprintln(ctx.stdout, "<body>")
+
+	if len(ctx.title) > 0 {
+		fmt.Fprintf(ctx.stdout, "  <h1>%s</h1>\n", html.EscapeString(ctx.title))
+	} else if len(ctx.product) > 0 {
+		fmt.Fprintf(ctx.stdout, "  <h1>%s</h1>\n", html.EscapeString(ctx.product))
+	}
+	ids := make(map[string]string)
+	if ctx.includeTOC {
+		fmt.Fprintln(ctx.stdout, "  <ul class=\"toc\">")
+		i := 0
+		for installPath := range ni.InstallPaths() {
+			id := fmt.Sprintf("id%d", i)
+			i++
+			ids[installPath] = id
+			fmt.Fprintf(ctx.stdout, "    <li id=\"%s\"><strong>%s</strong>\n      <ul>\n", id, html.EscapeString(ctx.strip(installPath)))
+			for _, h := range ni.InstallHashes(installPath) {
+				libs := ni.InstallHashLibs(installPath, h)
+				fmt.Fprintf(ctx.stdout, "        <li><a href=\"#%s\">%s</a>\n", h.String(), html.EscapeString(strings.Join(libs, ", ")))
+			}
+			fmt.Fprintln(ctx.stdout, "      </ul>")
+		}
+		fmt.Fprintln(ctx.stdout, "  </ul><!-- toc -->")
+	}
+	for h := range ni.Hashes() {
+		fmt.Fprintln(ctx.stdout, "  <hr>")
+		for _, libName := range ni.HashLibs(h) {
+			fmt.Fprintf(ctx.stdout, "  <strong>%s</strong> used by:\n    <ul class=\"file-list\">\n", html.EscapeString(libName))
+			for _, installPath := range ni.HashLibInstalls(h, libName) {
+				if id, ok := ids[installPath]; ok {
+					fmt.Fprintf(ctx.stdout, "      <li><a href=\"#%s\">%s</a>\n", id, html.EscapeString(ctx.strip(installPath)))
+				} else {
+					fmt.Fprintf(ctx.stdout, "      <li>%s\n", html.EscapeString(ctx.strip(installPath)))
+				}
+			}
+			fmt.Fprintf(ctx.stdout, "    </ul>\n")
+		}
+		fmt.Fprintf(ctx.stdout, "  </ul>\n  <a id=\"%s\"/><pre class=\"license-text\">", h.String())
+		fmt.Fprintln(ctx.stdout, html.EscapeString(string(ni.HashText(h))))
+		fmt.Fprintln(ctx.stdout, "  </pre><!-- license-text -->")
+	}
+	fmt.Fprintln(ctx.stdout, "</body></html>")
+
+	*ctx.deps = ni.InputNoticeFiles()
+
+	return nil
+}
diff --git a/tools/compliance/cmd/htmlnotice/htmlnotice_test.go b/tools/compliance/cmd/htmlnotice/htmlnotice_test.go
new file mode 100644
index 0000000..1b01d16
--- /dev/null
+++ b/tools/compliance/cmd/htmlnotice/htmlnotice_test.go
@@ -0,0 +1,916 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bufio"
+	"bytes"
+	"fmt"
+	"html"
+	"os"
+	"reflect"
+	"regexp"
+	"strings"
+	"testing"
+)
+
+var (
+	horizontalRule = regexp.MustCompile(`^\s*<hr>\s*$`)
+	bodyTag        = regexp.MustCompile(`^\s*<body>\s*$`)
+	boilerPlate    = regexp.MustCompile(`^\s*(?:<ul class="file-list">|<ul>|</.*)\s*$`)
+	tocTag         = regexp.MustCompile(`^\s*<ul class="toc">\s*$`)
+	libraryName    = regexp.MustCompile(`^\s*<strong>(.*)</strong>\s\s*used\s\s*by\s*:\s*$`)
+	licenseText    = regexp.MustCompile(`^\s*<a id="[^"]{32}"/><pre class="license-text">(.*)$`)
+	titleTag       = regexp.MustCompile(`^\s*<title>(.*)</title>\s*$`)
+	h1Tag          = regexp.MustCompile(`^\s*<h1>(.*)</h1>\s*$`)
+	usedByTarget   = regexp.MustCompile(`^\s*<li>(?:<a href="#id[0-9]+">)?((?:out/(?:[^/<]*/)+)[^/<]*)(?:</a>)?\s*$`)
+	installTarget  = regexp.MustCompile(`^\s*<li id="id[0-9]+"><strong>(.*)</strong>\s*$`)
+	libReference   = regexp.MustCompile(`^\s*<li><a href="#[^"]{32}">(.*)</a>\s*$`)
+)
+
+func TestMain(m *testing.M) {
+	// Change into the parent directory before running the tests
+	// so they can find the testdata directory.
+	if err := os.Chdir(".."); err != nil {
+		fmt.Printf("failed to change to testdata directory: %s\n", err)
+		os.Exit(1)
+	}
+	os.Exit(m.Run())
+}
+
+func Test(t *testing.T) {
+	tests := []struct {
+		condition    string
+		name         string
+		roots        []string
+		includeTOC   bool
+		stripPrefix  string
+		title        string
+		expectedOut  []matcher
+		expectedDeps []string
+	}{
+		{
+			condition: "firstparty",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/bin/bin2"},
+				usedBy{"highest.apex/lib/liba.so"},
+				usedBy{"highest.apex/lib/libb.so"},
+				firstParty{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition:  "firstparty",
+			name:       "apex+toc",
+			roots:      []string{"highest.apex.meta_lic"},
+			includeTOC: true,
+			expectedOut: []matcher{
+				toc{},
+				target{"highest.apex"},
+				uses{"Android"},
+				target{"highest.apex/bin/bin1"},
+				uses{"Android"},
+				target{"highest.apex/bin/bin2"},
+				uses{"Android"},
+				target{"highest.apex/lib/liba.so"},
+				uses{"Android"},
+				target{"highest.apex/lib/libb.so"},
+				uses{"Android"},
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/bin/bin2"},
+				usedBy{"highest.apex/lib/liba.so"},
+				usedBy{"highest.apex/lib/libb.so"},
+				firstParty{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "firstparty",
+			name:      "apex-with-title",
+			roots:     []string{"highest.apex.meta_lic"},
+			title:     "Emperor",
+			expectedOut: []matcher{
+				pageTitle{"Emperor"},
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/bin/bin2"},
+				usedBy{"highest.apex/lib/liba.so"},
+				usedBy{"highest.apex/lib/libb.so"},
+				firstParty{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition:  "firstparty",
+			name:       "apex-with-title+toc",
+			roots:      []string{"highest.apex.meta_lic"},
+			includeTOC: true,
+			title:      "Emperor",
+			expectedOut: []matcher{
+				pageTitle{"Emperor"},
+				toc{},
+				target{"highest.apex"},
+				uses{"Android"},
+				target{"highest.apex/bin/bin1"},
+				uses{"Android"},
+				target{"highest.apex/bin/bin2"},
+				uses{"Android"},
+				target{"highest.apex/lib/liba.so"},
+				uses{"Android"},
+				target{"highest.apex/lib/libb.so"},
+				uses{"Android"},
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/bin/bin2"},
+				usedBy{"highest.apex/lib/liba.so"},
+				usedBy{"highest.apex/lib/libb.so"},
+				firstParty{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "firstparty",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"container.zip"},
+				usedBy{"container.zip/bin1"},
+				usedBy{"container.zip/bin2"},
+				usedBy{"container.zip/liba.so"},
+				usedBy{"container.zip/libb.so"},
+				firstParty{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "firstparty",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"application"},
+				firstParty{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "firstparty",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"bin/bin1"},
+				firstParty{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "firstparty",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"lib/libd.so"},
+				firstParty{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "notice",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/bin/bin2"},
+				usedBy{"highest.apex/lib/libb.so"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/lib/liba.so"},
+				library{"External"},
+				usedBy{"highest.apex/bin/bin1"},
+				notice{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"container.zip"},
+				usedBy{"container.zip/bin1"},
+				usedBy{"container.zip/bin2"},
+				usedBy{"container.zip/libb.so"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"container.zip/bin1"},
+				usedBy{"container.zip/liba.so"},
+				library{"External"},
+				usedBy{"container.zip/bin1"},
+				notice{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"application"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"application"},
+				notice{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"bin/bin1"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"bin/bin1"},
+				library{"External"},
+				usedBy{"bin/bin1"},
+				notice{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"External"},
+				usedBy{"lib/libd.so"},
+				notice{},
+			},
+			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+		},
+		{
+			condition: "reciprocal",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/bin/bin2"},
+				usedBy{"highest.apex/lib/libb.so"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/lib/liba.so"},
+				library{"External"},
+				usedBy{"highest.apex/bin/bin1"},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"container.zip"},
+				usedBy{"container.zip/bin1"},
+				usedBy{"container.zip/bin2"},
+				usedBy{"container.zip/libb.so"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"container.zip/bin1"},
+				usedBy{"container.zip/liba.so"},
+				library{"External"},
+				usedBy{"container.zip/bin1"},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"application"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"application"},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"bin/bin1"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"bin/bin1"},
+				library{"External"},
+				usedBy{"bin/bin1"},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"External"},
+				usedBy{"lib/libd.so"},
+				notice{},
+			},
+			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+		},
+		{
+			condition: "restricted",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/bin/bin2"},
+				firstParty{},
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex/bin/bin2"},
+				usedBy{"highest.apex/lib/libb.so"},
+				library{"Device"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/lib/liba.so"},
+				restricted{},
+				hr{},
+				library{"External"},
+				usedBy{"highest.apex/bin/bin1"},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"container.zip"},
+				usedBy{"container.zip/bin1"},
+				usedBy{"container.zip/bin2"},
+				firstParty{},
+				hr{},
+				library{"Android"},
+				usedBy{"container.zip/bin2"},
+				usedBy{"container.zip/libb.so"},
+				library{"Device"},
+				usedBy{"container.zip/bin1"},
+				usedBy{"container.zip/liba.so"},
+				restricted{},
+				hr{},
+				library{"External"},
+				usedBy{"container.zip/bin1"},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"application"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"application"},
+				restricted{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"bin/bin1"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"bin/bin1"},
+				restricted{},
+				hr{},
+				library{"External"},
+				usedBy{"bin/bin1"},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"External"},
+				usedBy{"lib/libd.so"},
+				notice{},
+			},
+			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+		},
+		{
+			condition: "proprietary",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex/bin/bin2"},
+				usedBy{"highest.apex/lib/libb.so"},
+				restricted{},
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex"},
+				usedBy{"highest.apex/bin/bin1"},
+				firstParty{},
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex/bin/bin2"},
+				library{"Device"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/lib/liba.so"},
+				library{"External"},
+				usedBy{"highest.apex/bin/bin1"},
+				proprietary{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"container.zip/bin2"},
+				usedBy{"container.zip/libb.so"},
+				restricted{},
+				hr{},
+				library{"Android"},
+				usedBy{"container.zip"},
+				usedBy{"container.zip/bin1"},
+				firstParty{},
+				hr{},
+				library{"Android"},
+				usedBy{"container.zip/bin2"},
+				library{"Device"},
+				usedBy{"container.zip/bin1"},
+				usedBy{"container.zip/liba.so"},
+				library{"External"},
+				usedBy{"container.zip/bin1"},
+				proprietary{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"application"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"application"},
+				proprietary{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"bin/bin1"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"bin/bin1"},
+				library{"External"},
+				usedBy{"bin/bin1"},
+				proprietary{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"External"},
+				usedBy{"lib/libd.so"},
+				notice{},
+			},
+			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.condition+" "+tt.name, func(t *testing.T) {
+			stdout := &bytes.Buffer{}
+			stderr := &bytes.Buffer{}
+
+			rootFiles := make([]string, 0, len(tt.roots))
+			for _, r := range tt.roots {
+				rootFiles = append(rootFiles, "testdata/"+tt.condition+"/"+r)
+			}
+
+			var deps []string
+
+			ctx := context{stdout, stderr, os.DirFS("."), tt.includeTOC, "", []string{tt.stripPrefix}, tt.title, &deps}
+
+			err := htmlNotice(&ctx, rootFiles...)
+			if err != nil {
+				t.Fatalf("htmlnotice: error = %v, stderr = %v", err, stderr)
+				return
+			}
+			if stderr.Len() > 0 {
+				t.Errorf("htmlnotice: gotStderr = %v, want none", stderr)
+			}
+
+			t.Logf("got stdout: %s", stdout.String())
+
+			t.Logf("want stdout: %s", matcherList(tt.expectedOut).String())
+
+			out := bufio.NewScanner(stdout)
+			lineno := 0
+			inBody := false
+			hasTitle := false
+			ttle, expectTitle := tt.expectedOut[0].(pageTitle)
+			for out.Scan() {
+				line := out.Text()
+				if strings.TrimLeft(line, " ") == "" {
+					continue
+				}
+				if !inBody {
+					if expectTitle {
+						if tl := checkTitle(line); len(tl) > 0 {
+							if tl != ttle.t {
+								t.Errorf("htmlnotice: unexpected title: got %q, want %q", tl, ttle.t)
+							}
+							hasTitle = true
+						}
+					}
+					if bodyTag.MatchString(line) {
+						inBody = true
+						if expectTitle && !hasTitle {
+							t.Errorf("htmlnotice: missing title: got no <title> tag, want <title>%s</title>", ttle.t)
+						}
+					}
+					continue
+				}
+				if boilerPlate.MatchString(line) {
+					continue
+				}
+				if len(tt.expectedOut) <= lineno {
+					t.Errorf("htmlnotice: unexpected output at line %d: got %q, want nothing (wanted %d lines)", lineno+1, line, len(tt.expectedOut))
+				} else if !tt.expectedOut[lineno].isMatch(line) {
+					t.Errorf("htmlnotice: unexpected output at line %d: got %q, want %q", lineno+1, line, tt.expectedOut[lineno].String())
+				}
+				lineno++
+			}
+			if !inBody {
+				t.Errorf("htmlnotice: missing body: got no <body> tag, want <body> tag followed by %s", matcherList(tt.expectedOut).String())
+				return
+			}
+			for ; lineno < len(tt.expectedOut); lineno++ {
+				t.Errorf("htmlnotice: missing output line %d: ended early, want %q", lineno+1, tt.expectedOut[lineno].String())
+			}
+
+			t.Logf("got deps: %q", deps)
+
+			t.Logf("want deps: %q", tt.expectedDeps)
+
+			if g, w := deps, tt.expectedDeps; !reflect.DeepEqual(g, w) {
+				t.Errorf("unexpected deps, wanted:\n%s\ngot:\n%s\n",
+					strings.Join(w, "\n"), strings.Join(g, "\n"))
+			}
+		})
+	}
+}
+
+func checkTitle(line string) string {
+	groups := titleTag.FindStringSubmatch(line)
+	if len(groups) != 2 {
+		return ""
+	}
+	return groups[1]
+}
+
+type matcher interface {
+	isMatch(line string) bool
+	String() string
+}
+
+type pageTitle struct {
+	t string
+}
+
+func (m pageTitle) isMatch(line string) bool {
+	groups := h1Tag.FindStringSubmatch(line)
+	if len(groups) != 2 {
+		return false
+	}
+	return groups[1] == html.EscapeString(m.t)
+}
+
+func (m pageTitle) String() string {
+	return "  <h1>" + html.EscapeString(m.t) + "</h1>"
+}
+
+type toc struct{}
+
+func (m toc) isMatch(line string) bool {
+	return tocTag.MatchString(line)
+}
+
+func (m toc) String() string {
+	return `  <ul class="toc">`
+}
+
+type target struct {
+	name string
+}
+
+func (m target) isMatch(line string) bool {
+	groups := installTarget.FindStringSubmatch(line)
+	if len(groups) != 2 {
+		return false
+	}
+	return strings.HasPrefix(groups[1], "out/") && strings.HasSuffix(groups[1], "/"+html.EscapeString(m.name))
+}
+
+func (m target) String() string {
+	return `  <li id="id#"><strong>` + html.EscapeString(m.name) + `</strong>`
+}
+
+type uses struct {
+	name string
+}
+
+func (m uses) isMatch(line string) bool {
+	groups := libReference.FindStringSubmatch(line)
+	if len(groups) != 2 {
+		return false
+	}
+	return groups[1] == html.EscapeString(m.name)
+}
+
+func (m uses) String() string {
+	return `  <li><a href="#hash">` + html.EscapeString(m.name) + `</a>`
+}
+
+type hr struct{}
+
+func (m hr) isMatch(line string) bool {
+	return horizontalRule.MatchString(line)
+}
+
+func (m hr) String() string {
+	return "  <hr>"
+}
+
+type library struct {
+	name string
+}
+
+func (m library) isMatch(line string) bool {
+	groups := libraryName.FindStringSubmatch(line)
+	if len(groups) != 2 {
+		return false
+	}
+	return groups[1] == html.EscapeString(m.name)
+}
+
+func (m library) String() string {
+	return "  <strong>" + html.EscapeString(m.name) + "</strong> used by:"
+}
+
+type usedBy struct {
+	name string
+}
+
+func (m usedBy) isMatch(line string) bool {
+	groups := usedByTarget.FindStringSubmatch(line)
+	if len(groups) != 2 {
+		return false
+	}
+	return strings.HasPrefix(groups[1], "out/") && strings.HasSuffix(groups[1], "/"+html.EscapeString(m.name))
+}
+
+func (m usedBy) String() string {
+	return "  <li>out/.../" + html.EscapeString(m.name)
+}
+
+func matchesText(line, text string) bool {
+	groups := licenseText.FindStringSubmatch(line)
+	if len(groups) != 2 {
+		return false
+	}
+	return groups[1] == html.EscapeString(text)
+}
+
+func expectedText(text string) string {
+	return `  <a href="#hash"/><pre class="license-text">` + html.EscapeString(text)
+}
+
+type firstParty struct{}
+
+func (m firstParty) isMatch(line string) bool {
+	return matchesText(line, "&&&First Party License&&&")
+}
+
+func (m firstParty) String() string {
+	return expectedText("&&&First Party License&&&")
+}
+
+type notice struct{}
+
+func (m notice) isMatch(line string) bool {
+	return matchesText(line, "%%%Notice License%%%")
+}
+
+func (m notice) String() string {
+	return expectedText("%%%Notice License%%%")
+}
+
+type reciprocal struct{}
+
+func (m reciprocal) isMatch(line string) bool {
+	return matchesText(line, "$$$Reciprocal License$$$")
+}
+
+func (m reciprocal) String() string {
+	return expectedText("$$$Reciprocal License$$$")
+}
+
+type restricted struct{}
+
+func (m restricted) isMatch(line string) bool {
+	return matchesText(line, "###Restricted License###")
+}
+
+func (m restricted) String() string {
+	return expectedText("###Restricted License###")
+}
+
+type proprietary struct{}
+
+func (m proprietary) isMatch(line string) bool {
+	return matchesText(line, "@@@Proprietary License@@@")
+}
+
+func (m proprietary) String() string {
+	return expectedText("@@@Proprietary License@@@")
+}
+
+type matcherList []matcher
+
+func (l matcherList) String() string {
+	var sb strings.Builder
+	for _, m := range l {
+		s := m.String()
+		if s[:3] == s[len(s)-3:] {
+			fmt.Fprintln(&sb)
+		}
+		fmt.Fprintf(&sb, "%s\n", s)
+		if s[:3] == s[len(s)-3:] {
+			fmt.Fprintln(&sb)
+		}
+	}
+	return sb.String()
+}
diff --git a/tools/compliance/cmd/listshare.go b/tools/compliance/cmd/listshare/listshare.go
similarity index 96%
rename from tools/compliance/cmd/listshare.go
rename to tools/compliance/cmd/listshare/listshare.go
index 5c58dc4..030caa7 100644
--- a/tools/compliance/cmd/listshare.go
+++ b/tools/compliance/cmd/listshare/listshare.go
@@ -15,7 +15,6 @@
 package main
 
 import (
-	"compliance"
 	"flag"
 	"fmt"
 	"io"
@@ -23,6 +22,8 @@
 	"path/filepath"
 	"sort"
 	"strings"
+
+	"android/soong/tools/compliance"
 )
 
 func init() {
@@ -41,7 +42,7 @@
 
 var (
 	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
-	failNoLicenses = fmt.Errorf("No licenses found")
+	failNoLicenses    = fmt.Errorf("No licenses found")
 )
 
 func main() {
diff --git a/tools/compliance/cmd/listshare_test.go b/tools/compliance/cmd/listshare/listshare_test.go
similarity index 97%
rename from tools/compliance/cmd/listshare_test.go
rename to tools/compliance/cmd/listshare/listshare_test.go
index 71a0be6..91e9a43 100644
--- a/tools/compliance/cmd/listshare_test.go
+++ b/tools/compliance/cmd/listshare/listshare_test.go
@@ -16,10 +16,22 @@
 
 import (
 	"bytes"
+	"fmt"
+	"os"
 	"strings"
 	"testing"
 )
 
+func TestMain(m *testing.M) {
+	// Change into the parent directory before running the tests
+	// so they can find the testdata directory.
+	if err := os.Chdir(".."); err != nil {
+		fmt.Printf("failed to change to testdata directory: %s\n", err)
+		os.Exit(1)
+	}
+	os.Exit(m.Run())
+}
+
 func Test(t *testing.T) {
 	type projectShare struct {
 		project    string
diff --git a/tools/compliance/cmd/rtrace/rtrace.go b/tools/compliance/cmd/rtrace/rtrace.go
new file mode 100644
index 0000000..7c63979
--- /dev/null
+++ b/tools/compliance/cmd/rtrace/rtrace.go
@@ -0,0 +1,198 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"flag"
+	"fmt"
+	"io"
+	"os"
+	"path/filepath"
+	"sort"
+	"strings"
+
+	"android/soong/tools/compliance"
+)
+
+var (
+	sources         = newMultiString("rtrace", "Projects or metadata files to trace back from. (required; multiple allowed)")
+	stripPrefix     = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
+
+	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
+	failNoSources     = fmt.Errorf("\nNo projects or metadata files to trace back from")
+	failNoLicenses    = fmt.Errorf("No licenses found")
+)
+
+type context struct {
+	sources         []string
+	stripPrefix     []string
+}
+
+func (ctx context) strip(installPath string) string {
+	for _, prefix := range ctx.stripPrefix {
+		if strings.HasPrefix(installPath, prefix) {
+			p := strings.TrimPrefix(installPath, prefix)
+			if 0 == len(p) {
+				continue
+			}
+			return p
+		}
+	}
+	return installPath
+}
+
+func init() {
+	flag.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs a space-separated Target ActsOn Origin Condition tuple for each
+resolution in the graph. When -dot flag given, outputs nodes and edges
+in graphviz directed graph format.
+
+If one or more '-c condition' conditions are given, outputs the
+resolution for the union of the conditions. Otherwise, outputs the
+resolution for all conditions.
+
+In plain text mode, when '-label_conditions' is requested, the Target
+and Origin have colon-separated license conditions appended:
+i.e. target:condition1:condition2 etc.
+
+Options:
+`, filepath.Base(os.Args[0]))
+		flag.PrintDefaults()
+	}
+}
+
+// newMultiString creates a flag that allows multiple values in an array.
+func newMultiString(name, usage string) *multiString {
+	var f multiString
+	flag.Var(&f, name, usage)
+	return &f
+}
+
+// multiString implements the flag `Value` interface for multiple strings.
+type multiString []string
+
+func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
+func main() {
+	flag.Parse()
+
+	// Must specify at least one root target.
+	if flag.NArg() == 0 {
+		flag.Usage()
+		os.Exit(2)
+	}
+
+	if len(*sources) == 0 {
+		flag.Usage()
+		fmt.Fprintf(os.Stderr, "\nMust specify at least 1 --rtrace source.\n")
+		os.Exit(2)
+	}
+
+	ctx := &context{
+		sources:         *sources,
+		stripPrefix:     *stripPrefix,
+	}
+	_, err := traceRestricted(ctx, os.Stdout, os.Stderr, flag.Args()...)
+	if err != nil {
+		if err == failNoneRequested {
+			flag.Usage()
+		}
+		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
+		os.Exit(1)
+	}
+	os.Exit(0)
+}
+
+// traceRestricted implements the rtrace utility.
+func traceRestricted(ctx *context, stdout, stderr io.Writer, files ...string) (*compliance.LicenseGraph, error) {
+	if len(files) < 1 {
+		return nil, failNoneRequested
+	}
+
+	if len(ctx.sources) < 1 {
+		return nil, failNoSources
+	}
+
+	// Read the license graph from the license metadata files (*.meta_lic).
+	licenseGraph, err := compliance.ReadLicenseGraph(os.DirFS("."), stderr, files)
+	if err != nil {
+		return nil, fmt.Errorf("Unable to read license metadata file(s) %q: %v\n", files, err)
+	}
+	if licenseGraph == nil {
+		return nil, failNoLicenses
+	}
+
+	sourceMap := make(map[string]struct{})
+	for _, source := range ctx.sources {
+		sourceMap[source] = struct{}{}
+	}
+
+	compliance.TraceTopDownConditions(licenseGraph, func(tn *compliance.TargetNode) compliance.LicenseConditionSet {
+		if _, isPresent := sourceMap[tn.Name()]; isPresent {
+			return compliance.ImpliesRestricted
+		}
+		for _, project := range tn.Projects() {
+			if _, isPresent := sourceMap[project]; isPresent {
+				return compliance.ImpliesRestricted
+			}
+		}
+		return compliance.NewLicenseConditionSet()
+	})
+
+	// targetOut calculates the string to output for `target` adding `sep`-separated conditions as needed.
+	targetOut := func(target *compliance.TargetNode, sep string) string {
+		tOut := ctx.strip(target.Name())
+		return tOut
+	}
+
+	// outputResolution prints a resolution in the requested format to `stdout`, where one can read
+	// a resolution as `tname` resolves conditions named in `cnames`.
+	// `tname` is the name of the target the resolution traces back to.
+	// `cnames` is the list of conditions to resolve.
+	outputResolution := func(tname string, cnames []string) {
+		// ... one edge per line with names in a colon-separated tuple.
+		fmt.Fprintf(stdout, "%s %s\n", tname, strings.Join(cnames, ":"))
+	}
+
+	// Sort the resolutions by targetname for repeatability/stability.
+	actions := compliance.WalkResolutionsForCondition(licenseGraph, compliance.ImpliesShared).AllActions()
+	targets := make(compliance.TargetNodeList, 0, len(actions))
+	for tn := range actions {
+		if tn.LicenseConditions().MatchesAnySet(compliance.ImpliesRestricted) {
+			targets = append(targets, tn)
+		}
+	}
+	sort.Sort(targets)
+
+	// Output the sorted targets.
+	for _, target := range targets {
+		var tname string
+		tname = targetOut(target, ":")
+
+		// cnames accumulates the list of condition names originating at a single origin that apply to `target`.
+		cnames := target.LicenseConditions().Names()
+
+		// Output 1 line for each attachesTo+actsOn combination.
+		outputResolution(tname, cnames)
+	}
+	fmt.Fprintf(stdout, "restricted conditions trace to %d targets\n", len(targets))
+	if 0 == len(targets) {
+		fmt.Fprintln(stdout, "  (check for typos in project names or metadata files)")
+	}
+	return licenseGraph, nil
+}
diff --git a/tools/compliance/cmd/rtrace/rtrace_test.go b/tools/compliance/cmd/rtrace/rtrace_test.go
new file mode 100644
index 0000000..a8eb884
--- /dev/null
+++ b/tools/compliance/cmd/rtrace/rtrace_test.go
@@ -0,0 +1,316 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bytes"
+	"fmt"
+	"os"
+	"strings"
+	"testing"
+)
+
+func TestMain(m *testing.M) {
+	// Change into the parent directory before running the tests
+	// so they can find the testdata directory.
+	if err := os.Chdir(".."); err != nil {
+		fmt.Printf("failed to change to testdata directory: %s\n", err)
+		os.Exit(1)
+	}
+	os.Exit(m.Run())
+}
+
+func Test_plaintext(t *testing.T) {
+	tests := []struct {
+		condition   string
+		name        string
+		roots       []string
+		ctx         context
+		expectedOut []string
+	}{
+		{
+			condition: "firstparty",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []string{},
+		},
+		{
+			condition: "firstparty",
+			name:      "apex_trimmed",
+			roots:     []string{"highest.apex.meta_lic"},
+			ctx: context{
+				sources:     []string{"testdata/firstparty/bin/bin1.meta_lic"},
+				stripPrefix: []string{"testdata/firstparty/"},
+			},
+			expectedOut: []string{},
+		},
+		{
+			condition: "firstparty",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []string{},
+		},
+		{
+			condition: "firstparty",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []string{},
+		},
+		{
+			condition: "firstparty",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []string{},
+		},
+		{
+			condition: "firstparty",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []string{},
+		},
+		{
+			condition: "notice",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []string{},
+		},
+		{
+			condition: "notice",
+			name:      "apex_trimmed",
+			roots:     []string{"highest.apex.meta_lic"},
+			ctx: context{
+				sources:     []string{"testdata/notice/bin/bin1.meta_lic"},
+				stripPrefix: []string{"testdata/notice/"},
+			},
+			expectedOut: []string{},
+		},
+		{
+			condition: "notice",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []string{},
+		},
+		{
+			condition: "notice",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []string{},
+		},
+		{
+			condition: "notice",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []string{},
+		},
+		{
+			condition: "notice",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []string{},
+		},
+		{
+			condition: "reciprocal",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []string{},
+		},
+		{
+			condition: "reciprocal",
+			name:      "apex_trimmed",
+			roots:     []string{"highest.apex.meta_lic"},
+			ctx: context{
+				sources:     []string{"testdata/reciprocal/bin/bin1.meta_lic"},
+				stripPrefix: []string{"testdata/reciprocal/"},
+			},
+			expectedOut: []string{},
+		},
+		{
+			condition: "reciprocal",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []string{},
+		},
+		{
+			condition: "reciprocal",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []string{},
+		},
+		{
+			condition: "reciprocal",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []string{},
+		},
+		{
+			condition: "reciprocal",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []string{},
+		},
+		{
+			condition: "restricted",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []string{
+				"testdata/restricted/lib/liba.so.meta_lic restricted_allows_dynamic_linking",
+				"testdata/restricted/lib/libb.so.meta_lic restricted",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "apex_trimmed_bin1",
+			roots:     []string{"highest.apex.meta_lic"},
+			ctx: context{
+				sources:     []string{"testdata/restricted/bin/bin1.meta_lic"},
+				stripPrefix: []string{"testdata/restricted/"},
+			},
+			expectedOut: []string{"lib/liba.so.meta_lic restricted_allows_dynamic_linking"},
+		},
+		{
+			condition: "restricted",
+			name:      "apex_trimmed_bin2",
+			roots:     []string{"highest.apex.meta_lic"},
+			ctx: context{
+				sources:     []string{"testdata/restricted/bin/bin2.meta_lic"},
+				stripPrefix: []string{"testdata/restricted/"},
+			},
+			expectedOut: []string{"lib/libb.so.meta_lic restricted"},
+		},
+		{
+			condition: "restricted",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []string{
+				"testdata/restricted/lib/liba.so.meta_lic restricted_allows_dynamic_linking",
+				"testdata/restricted/lib/libb.so.meta_lic restricted",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []string{"testdata/restricted/lib/liba.so.meta_lic restricted_allows_dynamic_linking"},
+		},
+		{
+			condition: "restricted",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []string{"testdata/restricted/lib/liba.so.meta_lic restricted_allows_dynamic_linking"},
+		},
+		{
+			condition: "restricted",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []string{},
+		},
+		{
+			condition: "proprietary",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []string{"testdata/proprietary/lib/libb.so.meta_lic restricted"},
+		},
+		{
+			condition: "proprietary",
+			name:      "apex_trimmed_bin1",
+			roots:     []string{"highest.apex.meta_lic"},
+			ctx: context{
+				sources:     []string{"testdata/proprietary/bin/bin1.meta_lic"},
+				stripPrefix: []string{"testdata/proprietary/"},
+			},
+			expectedOut: []string{},
+		},
+		{
+			condition: "proprietary",
+			name:      "apex_trimmed_bin2",
+			roots:     []string{"highest.apex.meta_lic"},
+			ctx: context{
+				sources:     []string{"testdata/proprietary/bin/bin2.meta_lic"},
+				stripPrefix: []string{"testdata/proprietary/"},
+			},
+			expectedOut: []string{"lib/libb.so.meta_lic restricted"},
+		},
+		{
+			condition: "proprietary",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []string{"testdata/proprietary/lib/libb.so.meta_lic restricted"},
+		},
+		{
+			condition: "proprietary",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []string{},
+		},
+		{
+			condition: "proprietary",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []string{},
+		},
+		{
+			condition: "proprietary",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []string{},
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.condition+" "+tt.name, func(t *testing.T) {
+			expectedOut := &bytes.Buffer{}
+			for _, eo := range tt.expectedOut {
+				expectedOut.WriteString(eo)
+				expectedOut.WriteString("\n")
+			}
+			fmt.Fprintf(expectedOut, "restricted conditions trace to %d targets\n", len(tt.expectedOut))
+			if 0 == len(tt.expectedOut) {
+				fmt.Fprintln(expectedOut, "  (check for typos in project names or metadata files)")
+			}
+
+			stdout := &bytes.Buffer{}
+			stderr := &bytes.Buffer{}
+
+			rootFiles := make([]string, 0, len(tt.roots))
+			for _, r := range tt.roots {
+				rootFiles = append(rootFiles, "testdata/"+tt.condition+"/"+r)
+			}
+			if len(tt.ctx.sources) < 1 {
+				tt.ctx.sources = rootFiles
+			}
+			_, err := traceRestricted(&tt.ctx, stdout, stderr, rootFiles...)
+			t.Logf("rtrace: stderr = %v", stderr)
+			t.Logf("rtrace: stdout = %v", stdout)
+			if err != nil {
+				t.Fatalf("rtrace: error = %v", err)
+				return
+			}
+			if stderr.Len() > 0 {
+				t.Errorf("rtrace: gotStderr = %v, want none", stderr)
+			}
+			out := stdout.String()
+			expected := expectedOut.String()
+			if out != expected {
+				outList := strings.Split(out, "\n")
+				expectedList := strings.Split(expected, "\n")
+				startLine := 0
+				for startLine < len(outList) && startLine < len(expectedList) && outList[startLine] == expectedList[startLine] {
+					startLine++
+				}
+				t.Errorf("rtrace: gotStdout = %v, want %v, somewhere near line %d Stdout = %v, want %v",
+					out, expected, startLine+1, outList[startLine], expectedList[startLine])
+			}
+		})
+	}
+}
diff --git a/tools/compliance/cmd/shippedlibs/shippedlibs.go b/tools/compliance/cmd/shippedlibs/shippedlibs.go
new file mode 100644
index 0000000..94b19f1
--- /dev/null
+++ b/tools/compliance/cmd/shippedlibs/shippedlibs.go
@@ -0,0 +1,168 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bytes"
+	"flag"
+	"fmt"
+	"io"
+	"io/fs"
+	"os"
+	"path/filepath"
+	"strings"
+
+	"android/soong/response"
+	"android/soong/tools/compliance"
+)
+
+var (
+	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
+	failNoLicenses    = fmt.Errorf("No licenses found")
+)
+
+type context struct {
+	stdout io.Writer
+	stderr io.Writer
+	rootFS fs.FS
+}
+
+func init() {
+}
+
+func main() {
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	outputFile := flags.String("o", "-", "Where to write the library list. (default stdout)")
+
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs a list of libraries used in the shipped images.
+
+Options:
+`, filepath.Base(os.Args[0]))
+		flags.PrintDefaults()
+	}
+
+	err := flags.Parse(expandedArgs)
+	if err != nil {
+		flags.Usage()
+		fmt.Fprintf(os.Stderr, "%v\n", err)
+	}
+
+	// Must specify at least one root target.
+	if flags.NArg() == 0 {
+		flags.Usage()
+		os.Exit(2)
+	}
+
+	if len(*outputFile) == 0 {
+		flags.Usage()
+		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+		os.Exit(2)
+	} else {
+		dir, err := filepath.Abs(filepath.Dir(*outputFile))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+		fi, err := os.Stat(dir)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+			os.Exit(1)
+		}
+		if !fi.IsDir() {
+			fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+			os.Exit(1)
+		}
+	}
+
+	var ofile io.Writer
+	ofile = os.Stdout
+	if *outputFile != "-" {
+		ofile = &bytes.Buffer{}
+	}
+
+	ctx := &context{ofile, os.Stderr, os.DirFS(".")}
+
+	err = shippedLibs(ctx, flags.Args()...)
+	if err != nil {
+		if err == failNoneRequested {
+			flags.Usage()
+		}
+		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
+		os.Exit(1)
+	}
+	if *outputFile != "-" {
+		err := os.WriteFile(*outputFile, ofile.(*bytes.Buffer).Bytes(), 0666)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write output to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+	}
+	os.Exit(0)
+}
+
+// shippedLibs implements the shippedlibs utility.
+func shippedLibs(ctx *context, files ...string) error {
+	// Must be at least one root file.
+	if len(files) < 1 {
+		return failNoneRequested
+	}
+
+	// Read the license graph from the license metadata files (*.meta_lic).
+	licenseGraph, err := compliance.ReadLicenseGraph(ctx.rootFS, ctx.stderr, files)
+	if err != nil {
+		return fmt.Errorf("Unable to read license metadata file(s) %q: %v\n", files, err)
+	}
+	if licenseGraph == nil {
+		return failNoLicenses
+	}
+
+	// rs contains all notice resolutions.
+	rs := compliance.ResolveNotices(licenseGraph)
+
+	ni, err := compliance.IndexLicenseTexts(ctx.rootFS, licenseGraph, rs)
+	if err != nil {
+		return fmt.Errorf("Unable to read license text file(s) for %q: %v\n", files, err)
+	}
+
+	for lib := range ni.Libraries() {
+		fmt.Fprintln(ctx.stdout, lib)
+	}
+	return nil
+}
diff --git a/tools/compliance/cmd/shippedlibs/shippedlibs_test.go b/tools/compliance/cmd/shippedlibs/shippedlibs_test.go
new file mode 100644
index 0000000..b6aad6d
--- /dev/null
+++ b/tools/compliance/cmd/shippedlibs/shippedlibs_test.go
@@ -0,0 +1,238 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bufio"
+	"bytes"
+	"fmt"
+	"os"
+	"strings"
+	"testing"
+)
+
+func TestMain(m *testing.M) {
+	// Change into the parent directory before running the tests
+	// so they can find the testdata directory.
+	if err := os.Chdir(".."); err != nil {
+		fmt.Printf("failed to change to parent directory: %s\n", err)
+		os.Exit(1)
+	}
+	os.Exit(m.Run())
+}
+
+func Test(t *testing.T) {
+	tests := []struct {
+		condition   string
+		name        string
+		roots       []string
+		expectedOut []string
+	}{
+		{
+			condition:   "firstparty",
+			name:        "apex",
+			roots:       []string{"highest.apex.meta_lic"},
+			expectedOut: []string{"Android"},
+		},
+		{
+			condition:   "firstparty",
+			name:        "container",
+			roots:       []string{"container.zip.meta_lic"},
+			expectedOut: []string{"Android"},
+		},
+		{
+			condition:   "firstparty",
+			name:        "application",
+			roots:       []string{"application.meta_lic"},
+			expectedOut: []string{"Android"},
+		},
+		{
+			condition:   "firstparty",
+			name:        "binary",
+			roots:       []string{"bin/bin1.meta_lic"},
+			expectedOut: []string{"Android"},
+		},
+		{
+			condition:   "firstparty",
+			name:        "library",
+			roots:       []string{"lib/libd.so.meta_lic"},
+			expectedOut: []string{"Android"},
+		},
+		{
+			condition:   "notice",
+			name:        "apex",
+			roots:       []string{"highest.apex.meta_lic"},
+			expectedOut: []string{"Android", "Device", "External"},
+		},
+		{
+			condition:   "notice",
+			name:        "container",
+			roots:       []string{"container.zip.meta_lic"},
+			expectedOut: []string{"Android", "Device", "External"},
+		},
+		{
+			condition:   "notice",
+			name:        "application",
+			roots:       []string{"application.meta_lic"},
+			expectedOut: []string{"Android", "Device"},
+		},
+		{
+			condition:   "notice",
+			name:        "binary",
+			roots:       []string{"bin/bin1.meta_lic"},
+			expectedOut: []string{"Android", "Device", "External"},
+		},
+		{
+			condition:   "notice",
+			name:        "library",
+			roots:       []string{"lib/libd.so.meta_lic"},
+			expectedOut: []string{"External"},
+		},
+		{
+			condition:   "reciprocal",
+			name:        "apex",
+			roots:       []string{"highest.apex.meta_lic"},
+			expectedOut: []string{"Android", "Device", "External"},
+		},
+		{
+			condition:   "reciprocal",
+			name:        "container",
+			roots:       []string{"container.zip.meta_lic"},
+			expectedOut: []string{"Android", "Device", "External"},
+		},
+		{
+			condition:   "reciprocal",
+			name:        "application",
+			roots:       []string{"application.meta_lic"},
+			expectedOut: []string{"Android", "Device"},
+		},
+		{
+			condition:   "reciprocal",
+			name:        "binary",
+			roots:       []string{"bin/bin1.meta_lic"},
+			expectedOut: []string{"Android", "Device", "External"},
+		},
+		{
+			condition:   "reciprocal",
+			name:        "library",
+			roots:       []string{"lib/libd.so.meta_lic"},
+			expectedOut: []string{"External"},
+		},
+		{
+			condition:   "restricted",
+			name:        "apex",
+			roots:       []string{"highest.apex.meta_lic"},
+			expectedOut: []string{"Android", "Device", "External"},
+		},
+		{
+			condition:   "restricted",
+			name:        "container",
+			roots:       []string{"container.zip.meta_lic"},
+			expectedOut: []string{"Android", "Device", "External"},
+		},
+		{
+			condition:   "restricted",
+			name:        "application",
+			roots:       []string{"application.meta_lic"},
+			expectedOut: []string{"Android", "Device"},
+		},
+		{
+			condition:   "restricted",
+			name:        "binary",
+			roots:       []string{"bin/bin1.meta_lic"},
+			expectedOut: []string{"Android", "Device", "External"},
+		},
+		{
+			condition:   "restricted",
+			name:        "library",
+			roots:       []string{"lib/libd.so.meta_lic"},
+			expectedOut: []string{"External"},
+		},
+		{
+			condition:   "proprietary",
+			name:        "apex",
+			roots:       []string{"highest.apex.meta_lic"},
+			expectedOut: []string{"Android", "Device", "External"},
+		},
+		{
+			condition:   "proprietary",
+			name:        "container",
+			roots:       []string{"container.zip.meta_lic"},
+			expectedOut: []string{"Android", "Device", "External"},
+		},
+		{
+			condition:   "proprietary",
+			name:        "application",
+			roots:       []string{"application.meta_lic"},
+			expectedOut: []string{"Android", "Device"},
+		},
+		{
+			condition:   "proprietary",
+			name:        "binary",
+			roots:       []string{"bin/bin1.meta_lic"},
+			expectedOut: []string{"Android", "Device", "External"},
+		},
+		{
+			condition:   "proprietary",
+			name:        "library",
+			roots:       []string{"lib/libd.so.meta_lic"},
+			expectedOut: []string{"External"},
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.condition+" "+tt.name, func(t *testing.T) {
+			stdout := &bytes.Buffer{}
+			stderr := &bytes.Buffer{}
+
+			rootFiles := make([]string, 0, len(tt.roots))
+			for _, r := range tt.roots {
+				rootFiles = append(rootFiles, "testdata/"+tt.condition+"/"+r)
+			}
+
+			ctx := context{stdout, stderr, os.DirFS(".")}
+
+			err := shippedLibs(&ctx, rootFiles...)
+			if err != nil {
+				t.Fatalf("shippedLibs: error = %v, stderr = %v", err, stderr)
+				return
+			}
+			if stderr.Len() > 0 {
+				t.Errorf("shippedLibs: gotStderr = %v, want none", stderr)
+			}
+
+			t.Logf("got stdout: %s", stdout.String())
+
+			t.Logf("want stdout: %s", strings.Join(tt.expectedOut, "\n"))
+
+			out := bufio.NewScanner(stdout)
+			lineno := 0
+			for out.Scan() {
+				line := out.Text()
+				if strings.TrimLeft(line, " ") == "" {
+					continue
+				}
+				if len(tt.expectedOut) <= lineno {
+					t.Errorf("shippedLibs: unexpected output at line %d: got %q, want nothing (wanted %d lines)", lineno+1, line, len(tt.expectedOut))
+				} else if tt.expectedOut[lineno] != line {
+					t.Errorf("shippedLibs: unexpected output at line %d: got %q, want %q", lineno+1, line, tt.expectedOut[lineno])
+				}
+				lineno++
+			}
+			for ; lineno < len(tt.expectedOut); lineno++ {
+				t.Errorf("shippedLibs: missing output line %d: ended early, want %q", lineno+1, tt.expectedOut[lineno])
+			}
+		})
+	}
+}
diff --git a/tools/compliance/cmd/testdata/firstparty/FIRST_PARTY_LICENSE b/tools/compliance/cmd/testdata/firstparty/FIRST_PARTY_LICENSE
new file mode 100644
index 0000000..a7e7e64
--- /dev/null
+++ b/tools/compliance/cmd/testdata/firstparty/FIRST_PARTY_LICENSE
@@ -0,0 +1 @@
+&&&First Party License&&&
diff --git a/tools/compliance/cmd/testdata/firstparty/application.meta_lic b/tools/compliance/cmd/testdata/firstparty/application.meta_lic
index 58a1566..ac3338f 100644
--- a/tools/compliance/cmd/testdata/firstparty/application.meta_lic
+++ b/tools/compliance/cmd/testdata/firstparty/application.meta_lic
@@ -3,7 +3,7 @@
 projects:  "distributable/application"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/application_intermediates/application"
 installed:  "out/target/product/fictional/bin/application"
diff --git a/tools/compliance/cmd/testdata/firstparty/bin/bin1.meta_lic b/tools/compliance/cmd/testdata/firstparty/bin/bin1.meta_lic
index 34d81d9..3007129 100644
--- a/tools/compliance/cmd/testdata/firstparty/bin/bin1.meta_lic
+++ b/tools/compliance/cmd/testdata/firstparty/bin/bin1.meta_lic
@@ -3,7 +3,7 @@
 projects:  "static/binary"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin1"
 installed:  "out/target/product/fictional/system/bin/bin1"
diff --git a/tools/compliance/cmd/testdata/firstparty/bin/bin2.meta_lic b/tools/compliance/cmd/testdata/firstparty/bin/bin2.meta_lic
index 6154421..89bc6a4 100644
--- a/tools/compliance/cmd/testdata/firstparty/bin/bin2.meta_lic
+++ b/tools/compliance/cmd/testdata/firstparty/bin/bin2.meta_lic
@@ -3,10 +3,10 @@
 projects:  "dynamic/binary"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
-built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin1"
-installed:  "out/target/product/fictional/system/bin/bin1"
+built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin2"
+installed:  "out/target/product/fictional/system/bin/bin2"
 sources:  "out/target/product/fictional/system/lib/libb.so"
 sources:  "out/target/product/fictional/system/lib/libd.so"
 deps:  {
diff --git a/tools/compliance/cmd/testdata/firstparty/bin/bin3.meta_lic b/tools/compliance/cmd/testdata/firstparty/bin/bin3.meta_lic
index 9b7908e..a81c764 100644
--- a/tools/compliance/cmd/testdata/firstparty/bin/bin3.meta_lic
+++ b/tools/compliance/cmd/testdata/firstparty/bin/bin3.meta_lic
@@ -3,7 +3,7 @@
 projects:  "standalone/binary"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin3"
 installed:  "out/target/product/fictional/system/bin/bin3"
diff --git a/tools/compliance/cmd/testdata/firstparty/container.zip.meta_lic b/tools/compliance/cmd/testdata/firstparty/container.zip.meta_lic
index 350b123..9f6a679 100644
--- a/tools/compliance/cmd/testdata/firstparty/container.zip.meta_lic
+++ b/tools/compliance/cmd/testdata/firstparty/container.zip.meta_lic
@@ -2,17 +2,17 @@
 projects:  "container/zip"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  true
 built:  "out/target/product/fictional/obj/ETC/container_intermediates/container.zip"
 installed:  "out/target/product/fictional/data/container.zip"
 install_map {
   from_path:  "out/target/product/fictional/system/lib/"
-  container_path:  ""
+  container_path:  "/"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/bin/"
-  container_path:  ""
+  container_path:  "/"
 }
 sources:  "out/target/product/fictional/system/lib/liba.so"
 sources:  "out/target/product/fictional/system/lib/libb.so"
diff --git a/tools/compliance/cmd/testdata/firstparty/highest.apex.meta_lic b/tools/compliance/cmd/testdata/firstparty/highest.apex.meta_lic
index 53f81a2..abad5f1 100644
--- a/tools/compliance/cmd/testdata/firstparty/highest.apex.meta_lic
+++ b/tools/compliance/cmd/testdata/firstparty/highest.apex.meta_lic
@@ -2,25 +2,25 @@
 projects:  "highest/apex"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  true
 built:  "out/target/product/fictional/obj/ETC/highest_intermediates/highest.apex"
 installed:  "out/target/product/fictional/system/apex/highest.apex"
 install_map {
   from_path:  "out/target/product/fictional/system/lib/liba.so"
-  container_path:  "lib/liba.so"
+  container_path:  "/lib/liba.so"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/lib/libb.so"
-  container_path:  "lib/libb.so"
+  container_path:  "/lib/libb.so"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/bin/bin1"
-  container_path:  "bin/bin1"
+  container_path:  "/bin/bin1"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/bin/bin2"
-  container_path:  "bin/bin2"
+  container_path:  "/bin/bin2"
 }
 sources:  "out/target/product/fictional/system/lib/liba.so"
 sources:  "out/target/product/fictional/system/lib/libb.so"
diff --git a/tools/compliance/cmd/testdata/firstparty/lib/liba.so.meta_lic b/tools/compliance/cmd/testdata/firstparty/lib/liba.so.meta_lic
index 7913af0..2985719 100644
--- a/tools/compliance/cmd/testdata/firstparty/lib/liba.so.meta_lic
+++ b/tools/compliance/cmd/testdata/firstparty/lib/liba.so.meta_lic
@@ -2,7 +2,7 @@
 projects:  "device/library"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/liba.so"
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/liba.a"
diff --git a/tools/compliance/cmd/testdata/firstparty/lib/libb.so.meta_lic b/tools/compliance/cmd/testdata/firstparty/lib/libb.so.meta_lic
index a4935d4..e60ef73 100644
--- a/tools/compliance/cmd/testdata/firstparty/lib/libb.so.meta_lic
+++ b/tools/compliance/cmd/testdata/firstparty/lib/libb.so.meta_lic
@@ -2,7 +2,7 @@
 projects:  "base/library"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libb.so"
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libb.a"
diff --git a/tools/compliance/cmd/testdata/firstparty/lib/libc.a.meta_lic b/tools/compliance/cmd/testdata/firstparty/lib/libc.a.meta_lic
index fa7459a..24d3f0d 100644
--- a/tools/compliance/cmd/testdata/firstparty/lib/libc.a.meta_lic
+++ b/tools/compliance/cmd/testdata/firstparty/lib/libc.a.meta_lic
@@ -2,6 +2,6 @@
 projects:  "static/library"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libc.a"
diff --git a/tools/compliance/cmd/testdata/firstparty/lib/libd.so.meta_lic b/tools/compliance/cmd/testdata/firstparty/lib/libd.so.meta_lic
index a2db94a..f7e537c 100644
--- a/tools/compliance/cmd/testdata/firstparty/lib/libd.so.meta_lic
+++ b/tools/compliance/cmd/testdata/firstparty/lib/libd.so.meta_lic
@@ -2,7 +2,7 @@
 projects:  "dynamic/library"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libd.so"
 installed:  "out/target/product/fictional/system/lib/libd.so"
diff --git a/tools/compliance/cmd/testdata/notice/NOTICE_LICENSE b/tools/compliance/cmd/testdata/notice/NOTICE_LICENSE
new file mode 100644
index 0000000..752b249
--- /dev/null
+++ b/tools/compliance/cmd/testdata/notice/NOTICE_LICENSE
@@ -0,0 +1 @@
+%%%Notice License%%%
diff --git a/tools/compliance/cmd/testdata/notice/application.meta_lic b/tools/compliance/cmd/testdata/notice/application.meta_lic
index 56c60ef..8ce0a98 100644
--- a/tools/compliance/cmd/testdata/notice/application.meta_lic
+++ b/tools/compliance/cmd/testdata/notice/application.meta_lic
@@ -3,7 +3,7 @@
 projects:  "distributable/application"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/application_intermediates/application"
 installed:  "out/target/product/fictional/bin/application"
diff --git a/tools/compliance/cmd/testdata/notice/bin/bin1.meta_lic b/tools/compliance/cmd/testdata/notice/bin/bin1.meta_lic
index 9bede1b..6d173a4 100644
--- a/tools/compliance/cmd/testdata/notice/bin/bin1.meta_lic
+++ b/tools/compliance/cmd/testdata/notice/bin/bin1.meta_lic
@@ -3,7 +3,7 @@
 projects:  "static/binary"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin1"
 installed:  "out/target/product/fictional/system/bin/bin1"
diff --git a/tools/compliance/cmd/testdata/notice/bin/bin2.meta_lic b/tools/compliance/cmd/testdata/notice/bin/bin2.meta_lic
index 86e06c6..a9e9c71 100644
--- a/tools/compliance/cmd/testdata/notice/bin/bin2.meta_lic
+++ b/tools/compliance/cmd/testdata/notice/bin/bin2.meta_lic
@@ -3,10 +3,10 @@
 projects:  "dynamic/binary"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
-built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin1"
-installed:  "out/target/product/fictional/system/bin/bin1"
+built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin2"
+installed:  "out/target/product/fictional/system/bin/bin2"
 sources:  "out/target/product/fictional/system/lib/libb.so"
 sources:  "out/target/product/fictional/system/lib/libd.so"
 deps:  {
diff --git a/tools/compliance/cmd/testdata/notice/bin/bin3.meta_lic b/tools/compliance/cmd/testdata/notice/bin/bin3.meta_lic
index 285d899..bb9a3d5 100644
--- a/tools/compliance/cmd/testdata/notice/bin/bin3.meta_lic
+++ b/tools/compliance/cmd/testdata/notice/bin/bin3.meta_lic
@@ -3,6 +3,7 @@
 projects:  "standalone/binary"
 license_kinds:  "SPDX-license-identifier-NCSA"
 license_conditions:  "notice"
+license_texts:  "testdata/notice/NOTICE_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin3"
 installed:  "out/target/product/fictional/system/bin/bin3"
diff --git a/tools/compliance/cmd/testdata/notice/container.zip.meta_lic b/tools/compliance/cmd/testdata/notice/container.zip.meta_lic
index e8af61c..e9c0511 100644
--- a/tools/compliance/cmd/testdata/notice/container.zip.meta_lic
+++ b/tools/compliance/cmd/testdata/notice/container.zip.meta_lic
@@ -2,17 +2,17 @@
 projects:  "container/zip"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  true
 built:  "out/target/product/fictional/obj/ETC/container_intermediates/container.zip"
 installed:  "out/target/product/fictional/data/container.zip"
 install_map {
   from_path:  "out/target/product/fictional/system/lib/"
-  container_path:  ""
+  container_path:  "/"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/bin/"
-  container_path:  ""
+  container_path:  "/"
 }
 sources:  "out/target/product/fictional/system/lib/liba.so"
 sources:  "out/target/product/fictional/system/lib/libb.so"
diff --git a/tools/compliance/cmd/testdata/notice/highest.apex.meta_lic b/tools/compliance/cmd/testdata/notice/highest.apex.meta_lic
index 9b90aa5..2abb76e 100644
--- a/tools/compliance/cmd/testdata/notice/highest.apex.meta_lic
+++ b/tools/compliance/cmd/testdata/notice/highest.apex.meta_lic
@@ -2,25 +2,25 @@
 projects:  "highest/apex"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  true
 built:  "out/target/product/fictional/obj/ETC/highest_intermediates/highest.apex"
 installed:  "out/target/product/fictional/system/apex/highest.apex"
 install_map {
   from_path:  "out/target/product/fictional/system/lib/liba.so"
-  container_path:  "lib/liba.so"
+  container_path:  "/lib/liba.so"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/lib/libb.so"
-  container_path:  "lib/libb.so"
+  container_path:  "/lib/libb.so"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/bin/bin1"
-  container_path:  "bin/bin1"
+  container_path:  "/bin/bin1"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/bin/bin2"
-  container_path:  "bin/bin2"
+  container_path:  "/bin/bin2"
 }
 sources:  "out/target/product/fictional/system/lib/liba.so"
 sources:  "out/target/product/fictional/system/lib/libb.so"
diff --git a/tools/compliance/cmd/testdata/notice/lib/liba.so.meta_lic b/tools/compliance/cmd/testdata/notice/lib/liba.so.meta_lic
index a69839f..7fed5d7 100644
--- a/tools/compliance/cmd/testdata/notice/lib/liba.so.meta_lic
+++ b/tools/compliance/cmd/testdata/notice/lib/liba.so.meta_lic
@@ -2,6 +2,7 @@
 projects:  "device/library"
 license_kinds:  "SPDX-license-identifier-BSD"
 license_conditions:  "notice"
+license_texts:  "testdata/notice/NOTICE_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/liba.so"
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/liba.a"
diff --git a/tools/compliance/cmd/testdata/notice/lib/libb.so.meta_lic b/tools/compliance/cmd/testdata/notice/lib/libb.so.meta_lic
index a4935d4..e60ef73 100644
--- a/tools/compliance/cmd/testdata/notice/lib/libb.so.meta_lic
+++ b/tools/compliance/cmd/testdata/notice/lib/libb.so.meta_lic
@@ -2,7 +2,7 @@
 projects:  "base/library"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libb.so"
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libb.a"
diff --git a/tools/compliance/cmd/testdata/notice/lib/libc.a.meta_lic b/tools/compliance/cmd/testdata/notice/lib/libc.a.meta_lic
index eb0f81f..8dbc41b 100644
--- a/tools/compliance/cmd/testdata/notice/lib/libc.a.meta_lic
+++ b/tools/compliance/cmd/testdata/notice/lib/libc.a.meta_lic
@@ -2,5 +2,6 @@
 projects:  "static/library"
 license_kinds:  "SPDX-license-identifier-MIT"
 license_conditions:  "notice"
+license_texts:  "testdata/notice/NOTICE_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libc.a"
diff --git a/tools/compliance/cmd/testdata/notice/lib/libd.so.meta_lic b/tools/compliance/cmd/testdata/notice/lib/libd.so.meta_lic
index 942d298..e6a060c 100644
--- a/tools/compliance/cmd/testdata/notice/lib/libd.so.meta_lic
+++ b/tools/compliance/cmd/testdata/notice/lib/libd.so.meta_lic
@@ -2,6 +2,7 @@
 projects:  "dynamic/library"
 license_kinds:  "SPDX-license-identifier-MIT"
 license_conditions:  "notice"
+license_texts:  "testdata/notice/NOTICE_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libd.so"
 installed:  "out/target/product/fictional/system/lib/libd.so"
diff --git a/tools/compliance/cmd/testdata/proprietary/PROPRIETARY_LICENSE b/tools/compliance/cmd/testdata/proprietary/PROPRIETARY_LICENSE
new file mode 100644
index 0000000..5d0eb09
--- /dev/null
+++ b/tools/compliance/cmd/testdata/proprietary/PROPRIETARY_LICENSE
@@ -0,0 +1 @@
+@@@Proprietary License@@@
diff --git a/tools/compliance/cmd/testdata/proprietary/application.meta_lic b/tools/compliance/cmd/testdata/proprietary/application.meta_lic
index 51b97c5..f307c5c 100644
--- a/tools/compliance/cmd/testdata/proprietary/application.meta_lic
+++ b/tools/compliance/cmd/testdata/proprietary/application.meta_lic
@@ -3,7 +3,7 @@
 projects:  "distributable/application"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/application_intermediates/application"
 installed:  "out/target/product/fictional/bin/application"
diff --git a/tools/compliance/cmd/testdata/proprietary/bin/bin1.meta_lic b/tools/compliance/cmd/testdata/proprietary/bin/bin1.meta_lic
index c815858..e0394da 100644
--- a/tools/compliance/cmd/testdata/proprietary/bin/bin1.meta_lic
+++ b/tools/compliance/cmd/testdata/proprietary/bin/bin1.meta_lic
@@ -3,7 +3,7 @@
 projects:  "static/binary"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin1"
 installed:  "out/target/product/fictional/system/bin/bin1"
diff --git a/tools/compliance/cmd/testdata/proprietary/bin/bin2.meta_lic b/tools/compliance/cmd/testdata/proprietary/bin/bin2.meta_lic
index 6b89ba4..da64aa6 100644
--- a/tools/compliance/cmd/testdata/proprietary/bin/bin2.meta_lic
+++ b/tools/compliance/cmd/testdata/proprietary/bin/bin2.meta_lic
@@ -4,9 +4,10 @@
 license_kinds:  "legacy_proprietary"
 license_conditions:  "proprietary"
 license_conditions:  "by_exception_only"
+license_texts:  "testdata/proprietary/PROPRIETARY_LICENSE"
 is_container:  false
-built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin1"
-installed:  "out/target/product/fictional/system/bin/bin1"
+built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin2"
+installed:  "out/target/product/fictional/system/bin/bin2"
 sources:  "out/target/product/fictional/system/lib/libb.so"
 sources:  "out/target/product/fictional/system/lib/libd.so"
 deps:  {
diff --git a/tools/compliance/cmd/testdata/proprietary/bin/bin3.meta_lic b/tools/compliance/cmd/testdata/proprietary/bin/bin3.meta_lic
index f93553d..7ef14e9 100644
--- a/tools/compliance/cmd/testdata/proprietary/bin/bin3.meta_lic
+++ b/tools/compliance/cmd/testdata/proprietary/bin/bin3.meta_lic
@@ -3,6 +3,7 @@
 projects:  "standalone/binary"
 license_kinds:  "SPDX-license-identifier-LGPL-2.0"
 license_conditions:  "restricted"
+license_texts:  "testdata/restricted/RESTRICTED_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin3"
 installed:  "out/target/product/fictional/system/bin/bin3"
diff --git a/tools/compliance/cmd/testdata/proprietary/container.zip.meta_lic b/tools/compliance/cmd/testdata/proprietary/container.zip.meta_lic
index 889e17e..d6605f4 100644
--- a/tools/compliance/cmd/testdata/proprietary/container.zip.meta_lic
+++ b/tools/compliance/cmd/testdata/proprietary/container.zip.meta_lic
@@ -2,17 +2,17 @@
 projects:  "container/zip"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  true
 built:  "out/target/product/fictional/obj/ETC/container_intermediates/container.zip"
 installed:  "out/target/product/fictional/data/container.zip"
 install_map {
   from_path:  "out/target/product/fictional/system/lib/"
-  container_path:  ""
+  container_path:  "/"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/bin/"
-  container_path:  ""
+  container_path:  "/"
 }
 sources:  "out/target/product/fictional/system/lib/liba.so"
 sources:  "out/target/product/fictional/system/lib/libb.so"
diff --git a/tools/compliance/cmd/testdata/proprietary/highest.apex.meta_lic b/tools/compliance/cmd/testdata/proprietary/highest.apex.meta_lic
index d615404..27ced10 100644
--- a/tools/compliance/cmd/testdata/proprietary/highest.apex.meta_lic
+++ b/tools/compliance/cmd/testdata/proprietary/highest.apex.meta_lic
@@ -2,25 +2,25 @@
 projects:  "highest/apex"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  true
 built:  "out/target/product/fictional/obj/ETC/highest_intermediates/highest.apex"
 installed:  "out/target/product/fictional/system/apex/highest.apex"
 install_map {
   from_path:  "out/target/product/fictional/system/lib/liba.so"
-  container_path:  "lib/liba.so"
+  container_path:  "/lib/liba.so"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/lib/libb.so"
-  container_path:  "lib/libb.so"
+  container_path:  "/lib/libb.so"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/bin/bin1"
-  container_path:  "bin/bin1"
+  container_path:  "/bin/bin1"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/bin/bin2"
-  container_path:  "bin/bin2"
+  container_path:  "/bin/bin2"
 }
 sources:  "out/target/product/fictional/system/lib/liba.so"
 sources:  "out/target/product/fictional/system/lib/libb.so"
diff --git a/tools/compliance/cmd/testdata/proprietary/lib/liba.so.meta_lic b/tools/compliance/cmd/testdata/proprietary/lib/liba.so.meta_lic
index 51141c8..ceb0f9f 100644
--- a/tools/compliance/cmd/testdata/proprietary/lib/liba.so.meta_lic
+++ b/tools/compliance/cmd/testdata/proprietary/lib/liba.so.meta_lic
@@ -3,6 +3,7 @@
 license_kinds:  "legacy_proprietary"
 license_conditions:  "proprietary"
 license_conditions:  "by_exception_only"
+license_texts:  "testdata/proprietary/PROPRIETARY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/liba.so"
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/liba.a"
diff --git a/tools/compliance/cmd/testdata/proprietary/lib/libb.so.meta_lic b/tools/compliance/cmd/testdata/proprietary/lib/libb.so.meta_lic
index c1b86d7..739d357 100644
--- a/tools/compliance/cmd/testdata/proprietary/lib/libb.so.meta_lic
+++ b/tools/compliance/cmd/testdata/proprietary/lib/libb.so.meta_lic
@@ -2,6 +2,7 @@
 projects:  "base/library"
 license_kinds:  "SPDX-license-identifier-GPL-2.0"
 license_conditions:  "restricted"
+license_texts:  "testdata/restricted/RESTRICTED_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libb.so"
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libb.a"
diff --git a/tools/compliance/cmd/testdata/proprietary/lib/libc.a.meta_lic b/tools/compliance/cmd/testdata/proprietary/lib/libc.a.meta_lic
index 1ade7da..5440ea7 100644
--- a/tools/compliance/cmd/testdata/proprietary/lib/libc.a.meta_lic
+++ b/tools/compliance/cmd/testdata/proprietary/lib/libc.a.meta_lic
@@ -3,5 +3,6 @@
 license_kinds:  "legacy_proprietary"
 license_conditions:  "proprietary"
 license_conditions:  "by_exception_only"
+license_texts:  "testdata/proprietary/PROPRIETARY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libc.a"
diff --git a/tools/compliance/cmd/testdata/proprietary/lib/libd.so.meta_lic b/tools/compliance/cmd/testdata/proprietary/lib/libd.so.meta_lic
index 942d298..e6a060c 100644
--- a/tools/compliance/cmd/testdata/proprietary/lib/libd.so.meta_lic
+++ b/tools/compliance/cmd/testdata/proprietary/lib/libd.so.meta_lic
@@ -2,6 +2,7 @@
 projects:  "dynamic/library"
 license_kinds:  "SPDX-license-identifier-MIT"
 license_conditions:  "notice"
+license_texts:  "testdata/notice/NOTICE_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libd.so"
 installed:  "out/target/product/fictional/system/lib/libd.so"
diff --git a/tools/compliance/cmd/testdata/reciprocal/RECIPROCAL_LICENSE b/tools/compliance/cmd/testdata/reciprocal/RECIPROCAL_LICENSE
new file mode 100644
index 0000000..82c2019
--- /dev/null
+++ b/tools/compliance/cmd/testdata/reciprocal/RECIPROCAL_LICENSE
@@ -0,0 +1 @@
+$$$Reciprocal License$$$
diff --git a/tools/compliance/cmd/testdata/reciprocal/application.meta_lic b/tools/compliance/cmd/testdata/reciprocal/application.meta_lic
index 015c2d9..60233cb 100644
--- a/tools/compliance/cmd/testdata/reciprocal/application.meta_lic
+++ b/tools/compliance/cmd/testdata/reciprocal/application.meta_lic
@@ -3,7 +3,7 @@
 projects:  "distributable/application"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/application_intermediates/application"
 installed:  "out/target/product/fictional/bin/application"
diff --git a/tools/compliance/cmd/testdata/reciprocal/bin/bin1.meta_lic b/tools/compliance/cmd/testdata/reciprocal/bin/bin1.meta_lic
index 4ebf653..54d552f 100644
--- a/tools/compliance/cmd/testdata/reciprocal/bin/bin1.meta_lic
+++ b/tools/compliance/cmd/testdata/reciprocal/bin/bin1.meta_lic
@@ -3,7 +3,7 @@
 projects:  "static/binary"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin1"
 installed:  "out/target/product/fictional/system/bin/bin1"
diff --git a/tools/compliance/cmd/testdata/reciprocal/bin/bin2.meta_lic b/tools/compliance/cmd/testdata/reciprocal/bin/bin2.meta_lic
index 4d28608..a28cb91 100644
--- a/tools/compliance/cmd/testdata/reciprocal/bin/bin2.meta_lic
+++ b/tools/compliance/cmd/testdata/reciprocal/bin/bin2.meta_lic
@@ -3,10 +3,10 @@
 projects:  "dynamic/binary"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
-built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin1"
-installed:  "out/target/product/fictional/system/bin/bin1"
+built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin2"
+installed:  "out/target/product/fictional/system/bin/bin2"
 sources:  "out/target/product/fictional/system/lib/libb.so"
 sources:  "out/target/product/fictional/system/lib/libd.so"
 deps:  {
diff --git a/tools/compliance/cmd/testdata/reciprocal/bin/bin3.meta_lic b/tools/compliance/cmd/testdata/reciprocal/bin/bin3.meta_lic
index 285d899..bb9a3d5 100644
--- a/tools/compliance/cmd/testdata/reciprocal/bin/bin3.meta_lic
+++ b/tools/compliance/cmd/testdata/reciprocal/bin/bin3.meta_lic
@@ -3,6 +3,7 @@
 projects:  "standalone/binary"
 license_kinds:  "SPDX-license-identifier-NCSA"
 license_conditions:  "notice"
+license_texts:  "testdata/notice/NOTICE_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin3"
 installed:  "out/target/product/fictional/system/bin/bin3"
diff --git a/tools/compliance/cmd/testdata/reciprocal/container.zip.meta_lic b/tools/compliance/cmd/testdata/reciprocal/container.zip.meta_lic
index ea3598f..feb08fe 100644
--- a/tools/compliance/cmd/testdata/reciprocal/container.zip.meta_lic
+++ b/tools/compliance/cmd/testdata/reciprocal/container.zip.meta_lic
@@ -2,17 +2,17 @@
 projects:  "container/zip"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  true
 built:  "out/target/product/fictional/obj/ETC/container_intermediates/container.zip"
 installed:  "out/target/product/fictional/data/container.zip"
 install_map {
   from_path:  "out/target/product/fictional/system/lib/"
-  container_path:  ""
+  container_path:  "/"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/bin/"
-  container_path:  ""
+  container_path:  "/"
 }
 sources:  "out/target/product/fictional/system/lib/liba.so"
 sources:  "out/target/product/fictional/system/lib/libb.so"
diff --git a/tools/compliance/cmd/testdata/reciprocal/highest.apex.meta_lic b/tools/compliance/cmd/testdata/reciprocal/highest.apex.meta_lic
index 1fec741..185d04a 100644
--- a/tools/compliance/cmd/testdata/reciprocal/highest.apex.meta_lic
+++ b/tools/compliance/cmd/testdata/reciprocal/highest.apex.meta_lic
@@ -2,25 +2,25 @@
 projects:  "highest/apex"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  true
 built:  "out/target/product/fictional/obj/ETC/highest_intermediates/highest.apex"
 installed:  "out/target/product/fictional/system/apex/highest.apex"
 install_map {
   from_path:  "out/target/product/fictional/system/lib/liba.so"
-  container_path:  "lib/liba.so"
+  container_path:  "/lib/liba.so"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/lib/libb.so"
-  container_path:  "lib/libb.so"
+  container_path:  "/lib/libb.so"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/bin/bin1"
-  container_path:  "bin/bin1"
+  container_path:  "/bin/bin1"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/bin/bin2"
-  container_path:  "bin/bin2"
+  container_path:  "/bin/bin2"
 }
 sources:  "out/target/product/fictional/system/lib/liba.so"
 sources:  "out/target/product/fictional/system/lib/libb.so"
diff --git a/tools/compliance/cmd/testdata/reciprocal/lib/liba.so.meta_lic b/tools/compliance/cmd/testdata/reciprocal/lib/liba.so.meta_lic
index 79d7a9e..dd05155 100644
--- a/tools/compliance/cmd/testdata/reciprocal/lib/liba.so.meta_lic
+++ b/tools/compliance/cmd/testdata/reciprocal/lib/liba.so.meta_lic
@@ -2,6 +2,7 @@
 projects:  "device/library"
 license_kinds:  "SPDX-license-identifier-MPL"
 license_conditions:  "reciprocal"
+license_texts:  "testdata/reciprocal/RECIPROCAL_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/liba.so"
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/liba.a"
diff --git a/tools/compliance/cmd/testdata/reciprocal/lib/libb.so.meta_lic b/tools/compliance/cmd/testdata/reciprocal/lib/libb.so.meta_lic
index a4935d4..e60ef73 100644
--- a/tools/compliance/cmd/testdata/reciprocal/lib/libb.so.meta_lic
+++ b/tools/compliance/cmd/testdata/reciprocal/lib/libb.so.meta_lic
@@ -2,7 +2,7 @@
 projects:  "base/library"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libb.so"
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libb.a"
diff --git a/tools/compliance/cmd/testdata/reciprocal/lib/libc.a.meta_lic b/tools/compliance/cmd/testdata/reciprocal/lib/libc.a.meta_lic
index 8f6d356..f794305 100644
--- a/tools/compliance/cmd/testdata/reciprocal/lib/libc.a.meta_lic
+++ b/tools/compliance/cmd/testdata/reciprocal/lib/libc.a.meta_lic
@@ -2,5 +2,6 @@
 projects:  "static/library"
 license_kinds:  "SPDX-license-identifier-MPL"
 license_conditions:  "reciprocal"
+license_texts:  "testdata/reciprocal/RECIPROCAL_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libc.a"
diff --git a/tools/compliance/cmd/testdata/reciprocal/lib/libd.so.meta_lic b/tools/compliance/cmd/testdata/reciprocal/lib/libd.so.meta_lic
index 942d298..e6a060c 100644
--- a/tools/compliance/cmd/testdata/reciprocal/lib/libd.so.meta_lic
+++ b/tools/compliance/cmd/testdata/reciprocal/lib/libd.so.meta_lic
@@ -2,6 +2,7 @@
 projects:  "dynamic/library"
 license_kinds:  "SPDX-license-identifier-MIT"
 license_conditions:  "notice"
+license_texts:  "testdata/notice/NOTICE_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libd.so"
 installed:  "out/target/product/fictional/system/lib/libd.so"
diff --git a/tools/compliance/cmd/testdata/regressgpl1/container.zip.meta_lic b/tools/compliance/cmd/testdata/regressgpl1/container.zip.meta_lic
index 295bcdb..21b6d5a 100644
--- a/tools/compliance/cmd/testdata/regressgpl1/container.zip.meta_lic
+++ b/tools/compliance/cmd/testdata/regressgpl1/container.zip.meta_lic
@@ -8,11 +8,11 @@
 installed:  "out/target/product/fictional/data/container.zip"
 install_map {
   from_path:  "out/target/product/fictional/system/lib/"
-  container_path:  ""
+  container_path:  "/"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/bin/"
-  container_path:  ""
+  container_path:  "/"
 }
 sources:  "out/target/product/fictional/system/bin/bin1"
 sources:  "out/target/product/fictional/system/bin/bin2"
diff --git a/tools/compliance/cmd/testdata/regressgpl2/container.zip.meta_lic b/tools/compliance/cmd/testdata/regressgpl2/container.zip.meta_lic
index 71b68cd..d32bf94 100644
--- a/tools/compliance/cmd/testdata/regressgpl2/container.zip.meta_lic
+++ b/tools/compliance/cmd/testdata/regressgpl2/container.zip.meta_lic
@@ -8,11 +8,11 @@
 installed:  "out/target/product/fictional/data/container.zip"
 install_map {
   from_path:  "out/target/product/fictional/system/lib/"
-  container_path:  ""
+  container_path:  "/"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/bin/"
-  container_path:  ""
+  container_path:  "/"
 }
 sources:  "out/target/product/fictional/system/bin/bin1"
 sources:  "out/target/product/fictional/system/bin/bin2"
diff --git a/tools/compliance/cmd/testdata/restricted/RESTRICTED_LICENSE b/tools/compliance/cmd/testdata/restricted/RESTRICTED_LICENSE
new file mode 100644
index 0000000..16a2819
--- /dev/null
+++ b/tools/compliance/cmd/testdata/restricted/RESTRICTED_LICENSE
@@ -0,0 +1 @@
+###Restricted License###
diff --git a/tools/compliance/cmd/testdata/restricted/application.meta_lic b/tools/compliance/cmd/testdata/restricted/application.meta_lic
index a06a2c8..7ef536d 100644
--- a/tools/compliance/cmd/testdata/restricted/application.meta_lic
+++ b/tools/compliance/cmd/testdata/restricted/application.meta_lic
@@ -3,7 +3,7 @@
 projects:  "distributable/application"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/application_intermediates/application"
 installed:  "out/target/product/fictional/bin/application"
diff --git a/tools/compliance/cmd/testdata/restricted/bin/bin1.meta_lic b/tools/compliance/cmd/testdata/restricted/bin/bin1.meta_lic
index dd8a2e0..ef0d0c0 100644
--- a/tools/compliance/cmd/testdata/restricted/bin/bin1.meta_lic
+++ b/tools/compliance/cmd/testdata/restricted/bin/bin1.meta_lic
@@ -3,7 +3,7 @@
 projects:  "static/binary"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin1"
 installed:  "out/target/product/fictional/system/bin/bin1"
diff --git a/tools/compliance/cmd/testdata/restricted/bin/bin2.meta_lic b/tools/compliance/cmd/testdata/restricted/bin/bin2.meta_lic
index 714b537..331d5ac 100644
--- a/tools/compliance/cmd/testdata/restricted/bin/bin2.meta_lic
+++ b/tools/compliance/cmd/testdata/restricted/bin/bin2.meta_lic
@@ -3,10 +3,10 @@
 projects:  "dynamic/binary"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  false
-built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin1"
-installed:  "out/target/product/fictional/system/bin/bin1"
+built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin2"
+installed:  "out/target/product/fictional/system/bin/bin2"
 sources:  "out/target/product/fictional/system/lib/libb.so"
 sources:  "out/target/product/fictional/system/lib/libd.so"
 deps:  {
diff --git a/tools/compliance/cmd/testdata/restricted/bin/bin3.meta_lic b/tools/compliance/cmd/testdata/restricted/bin/bin3.meta_lic
index f93553d..7ef14e9 100644
--- a/tools/compliance/cmd/testdata/restricted/bin/bin3.meta_lic
+++ b/tools/compliance/cmd/testdata/restricted/bin/bin3.meta_lic
@@ -3,6 +3,7 @@
 projects:  "standalone/binary"
 license_kinds:  "SPDX-license-identifier-LGPL-2.0"
 license_conditions:  "restricted"
+license_texts:  "testdata/restricted/RESTRICTED_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin3"
 installed:  "out/target/product/fictional/system/bin/bin3"
diff --git a/tools/compliance/cmd/testdata/restricted/container.zip.meta_lic b/tools/compliance/cmd/testdata/restricted/container.zip.meta_lic
index a63263b..47e0e24 100644
--- a/tools/compliance/cmd/testdata/restricted/container.zip.meta_lic
+++ b/tools/compliance/cmd/testdata/restricted/container.zip.meta_lic
@@ -2,17 +2,17 @@
 projects:  "container/zip"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  true
 built:  "out/target/product/fictional/obj/ETC/container_intermediates/container.zip"
 installed:  "out/target/product/fictional/data/container.zip"
 install_map {
   from_path:  "out/target/product/fictional/system/lib/"
-  container_path:  ""
+  container_path:  "/"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/bin/"
-  container_path:  ""
+  container_path:  "/"
 }
 sources:  "out/target/product/fictional/system/lib/liba.so"
 sources:  "out/target/product/fictional/system/lib/libb.so"
diff --git a/tools/compliance/cmd/testdata/restricted/highest.apex.meta_lic b/tools/compliance/cmd/testdata/restricted/highest.apex.meta_lic
index dba419a..3042309 100644
--- a/tools/compliance/cmd/testdata/restricted/highest.apex.meta_lic
+++ b/tools/compliance/cmd/testdata/restricted/highest.apex.meta_lic
@@ -2,25 +2,25 @@
 projects:  "highest/apex"
 license_kinds:  "SPDX-license-identifier-Apache-2.0"
 license_conditions:  "notice"
-license_texts:  "build/soong/licenses/LICENSE"
+license_texts:  "testdata/firstparty/FIRST_PARTY_LICENSE"
 is_container:  true
 built:  "out/target/product/fictional/obj/ETC/highest_intermediates/highest.apex"
 installed:  "out/target/product/fictional/system/apex/highest.apex"
 install_map {
   from_path:  "out/target/product/fictional/system/lib/liba.so"
-  container_path:  "lib/liba.so"
+  container_path:  "/lib/liba.so"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/lib/libb.so"
-  container_path:  "lib/libb.so"
+  container_path:  "/lib/libb.so"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/bin/bin1"
-  container_path:  "bin/bin1"
+  container_path:  "/bin/bin1"
 }
 install_map {
   from_path:  "out/target/product/fictional/system/bin/bin2"
-  container_path:  "bin/bin2"
+  container_path:  "/bin/bin2"
 }
 sources:  "out/target/product/fictional/system/lib/liba.so"
 sources:  "out/target/product/fictional/system/lib/libb.so"
diff --git a/tools/compliance/cmd/testdata/restricted/lib/liba.so.meta_lic b/tools/compliance/cmd/testdata/restricted/lib/liba.so.meta_lic
index b1d4560..a505d4a 100644
--- a/tools/compliance/cmd/testdata/restricted/lib/liba.so.meta_lic
+++ b/tools/compliance/cmd/testdata/restricted/lib/liba.so.meta_lic
@@ -2,6 +2,7 @@
 projects:  "device/library"
 license_kinds:  "SPDX-license-identifier-LGPL-2.0"
 license_conditions:  "restricted"
+license_texts:  "testdata/restricted/RESTRICTED_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/liba.so"
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/liba.a"
diff --git a/tools/compliance/cmd/testdata/restricted/lib/libb.so.meta_lic b/tools/compliance/cmd/testdata/restricted/lib/libb.so.meta_lic
index c1b86d7..739d357 100644
--- a/tools/compliance/cmd/testdata/restricted/lib/libb.so.meta_lic
+++ b/tools/compliance/cmd/testdata/restricted/lib/libb.so.meta_lic
@@ -2,6 +2,7 @@
 projects:  "base/library"
 license_kinds:  "SPDX-license-identifier-GPL-2.0"
 license_conditions:  "restricted"
+license_texts:  "testdata/restricted/RESTRICTED_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libb.so"
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libb.a"
diff --git a/tools/compliance/cmd/testdata/restricted/lib/libc.a.meta_lic b/tools/compliance/cmd/testdata/restricted/lib/libc.a.meta_lic
index 8f6d356..f794305 100644
--- a/tools/compliance/cmd/testdata/restricted/lib/libc.a.meta_lic
+++ b/tools/compliance/cmd/testdata/restricted/lib/libc.a.meta_lic
@@ -2,5 +2,6 @@
 projects:  "static/library"
 license_kinds:  "SPDX-license-identifier-MPL"
 license_conditions:  "reciprocal"
+license_texts:  "testdata/reciprocal/RECIPROCAL_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libc.a"
diff --git a/tools/compliance/cmd/testdata/restricted/lib/libd.so.meta_lic b/tools/compliance/cmd/testdata/restricted/lib/libd.so.meta_lic
index 942d298..e6a060c 100644
--- a/tools/compliance/cmd/testdata/restricted/lib/libd.so.meta_lic
+++ b/tools/compliance/cmd/testdata/restricted/lib/libd.so.meta_lic
@@ -2,6 +2,7 @@
 projects:  "dynamic/library"
 license_kinds:  "SPDX-license-identifier-MIT"
 license_conditions:  "notice"
+license_texts:  "testdata/notice/NOTICE_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/libd.so"
 installed:  "out/target/product/fictional/system/lib/libd.so"
diff --git a/tools/compliance/cmd/textnotice/textnotice.go b/tools/compliance/cmd/textnotice/textnotice.go
new file mode 100644
index 0000000..58afb48
--- /dev/null
+++ b/tools/compliance/cmd/textnotice/textnotice.go
@@ -0,0 +1,214 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bytes"
+	"compress/gzip"
+	"flag"
+	"fmt"
+	"io"
+	"io/fs"
+	"os"
+	"path/filepath"
+	"strings"
+
+	"android/soong/tools/compliance"
+
+	"github.com/google/blueprint/deptools"
+)
+
+var (
+	outputFile  = flag.String("o", "-", "Where to write the NOTICE text file. (default stdout)")
+	depsFile    = flag.String("d", "", "Where to write the deps file")
+	product     = flag.String("product", "", "The name of the product for which the notice is generated.")
+	stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
+	title       = flag.String("title", "", "The title of the notice file.")
+
+	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
+	failNoLicenses    = fmt.Errorf("No licenses found")
+)
+
+type context struct {
+	stdout      io.Writer
+	stderr      io.Writer
+	rootFS      fs.FS
+	product     string
+	stripPrefix []string
+	title       string
+	deps        *[]string
+}
+
+func (ctx context) strip(installPath string) string {
+	for _, prefix := range ctx.stripPrefix {
+		if strings.HasPrefix(installPath, prefix) {
+			p := strings.TrimPrefix(installPath, prefix)
+			if 0 == len(p) {
+				p = ctx.product
+			}
+			if 0 == len(p) {
+				continue
+			}
+			return p
+		}
+	}
+	return installPath
+}
+
+func init() {
+	flag.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs a text NOTICE file.
+
+Options:
+`, filepath.Base(os.Args[0]))
+		flag.PrintDefaults()
+	}
+}
+
+// newMultiString creates a flag that allows multiple values in an array.
+func newMultiString(name, usage string) *multiString {
+	var f multiString
+	flag.Var(&f, name, usage)
+	return &f
+}
+
+// multiString implements the flag `Value` interface for multiple strings.
+type multiString []string
+
+func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
+func main() {
+	flag.Parse()
+
+	// Must specify at least one root target.
+	if flag.NArg() == 0 {
+		flag.Usage()
+		os.Exit(2)
+	}
+
+	if len(*outputFile) == 0 {
+		flag.Usage()
+		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+		os.Exit(2)
+	} else {
+		dir, err := filepath.Abs(filepath.Dir(*outputFile))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+		fi, err := os.Stat(dir)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+			os.Exit(1)
+		}
+		if !fi.IsDir() {
+			fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+			os.Exit(1)
+		}
+	}
+
+	var ofile io.Writer
+	var closer io.Closer
+	ofile = os.Stdout
+	var obuf *bytes.Buffer
+	if *outputFile != "-" {
+		obuf = &bytes.Buffer{}
+		ofile = obuf
+	}
+	if strings.HasSuffix(*outputFile, ".gz") {
+		ofile, _ = gzip.NewWriterLevel(obuf, gzip.BestCompression)
+		closer = ofile.(io.Closer)
+	}
+
+	var deps []string
+
+	ctx := &context{ofile, os.Stderr, os.DirFS("."), *product, *stripPrefix, *title, &deps}
+
+	err := textNotice(ctx, flag.Args()...)
+	if err != nil {
+		if err == failNoneRequested {
+			flag.Usage()
+		}
+		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
+		os.Exit(1)
+	}
+	if closer != nil {
+		closer.Close()
+	}
+
+	if *outputFile != "-" {
+		err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write output to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+	}
+	if *depsFile != "" {
+		err := deptools.WriteDepFile(*depsFile, *outputFile, deps)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write deps to %q: %s\n", *depsFile, err)
+			os.Exit(1)
+		}
+	}
+	os.Exit(0)
+}
+
+// textNotice implements the textNotice utility.
+func textNotice(ctx *context, files ...string) error {
+	// Must be at least one root file.
+	if len(files) < 1 {
+		return failNoneRequested
+	}
+
+	// Read the license graph from the license metadata files (*.meta_lic).
+	licenseGraph, err := compliance.ReadLicenseGraph(ctx.rootFS, ctx.stderr, files)
+	if err != nil {
+		return fmt.Errorf("Unable to read license metadata file(s) %q: %v\n", files, err)
+	}
+	if licenseGraph == nil {
+		return failNoLicenses
+	}
+
+	// rs contains all notice resolutions.
+	rs := compliance.ResolveNotices(licenseGraph)
+
+	ni, err := compliance.IndexLicenseTexts(ctx.rootFS, licenseGraph, rs)
+	if err != nil {
+		return fmt.Errorf("Unable to read license text file(s) for %q: %v\n", files, err)
+	}
+
+	if len(ctx.title) > 0 {
+		fmt.Fprintf(ctx.stdout, "%s\n\n", ctx.title)
+	}
+	for h := range ni.Hashes() {
+		fmt.Fprintln(ctx.stdout, "==============================================================================")
+		for _, libName := range ni.HashLibs(h) {
+			fmt.Fprintf(ctx.stdout, "%s used by:\n", libName)
+			for _, installPath := range ni.HashLibInstalls(h, libName) {
+				fmt.Fprintf(ctx.stdout, "  %s\n", ctx.strip(installPath))
+			}
+			fmt.Fprintln(ctx.stdout)
+		}
+		ctx.stdout.Write(ni.HashText(h))
+		fmt.Fprintln(ctx.stdout)
+	}
+
+	*ctx.deps = ni.InputNoticeFiles()
+
+	return nil
+}
diff --git a/tools/compliance/cmd/textnotice/textnotice_test.go b/tools/compliance/cmd/textnotice/textnotice_test.go
new file mode 100644
index 0000000..9d8d0ca
--- /dev/null
+++ b/tools/compliance/cmd/textnotice/textnotice_test.go
@@ -0,0 +1,716 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bufio"
+	"bytes"
+	"fmt"
+	"os"
+	"reflect"
+	"regexp"
+	"strings"
+	"testing"
+)
+
+var (
+	horizontalRule = regexp.MustCompile("^===[=]*===$")
+)
+
+func TestMain(m *testing.M) {
+	// Change into the parent directory before running the tests
+	// so they can find the testdata directory.
+	if err := os.Chdir(".."); err != nil {
+		fmt.Printf("failed to change to testdata directory: %s\n", err)
+		os.Exit(1)
+	}
+	os.Exit(m.Run())
+}
+
+func Test(t *testing.T) {
+	tests := []struct {
+		condition    string
+		name         string
+		roots        []string
+		stripPrefix  string
+		expectedOut  []matcher
+		expectedDeps []string
+	}{
+		{
+			condition: "firstparty",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/bin/bin2"},
+				usedBy{"highest.apex/lib/liba.so"},
+				usedBy{"highest.apex/lib/libb.so"},
+				firstParty{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "firstparty",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"container.zip"},
+				usedBy{"container.zip/bin1"},
+				usedBy{"container.zip/bin2"},
+				usedBy{"container.zip/liba.so"},
+				usedBy{"container.zip/libb.so"},
+				firstParty{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "firstparty",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"application"},
+				firstParty{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "firstparty",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"bin/bin1"},
+				firstParty{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "firstparty",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"lib/libd.so"},
+				firstParty{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "notice",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/bin/bin2"},
+				usedBy{"highest.apex/lib/libb.so"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/lib/liba.so"},
+				library{"External"},
+				usedBy{"highest.apex/bin/bin1"},
+				notice{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"container.zip"},
+				usedBy{"container.zip/bin1"},
+				usedBy{"container.zip/bin2"},
+				usedBy{"container.zip/libb.so"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"container.zip/bin1"},
+				usedBy{"container.zip/liba.so"},
+				library{"External"},
+				usedBy{"container.zip/bin1"},
+				notice{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"application"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"application"},
+				notice{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"bin/bin1"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"bin/bin1"},
+				library{"External"},
+				usedBy{"bin/bin1"},
+				notice{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"External"},
+				usedBy{"lib/libd.so"},
+				notice{},
+			},
+			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+		},
+		{
+			condition: "reciprocal",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/bin/bin2"},
+				usedBy{"highest.apex/lib/libb.so"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/lib/liba.so"},
+				library{"External"},
+				usedBy{"highest.apex/bin/bin1"},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"container.zip"},
+				usedBy{"container.zip/bin1"},
+				usedBy{"container.zip/bin2"},
+				usedBy{"container.zip/libb.so"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"container.zip/bin1"},
+				usedBy{"container.zip/liba.so"},
+				library{"External"},
+				usedBy{"container.zip/bin1"},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"application"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"application"},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"bin/bin1"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"bin/bin1"},
+				library{"External"},
+				usedBy{"bin/bin1"},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"External"},
+				usedBy{"lib/libd.so"},
+				notice{},
+			},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/bin/bin2"},
+				firstParty{},
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex/bin/bin2"},
+				usedBy{"highest.apex/lib/libb.so"},
+				library{"Device"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/lib/liba.so"},
+				restricted{},
+				hr{},
+				library{"External"},
+				usedBy{"highest.apex/bin/bin1"},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"container.zip"},
+				usedBy{"container.zip/bin1"},
+				usedBy{"container.zip/bin2"},
+				firstParty{},
+				hr{},
+				library{"Android"},
+				usedBy{"container.zip/bin2"},
+				usedBy{"container.zip/libb.so"},
+				library{"Device"},
+				usedBy{"container.zip/bin1"},
+				usedBy{"container.zip/liba.so"},
+				restricted{},
+				hr{},
+				library{"External"},
+				usedBy{"container.zip/bin1"},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"application"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"application"},
+				restricted{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"bin/bin1"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"bin/bin1"},
+				restricted{},
+				hr{},
+				library{"External"},
+				usedBy{"bin/bin1"},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"External"},
+				usedBy{"lib/libd.so"},
+				notice{},
+			},
+			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+		},
+		{
+			condition: "proprietary",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex/bin/bin2"},
+				usedBy{"highest.apex/lib/libb.so"},
+				restricted{},
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex"},
+				usedBy{"highest.apex/bin/bin1"},
+				firstParty{},
+				hr{},
+				library{"Android"},
+				usedBy{"highest.apex/bin/bin2"},
+				library{"Device"},
+				usedBy{"highest.apex/bin/bin1"},
+				usedBy{"highest.apex/lib/liba.so"},
+				library{"External"},
+				usedBy{"highest.apex/bin/bin1"},
+				proprietary{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"container.zip/bin2"},
+				usedBy{"container.zip/libb.so"},
+				restricted{},
+				hr{},
+				library{"Android"},
+				usedBy{"container.zip"},
+				usedBy{"container.zip/bin1"},
+				firstParty{},
+				hr{},
+				library{"Android"},
+				usedBy{"container.zip/bin2"},
+				library{"Device"},
+				usedBy{"container.zip/bin1"},
+				usedBy{"container.zip/liba.so"},
+				library{"External"},
+				usedBy{"container.zip/bin1"},
+				proprietary{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"application"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"application"},
+				proprietary{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"Android"},
+				usedBy{"bin/bin1"},
+				firstParty{},
+				hr{},
+				library{"Device"},
+				usedBy{"bin/bin1"},
+				library{"External"},
+				usedBy{"bin/bin1"},
+				proprietary{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				hr{},
+				library{"External"},
+				usedBy{"lib/libd.so"},
+				notice{},
+			},
+			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.condition+" "+tt.name, func(t *testing.T) {
+			stdout := &bytes.Buffer{}
+			stderr := &bytes.Buffer{}
+
+			rootFiles := make([]string, 0, len(tt.roots))
+			for _, r := range tt.roots {
+				rootFiles = append(rootFiles, "testdata/"+tt.condition+"/"+r)
+			}
+
+			var deps []string
+
+			ctx := context{stdout, stderr, os.DirFS("."), "", []string{tt.stripPrefix}, "", &deps}
+
+			err := textNotice(&ctx, rootFiles...)
+			if err != nil {
+				t.Fatalf("textnotice: error = %v, stderr = %v", err, stderr)
+				return
+			}
+			if stderr.Len() > 0 {
+				t.Errorf("textnotice: gotStderr = %v, want none", stderr)
+			}
+
+			t.Logf("got stdout: %s", stdout.String())
+
+			t.Logf("want stdout: %s", matcherList(tt.expectedOut).String())
+
+			out := bufio.NewScanner(stdout)
+			lineno := 0
+			for out.Scan() {
+				line := out.Text()
+				if strings.TrimLeft(line, " ") == "" {
+					continue
+				}
+				if len(tt.expectedOut) <= lineno {
+					t.Errorf("unexpected output at line %d: got %q, want nothing (wanted %d lines)", lineno+1, line, len(tt.expectedOut))
+				} else if !tt.expectedOut[lineno].isMatch(line) {
+					t.Errorf("unexpected output at line %d: got %q, want %q", lineno+1, line, tt.expectedOut[lineno].String())
+				}
+				lineno++
+			}
+			for ; lineno < len(tt.expectedOut); lineno++ {
+				t.Errorf("textnotice: missing output line %d: ended early, want %q", lineno+1, tt.expectedOut[lineno].String())
+			}
+
+			t.Logf("got deps: %q", deps)
+
+			t.Logf("want deps: %q", tt.expectedDeps)
+
+			if g, w := deps, tt.expectedDeps; !reflect.DeepEqual(g, w) {
+				t.Errorf("unexpected deps, wanted:\n%s\ngot:\n%s\n",
+					strings.Join(w, "\n"), strings.Join(g, "\n"))
+			}
+		})
+	}
+}
+
+// matcher checks one line of generated notice output against an expected
+// pattern; String renders the expected form for test-failure messages.
+type matcher interface {
+	isMatch(line string) bool
+	String() string
+}
+
+// hr matches the horizontal-rule separator line emitted between license
+// sections of the notice output.
+type hr struct{}
+
+func (m hr) isMatch(line string) bool {
+	// horizontalRule is a package-level regexp declared elsewhere in this
+	// file (outside this hunk).
+	return horizontalRule.MatchString(line)
+}
+
+func (m hr) String() string {
+	return " ================================================== "
+}
+
+// library matches the "<name> used by:" header line that opens a library's
+// license section; only the leading "<name> " prefix is actually checked.
+type library struct {
+	name string
+}
+
+func (m library) isMatch(line string) bool {
+	return strings.HasPrefix(line, m.name+" ")
+}
+
+func (m library) String() string {
+	return m.name + " used by:"
+}
+
+// usedBy matches an indented install-path line: it must start with a space,
+// begin with "out/" once indentation is trimmed, and end with "/<name>".
+type usedBy struct {
+	name string
+}
+
+func (m usedBy) isMatch(line string) bool {
+	return len(line) > 0 && line[0] == ' ' && strings.HasPrefix(strings.TrimLeft(line, " "), "out/") && strings.HasSuffix(line, "/"+m.name)
+}
+
+func (m usedBy) String() string {
+	// The middle of the path is elided; isMatch only pins prefix and suffix.
+	return "  out/.../" + m.name
+}
+
+// firstParty matches the first line of the testdata first-party license
+// text, which begins with the sentinel "&&&First Party License&&&".
+type firstParty struct{}
+
+func (m firstParty) isMatch(line string) bool {
+	return strings.HasPrefix(strings.TrimLeft(line, " "), "&&&First Party License&&&")
+}
+
+func (m firstParty) String() string {
+	return "&&&First Party License&&&"
+}
+
+// notice matches the first line of the testdata notice license text,
+// which begins with the sentinel "%%%Notice License%%%".
+type notice struct{}
+
+func (m notice) isMatch(line string) bool {
+	return strings.HasPrefix(strings.TrimLeft(line, " "), "%%%Notice License%%%")
+}
+
+func (m notice) String() string {
+	return "%%%Notice License%%%"
+}
+
+// reciprocal matches the first line of the testdata reciprocal license
+// text, which begins with the sentinel "$$$Reciprocal License$$$".
+type reciprocal struct{}
+
+func (m reciprocal) isMatch(line string) bool {
+	return strings.HasPrefix(strings.TrimLeft(line, " "), "$$$Reciprocal License$$$")
+}
+
+func (m reciprocal) String() string {
+	return "$$$Reciprocal License$$$"
+}
+
+// restricted matches the first line of the testdata restricted license
+// text, which begins with the sentinel "###Restricted License###".
+type restricted struct{}
+
+func (m restricted) isMatch(line string) bool {
+	return strings.HasPrefix(strings.TrimLeft(line, " "), "###Restricted License###")
+}
+
+func (m restricted) String() string {
+	return "###Restricted License###"
+}
+
+// proprietary matches the first line of the testdata proprietary license
+// text, which begins with the sentinel "@@@Proprietary License@@@".
+type proprietary struct{}
+
+func (m proprietary) isMatch(line string) bool {
+	return strings.HasPrefix(strings.TrimLeft(line, " "), "@@@Proprietary License@@@")
+}
+
+func (m proprietary) String() string {
+	return "@@@Proprietary License@@@"
+}
+
+// matcherList renders a whole expected-output sequence for the test's
+// "want stdout" log message.
+type matcherList []matcher
+
+func (l matcherList) String() string {
+	var sb strings.Builder
+	for _, m := range l {
+		s := m.String()
+		// License-text sentinels are wrapped in a matching 3-character
+		// delimiter ("&&&...&&&", "%%%...%%%", etc.), so this check is
+		// true only for them; those lines get a blank line before and
+		// after, mirroring how the notice separates license bodies.
+		// hr's " ==...== " deliberately fails the comparison.
+		if s[:3] == s[len(s)-3:] {
+			fmt.Fprintln(&sb)
+		}
+		fmt.Fprintf(&sb, "%s\n", s)
+		if s[:3] == s[len(s)-3:] {
+			fmt.Fprintln(&sb)
+		}
+	}
+	return sb.String()
+}
diff --git a/tools/compliance/cmd/xmlnotice/xmlnotice.go b/tools/compliance/cmd/xmlnotice/xmlnotice.go
new file mode 100644
index 0000000..1c712cb
--- /dev/null
+++ b/tools/compliance/cmd/xmlnotice/xmlnotice.go
@@ -0,0 +1,222 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bytes"
+	"compress/gzip"
+	"encoding/xml"
+	"flag"
+	"fmt"
+	"io"
+	"io/fs"
+	"os"
+	"path/filepath"
+	"strings"
+
+	"android/soong/tools/compliance"
+
+	"github.com/google/blueprint/deptools"
+)
+
+var (
+	// Command-line flags for the xmlnotice tool.
+	outputFile  = flag.String("o", "-", "Where to write the NOTICE xml or xml.gz file. (default stdout)")
+	depsFile    = flag.String("d", "", "Where to write the deps file")
+	product     = flag.String("product", "", "The name of the product for which the notice is generated.")
+	stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
+	title       = flag.String("title", "", "The title of the notice file.")
+
+	// Sentinel errors distinguished in main; failNoneRequested also
+	// triggers the usage message. The leading newline sets the message
+	// apart from preceding output when printed.
+	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
+	failNoLicenses    = fmt.Errorf("No licenses found")
+)
+
+// context carries the I/O streams and settings threaded through xmlNotice.
+type context struct {
+	stdout      io.Writer // destination for the generated XML
+	stderr      io.Writer // destination for diagnostics
+	rootFS      fs.FS     // filesystem used to resolve metadata and license files
+	product     string    // substituted when stripping a prefix empties a path
+	stripPrefix []string  // prefixes removed from install paths; first usable match wins
+	title       string    // NOTE(review): set from -title but never read in this file — confirm intended
+	deps        *[]string // out-parameter: license-text files read, for the deps file
+}
+
+// strip removes the first configured prefix that matches installPath.
+// If stripping leaves an empty path, the product name is substituted;
+// if that is empty too, the remaining prefixes are tried instead.
+// A path matching no prefix is returned unchanged.
+func (ctx context) strip(installPath string) string {
+	for _, prefix := range ctx.stripPrefix {
+		if strings.HasPrefix(installPath, prefix) {
+			p := strings.TrimPrefix(installPath, prefix)
+			if 0 == len(p) {
+				p = ctx.product
+			}
+			if 0 == len(p) {
+				// Prefix consumed the whole path and no product name is
+				// available; try the next prefix.
+				continue
+			}
+			return p
+		}
+	}
+	return installPath
+}
+
+// init replaces the default flag usage text with command-specific help
+// describing the plain and gzipped output modes.
+func init() {
+	flag.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs an xml NOTICE.xml or gzipped NOTICE.xml.gz file if the -o filename ends
+with ".gz".
+
+Options:
+`, filepath.Base(os.Args[0]))
+		flag.PrintDefaults()
+	}
+}
+
+// newMultiString creates a flag that allows multiple values in an array.
+// The returned slice accumulates one entry for every occurrence of -name
+// on the command line.
+func newMultiString(name, usage string) *multiString {
+	var f multiString
+	flag.Var(&f, name, usage)
+	return &f
+}
+
+// multiString implements the flag `Value` interface for multiple strings.
+type multiString []string
+
+// String renders the accumulated values; Set appends one occurrence.
+func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
+// main parses flags, validates the output destination, runs xmlNotice,
+// and then writes the staged output and optional deps file.
+func main() {
+	flag.Parse()
+
+	// Must specify at least one root target.
+	if flag.NArg() == 0 {
+		flag.Usage()
+		os.Exit(2)
+	}
+
+	// Validate -o: it must be non-empty and its parent must be an
+	// existing directory, so a bad path fails before any work is done.
+	if len(*outputFile) == 0 {
+		flag.Usage()
+		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+		os.Exit(2)
+	} else {
+		dir, err := filepath.Abs(filepath.Dir(*outputFile))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+		fi, err := os.Stat(dir)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+			os.Exit(1)
+		}
+		if !fi.IsDir() {
+			fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+			os.Exit(1)
+		}
+	}
+
+	// Output defaults to stdout; file output is staged in a buffer so the
+	// file is only written after xmlNotice succeeds. A ".gz" suffix adds a
+	// gzip layer ("-" never ends in ".gz", so obuf is non-nil there).
+	// gzip.NewWriterLevel only errors on an invalid level, and
+	// BestCompression is valid, so the error is ignored.
+	var ofile io.Writer
+	var closer io.Closer
+	ofile = os.Stdout
+	var obuf *bytes.Buffer
+	if *outputFile != "-" {
+		obuf = &bytes.Buffer{}
+		ofile = obuf
+	}
+	if strings.HasSuffix(*outputFile, ".gz") {
+		ofile, _ = gzip.NewWriterLevel(obuf, gzip.BestCompression)
+		closer = ofile.(io.Closer)
+	}
+
+	var deps []string
+
+	ctx := &context{ofile, os.Stderr, os.DirFS("."), *product, *stripPrefix, *title, &deps}
+
+	err := xmlNotice(ctx, flag.Args()...)
+	if err != nil {
+		if err == failNoneRequested {
+			flag.Usage()
+		}
+		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
+		os.Exit(1)
+	}
+	// Flush the gzip stream (if any) before reading obuf.
+	if closer != nil {
+		closer.Close()
+	}
+
+	// Write the staged buffer to the requested output file.
+	if *outputFile != "-" {
+		err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write output to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+	}
+	// Optionally record the notice inputs in a deps file for the build.
+	if *depsFile != "" {
+		err := deptools.WriteDepFile(*depsFile, *outputFile, deps)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write deps to %q: %s\n", *depsFile, err)
+			os.Exit(1)
+		}
+	}
+	os.Exit(0)
+}
+
+// xmlNotice implements the xmlnotice utility.
+// It reads the license graph rooted at the given metadata files, resolves
+// notice obligations, and writes a NOTICE XML document to ctx.stdout: one
+// <file-name> element per install-path/library pair (keyed by license-text
+// hash) and one <file-content> element per distinct license text. The
+// license-text files read are reported through ctx.deps.
+func xmlNotice(ctx *context, files ...string) error {
+	// Must be at least one root file.
+	if len(files) < 1 {
+		return failNoneRequested
+	}
+
+	// Read the license graph from the license metadata files (*.meta_lic).
+	// NOTE(review): these error strings are capitalized and end in "\n",
+	// which Go error-string conventions discourage.
+	licenseGraph, err := compliance.ReadLicenseGraph(ctx.rootFS, ctx.stderr, files)
+	if err != nil {
+		return fmt.Errorf("Unable to read license metadata file(s) %q: %v\n", files, err)
+	}
+	if licenseGraph == nil {
+		return failNoLicenses
+	}
+
+	// rs contains all notice resolutions.
+	rs := compliance.ResolveNotices(licenseGraph)
+
+	ni, err := compliance.IndexLicenseTexts(ctx.rootFS, licenseGraph, rs)
+	if err != nil {
+		return fmt.Errorf("Unable to read license text file(s) for %q: %v\n", files, err)
+	}
+
+	fmt.Fprintln(ctx.stdout, "<?xml version=\"1.0\" encoding=\"utf-8\"?>")
+	fmt.Fprintln(ctx.stdout, "<licenses>")
+
+	// One <file-name> element per (install path, library) pair; the
+	// contentId ties each file to its license text below.
+	for installPath := range ni.InstallPaths() {
+		p := ctx.strip(installPath)
+		for _, h := range ni.InstallHashes(installPath) {
+			for _, lib := range ni.InstallHashLibs(installPath, h) {
+				fmt.Fprintf(ctx.stdout, "<file-name contentId=\"%s\" lib=\"", h.String())
+				xml.EscapeText(ctx.stdout, []byte(lib))
+				fmt.Fprintf(ctx.stdout, "\">")
+				xml.EscapeText(ctx.stdout, []byte(p))
+				fmt.Fprintln(ctx.stdout, "</file-name>")
+			}
+		}
+	}
+	// One <file-content> element per distinct license text.
+	// NOTE(review): the text is entity-escaped even though it is emitted
+	// inside CDATA, so consumers see "&amp;" etc. verbatim; confirm that
+	// is what NOTICE.xml readers expect.
+	for h := range ni.Hashes() {
+		fmt.Fprintf(ctx.stdout, "<file-content contentId=\"%s\"><![CDATA[", h)
+		xml.EscapeText(ctx.stdout, ni.HashText(h))
+		fmt.Fprintf(ctx.stdout, "]]></file-content>\n\n")
+	}
+	fmt.Fprintln(ctx.stdout, "</licenses>")
+
+	// Report the license-text files that were read for the deps file.
+	*ctx.deps = ni.InputNoticeFiles()
+
+	return nil
+}
diff --git a/tools/compliance/cmd/xmlnotice/xmlnotice_test.go b/tools/compliance/cmd/xmlnotice/xmlnotice_test.go
new file mode 100644
index 0000000..424c95e
--- /dev/null
+++ b/tools/compliance/cmd/xmlnotice/xmlnotice_test.go
@@ -0,0 +1,634 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bufio"
+	"bytes"
+	"encoding/xml"
+	"fmt"
+	"os"
+	"reflect"
+	"regexp"
+	"strings"
+	"testing"
+)
+
+var (
+	// installTarget captures the lib attribute and install path of a
+	// <file-name> element. contentId is expected to be exactly 32
+	// characters — presumably a hex digest (MD5-sized); confirm against
+	// the notice index implementation.
+	installTarget = regexp.MustCompile(`^<file-name contentId="[^"]{32}" lib="([^"]*)">([^<]+)</file-name>`)
+	// licenseText captures the CDATA body of a <file-content> element;
+	// the [[] and []] character classes match literal brackets.
+	// NOTE(review): gofmt would align this "=" with installTarget's.
+	licenseText = regexp.MustCompile(`^<file-content contentId="[^"]{32}"><![[]CDATA[[]([^]]*)[]][]]></file-content>`)
+)
+
+func TestMain(m *testing.M) {
+	// Change into the parent directory before running the tests
+	// so they can find the testdata directory.
+	if err := os.Chdir(".."); err != nil {
+		// NOTE(review): the message says "testdata directory" but the
+		// chdir target is the parent directory — consider rewording.
+		fmt.Printf("failed to change to testdata directory: %s\n", err)
+		os.Exit(1)
+	}
+	os.Exit(m.Run())
+}
+
+// Test drives xmlNotice over the shared compliance testdata, one case per
+// (license condition, artifact shape), and checks both the emitted XML
+// lines and the recorded license-text dependencies.
+func Test(t *testing.T) {
+	tests := []struct {
+		condition    string // testdata subdirectory naming the license condition
+		name         string // artifact shape (apex, container, application, ...)
+		roots        []string // root *.meta_lic files relative to the condition dir
+		stripPrefix  string // NOTE(review): never set by any case below
+		expectedOut  []matcher // expected XML body lines, in order
+		expectedDeps []string // expected license-text files recorded as deps
+	}{
+		{
+			condition: "firstparty",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				target{"highest.apex", "Android"},
+				target{"highest.apex/bin/bin1", "Android"},
+				target{"highest.apex/bin/bin2", "Android"},
+				target{"highest.apex/lib/liba.so", "Android"},
+				target{"highest.apex/lib/libb.so", "Android"},
+				firstParty{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "firstparty",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				target{"container.zip", "Android"},
+				target{"container.zip/bin1", "Android"},
+				target{"container.zip/bin2", "Android"},
+				target{"container.zip/liba.so", "Android"},
+				target{"container.zip/libb.so", "Android"},
+				firstParty{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "firstparty",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				target{"application", "Android"},
+				firstParty{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "firstparty",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				target{"bin/bin1", "Android"},
+				firstParty{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "firstparty",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				target{"lib/libd.so", "Android"},
+				firstParty{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "notice",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				target{"highest.apex", "Android"},
+				target{"highest.apex/bin/bin1", "Android"},
+				target{"highest.apex/bin/bin1", "Device"},
+				target{"highest.apex/bin/bin1", "External"},
+				target{"highest.apex/bin/bin2", "Android"},
+				target{"highest.apex/lib/liba.so", "Device"},
+				target{"highest.apex/lib/libb.so", "Android"},
+				firstParty{},
+				notice{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				target{"container.zip", "Android"},
+				target{"container.zip/bin1", "Android"},
+				target{"container.zip/bin1", "Device"},
+				target{"container.zip/bin1", "External"},
+				target{"container.zip/bin2", "Android"},
+				target{"container.zip/liba.so", "Device"},
+				target{"container.zip/libb.so", "Android"},
+				firstParty{},
+				notice{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				target{"application", "Android"},
+				target{"application", "Device"},
+				firstParty{},
+				notice{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				target{"bin/bin1", "Android"},
+				target{"bin/bin1", "Device"},
+				target{"bin/bin1", "External"},
+				firstParty{},
+				notice{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				target{"lib/libd.so", "External"},
+				notice{},
+			},
+			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+		},
+		{
+			condition: "reciprocal",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				target{"highest.apex", "Android"},
+				target{"highest.apex/bin/bin1", "Android"},
+				target{"highest.apex/bin/bin1", "Device"},
+				target{"highest.apex/bin/bin1", "External"},
+				target{"highest.apex/bin/bin2", "Android"},
+				target{"highest.apex/lib/liba.so", "Device"},
+				target{"highest.apex/lib/libb.so", "Android"},
+				firstParty{},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				target{"container.zip", "Android"},
+				target{"container.zip/bin1", "Android"},
+				target{"container.zip/bin1", "Device"},
+				target{"container.zip/bin1", "External"},
+				target{"container.zip/bin2", "Android"},
+				target{"container.zip/liba.so", "Device"},
+				target{"container.zip/libb.so", "Android"},
+				firstParty{},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				target{"application", "Android"},
+				target{"application", "Device"},
+				firstParty{},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				target{"bin/bin1", "Android"},
+				target{"bin/bin1", "Device"},
+				target{"bin/bin1", "External"},
+				firstParty{},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				target{"lib/libd.so", "External"},
+				notice{},
+			},
+			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+		},
+		{
+			condition: "restricted",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				target{"highest.apex", "Android"},
+				target{"highest.apex/bin/bin1", "Android"},
+				target{"highest.apex/bin/bin1", "Device"},
+				target{"highest.apex/bin/bin1", "External"},
+				target{"highest.apex/bin/bin2", "Android"},
+				target{"highest.apex/bin/bin2", "Android"},
+				target{"highest.apex/lib/liba.so", "Device"},
+				target{"highest.apex/lib/libb.so", "Android"},
+				firstParty{},
+				restricted{},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				target{"container.zip", "Android"},
+				target{"container.zip/bin1", "Android"},
+				target{"container.zip/bin1", "Device"},
+				target{"container.zip/bin1", "External"},
+				target{"container.zip/bin2", "Android"},
+				target{"container.zip/bin2", "Android"},
+				target{"container.zip/liba.so", "Device"},
+				target{"container.zip/libb.so", "Android"},
+				firstParty{},
+				restricted{},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				target{"application", "Android"},
+				target{"application", "Device"},
+				firstParty{},
+				restricted{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				target{"bin/bin1", "Android"},
+				target{"bin/bin1", "Device"},
+				target{"bin/bin1", "External"},
+				firstParty{},
+				restricted{},
+				reciprocal{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				target{"lib/libd.so", "External"},
+				notice{},
+			},
+			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+		},
+		{
+			condition: "proprietary",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				target{"highest.apex", "Android"},
+				target{"highest.apex/bin/bin1", "Android"},
+				target{"highest.apex/bin/bin1", "Device"},
+				target{"highest.apex/bin/bin1", "External"},
+				target{"highest.apex/bin/bin2", "Android"},
+				target{"highest.apex/bin/bin2", "Android"},
+				target{"highest.apex/lib/liba.so", "Device"},
+				target{"highest.apex/lib/libb.so", "Android"},
+				restricted{},
+				firstParty{},
+				proprietary{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				target{"container.zip", "Android"},
+				target{"container.zip/bin1", "Android"},
+				target{"container.zip/bin1", "Device"},
+				target{"container.zip/bin1", "External"},
+				target{"container.zip/bin2", "Android"},
+				target{"container.zip/bin2", "Android"},
+				target{"container.zip/liba.so", "Device"},
+				target{"container.zip/libb.so", "Android"},
+				restricted{},
+				firstParty{},
+				proprietary{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				target{"application", "Android"},
+				target{"application", "Device"},
+				firstParty{},
+				proprietary{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				target{"bin/bin1", "Android"},
+				target{"bin/bin1", "Device"},
+				target{"bin/bin1", "External"},
+				firstParty{},
+				proprietary{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				target{"lib/libd.so", "External"},
+				notice{},
+			},
+			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.condition+" "+tt.name, func(t *testing.T) {
+			stdout := &bytes.Buffer{}
+			stderr := &bytes.Buffer{}
+
+			// Resolve roots relative to the condition's testdata directory.
+			rootFiles := make([]string, 0, len(tt.roots))
+			for _, r := range tt.roots {
+				rootFiles = append(rootFiles, "testdata/"+tt.condition+"/"+r)
+			}
+
+			var deps []string
+
+			ctx := context{stdout, stderr, os.DirFS("."), "", []string{tt.stripPrefix}, "", &deps}
+
+			err := xmlNotice(&ctx, rootFiles...)
+			if err != nil {
+				// NOTE(review): the return is unreachable — Fatalf stops
+				// the test.
+				t.Fatalf("xmlnotice: error = %v, stderr = %v", err, stderr)
+				return
+			}
+			if stderr.Len() > 0 {
+				t.Errorf("xmlnotice: gotStderr = %v, want none", stderr)
+			}
+
+			t.Logf("got stdout: %s", stdout.String())
+
+			t.Logf("want stdout: %s", matcherList(tt.expectedOut).String())
+
+			// Compare each non-blank body line to the matchers, skipping
+			// the XML declaration and the <licenses>/</licenses> wrappers
+			// (which are required to appear).
+			out := bufio.NewScanner(stdout)
+			lineno := 0
+			inBody := false
+			outOfBody := true
+			for out.Scan() {
+				line := out.Text()
+				if strings.TrimLeft(line, " ") == "" {
+					continue
+				}
+				if lineno == 0 && !inBody && `<?xml version="1.0" encoding="utf-8"?>` == line {
+					continue
+				}
+				if !inBody {
+					if "<licenses>" == line {
+						inBody = true
+						outOfBody = false
+					}
+					continue
+				} else if "</licenses>" == line {
+					outOfBody = true
+					continue
+				}
+
+				if len(tt.expectedOut) <= lineno {
+					t.Errorf("xmlnotice: unexpected output at line %d: got %q, want nothing (wanted %d lines)", lineno+1, line, len(tt.expectedOut))
+				} else if !tt.expectedOut[lineno].isMatch(line) {
+					t.Errorf("xmlnotice: unexpected output at line %d: got %q, want %q", lineno+1, line, tt.expectedOut[lineno].String())
+				}
+				lineno++
+			}
+			if !inBody {
+				t.Errorf("xmlnotice: missing <licenses> tag: got no <licenses> tag, want <licenses> tag on 2nd line")
+			}
+			if !outOfBody {
+				t.Errorf("xmlnotice: missing </licenses> tag: got no </licenses> tag, want </licenses> tag on last line")
+			}
+			for ; lineno < len(tt.expectedOut); lineno++ {
+				t.Errorf("xmlnotice: missing output line %d: ended early, want %q", lineno+1, tt.expectedOut[lineno].String())
+			}
+
+			t.Logf("got deps: %q", deps)
+
+			t.Logf("want deps: %q", tt.expectedDeps)
+
+			if g, w := deps, tt.expectedDeps; !reflect.DeepEqual(g, w) {
+				t.Errorf("unexpected deps, wanted:\n%s\ngot:\n%s\n",
+					strings.Join(w, "\n"), strings.Join(g, "\n"))
+			}
+		})
+	}
+}
+
+func escape(s string) string {
+	b := &bytes.Buffer{}
+	xml.EscapeText(b, []byte(s))
+	return b.String()
+}
+
+type matcher interface {
+	isMatch(line string) bool
+	String() string
+}
+
+type target struct {
+	name string
+	lib string
+}
+
+func (m target) isMatch(line string) bool {
+	groups := installTarget.FindStringSubmatch(line)
+	if len(groups) != 3 {
+		return false
+	}
+	return groups[1] == escape(m.lib) && strings.HasPrefix(groups[2], "out/") && strings.HasSuffix(groups[2], "/"+escape(m.name))
+}
+
+func (m target) String() string {
+	return `<file-name contentId="hash" lib="` + escape(m.lib) + `">` + escape(m.name) + `</file-name>`
+}
+
+func matchesText(line, text string) bool {
+	groups := licenseText.FindStringSubmatch(line)
+	if len(groups) != 2 {
+		return false
+	}
+	return groups[1] == escape(text + "\n")
+}
+
+func expectedText(text string) string {
+	return `<file-content contentId="hash"><![CDATA[` + escape(text + "\n") + `]]></file-content>`
+}
+
+type firstParty struct{}
+
+func (m firstParty) isMatch(line string) bool {
+	return matchesText(line, "&&&First Party License&&&")
+}
+
+func (m firstParty) String() string {
+	return expectedText("&&&First Party License&&&")
+}
+
+type notice struct{}
+
+func (m notice) isMatch(line string) bool {
+	return matchesText(line, "%%%Notice License%%%")
+}
+
+func (m notice) String() string {
+	return expectedText("%%%Notice License%%%")
+}
+
+type reciprocal struct{}
+
+func (m reciprocal) isMatch(line string) bool {
+	return matchesText(line, "$$$Reciprocal License$$$")
+}
+
+func (m reciprocal) String() string {
+	return expectedText("$$$Reciprocal License$$$")
+}
+
+type restricted struct{}
+
+func (m restricted) isMatch(line string) bool {
+	return matchesText(line, "###Restricted License###")
+}
+
+func (m restricted) String() string {
+	return expectedText("###Restricted License###")
+}
+
+type proprietary struct{}
+
+func (m proprietary) isMatch(line string) bool {
+	return matchesText(line, "@@@Proprietary License@@@")
+}
+
+func (m proprietary) String() string {
+	return expectedText("@@@Proprietary License@@@")
+}
+
+type matcherList []matcher
+
+func (l matcherList) String() string {
+	var sb strings.Builder
+	fmt.Fprintln(&sb, `<?xml version="1.0" encoding="utf-8"?>`)
+	fmt.Fprintln(&sb, `<licenses>`)
+	for _, m := range l {
+		s := m.String()
+		fmt.Fprintln(&sb, s)
+		if _, ok := m.(target); !ok {
+			fmt.Fprintln(&sb)
+		}
+	}
+	fmt.Fprintln(&sb, `</licenses>`)
+	return sb.String()
+}
diff --git a/tools/compliance/condition.go b/tools/compliance/condition.go
index 26b91ca..cfe6f82 100644
--- a/tools/compliance/condition.go
+++ b/tools/compliance/condition.go
@@ -61,16 +61,16 @@
 var (
 	// RecognizedConditionNames maps condition strings to LicenseCondition.
 	RecognizedConditionNames = map[string]LicenseCondition{
-		"unencumbered": UnencumberedCondition,
-		"permissive": PermissiveCondition,
-		"notice": NoticeCondition,
-		"reciprocal": ReciprocalCondition,
-		"restricted": RestrictedCondition,
+		"unencumbered":                        UnencumberedCondition,
+		"permissive":                          PermissiveCondition,
+		"notice":                              NoticeCondition,
+		"reciprocal":                          ReciprocalCondition,
+		"restricted":                          RestrictedCondition,
 		"restricted_with_classpath_exception": RestrictedClasspathExceptionCondition,
-		"restricted_allows_dynamic_linking": WeaklyRestrictedCondition,
-		"proprietary": ProprietaryCondition,
-		"by_exception_only": ByExceptionOnlyCondition,
-		"not_allowed": NotAllowedCondition,
+		"restricted_allows_dynamic_linking":   WeaklyRestrictedCondition,
+		"proprietary":                         ProprietaryCondition,
+		"by_exception_only":                   ByExceptionOnlyCondition,
+		"not_allowed":                         NotAllowedCondition,
 	}
 )
 
diff --git a/tools/compliance/conditionset_test.go b/tools/compliance/conditionset_test.go
index c7306e7..c91912f 100644
--- a/tools/compliance/conditionset_test.go
+++ b/tools/compliance/conditionset_test.go
@@ -33,82 +33,82 @@
 			conditions: []string{},
 			plus:       &[]string{},
 			matchingAny: map[string][]string{
-				"notice":     []string{},
-				"restricted": []string{},
+				"notice":                []string{},
+				"restricted":            []string{},
 				"restricted|reciprocal": []string{},
 			},
-			expected:   []string{},
+			expected: []string{},
 		},
 		{
 			name:       "emptyminusnothing",
 			conditions: []string{},
 			minus:      &[]string{},
 			matchingAny: map[string][]string{
-				"notice":     []string{},
-				"restricted": []string{},
+				"notice":                []string{},
+				"restricted":            []string{},
 				"restricted|reciprocal": []string{},
 			},
-			expected:   []string{},
+			expected: []string{},
 		},
 		{
 			name:       "emptyminusnotice",
 			conditions: []string{},
 			minus:      &[]string{"notice"},
 			matchingAny: map[string][]string{
-				"notice":     []string{},
-				"restricted": []string{},
+				"notice":                []string{},
+				"restricted":            []string{},
 				"restricted|reciprocal": []string{},
 			},
-			expected:   []string{},
+			expected: []string{},
 		},
 		{
-			name: "noticeonly",
+			name:       "noticeonly",
 			conditions: []string{"notice"},
 			matchingAny: map[string][]string{
-				"notice":     []string{"notice"},
-				"notice|proprietary":     []string{"notice"},
-				"restricted": []string{},
+				"notice":             []string{"notice"},
+				"notice|proprietary": []string{"notice"},
+				"restricted":         []string{},
 			},
 			expected: []string{"notice"},
 		},
 		{
-			name: "allnoticeonly",
+			name:       "allnoticeonly",
 			conditions: []string{"notice"},
-			plus: &[]string{"notice"},
+			plus:       &[]string{"notice"},
 			matchingAny: map[string][]string{
-				"notice":     []string{"notice"},
-				"notice|proprietary":     []string{"notice"},
-				"restricted": []string{},
+				"notice":             []string{"notice"},
+				"notice|proprietary": []string{"notice"},
+				"restricted":         []string{},
 			},
 			expected: []string{"notice"},
 		},
 		{
-			name: "emptyplusnotice",
+			name:       "emptyplusnotice",
 			conditions: []string{},
-			plus: &[]string{"notice"},
+			plus:       &[]string{"notice"},
 			matchingAny: map[string][]string{
-				"notice":     []string{"notice"},
-				"notice|proprietary":     []string{"notice"},
-				"restricted": []string{},
+				"notice":             []string{"notice"},
+				"notice|proprietary": []string{"notice"},
+				"restricted":         []string{},
 			},
 			expected: []string{"notice"},
 		},
 		{
-			name: "everything",
+			name:       "everything",
 			conditions: []string{"unencumbered", "permissive", "notice", "reciprocal", "restricted", "proprietary"},
-			plus: &[]string{"restricted_with_classpath_exception", "restricted_allows_dynamic_linking", "by_exception_only", "not_allowed"},
+			plus:       &[]string{"restricted_with_classpath_exception", "restricted_allows_dynamic_linking", "by_exception_only", "not_allowed"},
 			matchingAny: map[string][]string{
-				"unencumbered": []string{"unencumbered"},
-				"permissive":       []string{"permissive"},
-				"notice":     []string{"notice"},
-				"reciprocal":     []string{"reciprocal"},
-				"restricted":     []string{"restricted"},
-				"restricted_with_classpath_exception":     []string{"restricted_with_classpath_exception"},
-				"restricted_allows_dynamic_linking":     []string{"restricted_allows_dynamic_linking"},
-				"proprietary":     []string{"proprietary"},
-				"by_exception_only":     []string{"by_exception_only"},
-				"not_allowed":     []string{"not_allowed"},
-				"notice|proprietary":     []string{"notice", "proprietary"},
+				"unencumbered":                        []string{"unencumbered"},
+				"permissive":                          []string{"permissive"},
+				"notice":                              []string{"notice"},
+				"reciprocal":                          []string{"reciprocal"},
+				"restricted":                          []string{"restricted"},
+				"restricted_with_classpath_exception": []string{"restricted_with_classpath_exception"},
+				"restricted_allows_dynamic_linking":   []string{"restricted_allows_dynamic_linking"},
+				"proprietary":                         []string{"proprietary"},
+				"by_exception_only":                   []string{"by_exception_only"},
+				"not_allowed":                         []string{"not_allowed"},
+				"notice|proprietary":                  []string{"notice", "proprietary"},
 			},
 			expected: []string{
 				"unencumbered",
@@ -137,13 +137,13 @@
 				"by_exception_only",
 				"not_allowed",
 			},
-			plus: &[]string{},
+			plus:  &[]string{},
 			minus: &[]string{},
 			matchingAny: map[string][]string{
 				"unencumbered|permissive|notice": []string{"unencumbered", "permissive", "notice"},
-				"restricted|reciprocal":     []string{"reciprocal", "restricted"},
-				"proprietary|by_exception_only":     []string{"proprietary", "by_exception_only"},
-				"not_allowed":     []string{"not_allowed"},
+				"restricted|reciprocal":          []string{"reciprocal", "restricted"},
+				"proprietary|by_exception_only":  []string{"proprietary", "by_exception_only"},
+				"not_allowed":                    []string{"not_allowed"},
 			},
 			expected: []string{
 				"unencumbered",
@@ -159,21 +159,21 @@
 			},
 		},
 		{
-			name: "allbutone",
+			name:       "allbutone",
 			conditions: []string{"unencumbered", "permissive", "notice", "reciprocal", "restricted", "proprietary"},
-			plus: &[]string{"restricted_allows_dynamic_linking", "by_exception_only", "not_allowed"},
+			plus:       &[]string{"restricted_allows_dynamic_linking", "by_exception_only", "not_allowed"},
 			matchingAny: map[string][]string{
-				"unencumbered": []string{"unencumbered"},
-				"permissive":       []string{"permissive"},
-				"notice":     []string{"notice"},
-				"reciprocal":     []string{"reciprocal"},
-				"restricted":     []string{"restricted"},
-				"restricted_with_classpath_exception":     []string{},
-				"restricted_allows_dynamic_linking":     []string{"restricted_allows_dynamic_linking"},
-				"proprietary":     []string{"proprietary"},
-				"by_exception_only":     []string{"by_exception_only"},
-				"not_allowed":     []string{"not_allowed"},
-				"notice|proprietary":     []string{"notice", "proprietary"},
+				"unencumbered":                        []string{"unencumbered"},
+				"permissive":                          []string{"permissive"},
+				"notice":                              []string{"notice"},
+				"reciprocal":                          []string{"reciprocal"},
+				"restricted":                          []string{"restricted"},
+				"restricted_with_classpath_exception": []string{},
+				"restricted_allows_dynamic_linking":   []string{"restricted_allows_dynamic_linking"},
+				"proprietary":                         []string{"proprietary"},
+				"by_exception_only":                   []string{"by_exception_only"},
+				"not_allowed":                         []string{"not_allowed"},
+				"notice|proprietary":                  []string{"notice", "proprietary"},
 			},
 			expected: []string{
 				"unencumbered",
@@ -203,17 +203,17 @@
 			},
 			minus: &[]string{"restricted_allows_dynamic_linking"},
 			matchingAny: map[string][]string{
-				"unencumbered": []string{"unencumbered"},
-				"permissive":       []string{"permissive"},
-				"notice":     []string{"notice"},
-				"reciprocal":     []string{"reciprocal"},
-				"restricted":     []string{"restricted"},
-				"restricted_with_classpath_exception":     []string{"restricted_with_classpath_exception"},
-				"restricted_allows_dynamic_linking":     []string{},
-				"proprietary":     []string{"proprietary"},
-				"by_exception_only":     []string{"by_exception_only"},
-				"not_allowed":     []string{"not_allowed"},
-				"restricted|proprietary":     []string{"restricted", "proprietary"},
+				"unencumbered":                        []string{"unencumbered"},
+				"permissive":                          []string{"permissive"},
+				"notice":                              []string{"notice"},
+				"reciprocal":                          []string{"reciprocal"},
+				"restricted":                          []string{"restricted"},
+				"restricted_with_classpath_exception": []string{"restricted_with_classpath_exception"},
+				"restricted_allows_dynamic_linking":   []string{},
+				"proprietary":                         []string{"proprietary"},
+				"by_exception_only":                   []string{"by_exception_only"},
+				"not_allowed":                         []string{"not_allowed"},
+				"restricted|proprietary":              []string{"restricted", "proprietary"},
 			},
 			expected: []string{
 				"unencumbered",
@@ -254,35 +254,35 @@
 				"not_allowed",
 			},
 			matchingAny: map[string][]string{
-				"unencumbered": []string{},
-				"permissive":       []string{},
-				"notice":     []string{},
-				"reciprocal":     []string{},
-				"restricted":     []string{},
-				"restricted_with_classpath_exception":     []string{},
-				"restricted_allows_dynamic_linking":     []string{},
-				"proprietary":     []string{},
-				"by_exception_only":     []string{},
-				"not_allowed":     []string{},
-				"restricted|proprietary":     []string{},
+				"unencumbered":                        []string{},
+				"permissive":                          []string{},
+				"notice":                              []string{},
+				"reciprocal":                          []string{},
+				"restricted":                          []string{},
+				"restricted_with_classpath_exception": []string{},
+				"restricted_allows_dynamic_linking":   []string{},
+				"proprietary":                         []string{},
+				"by_exception_only":                   []string{},
+				"not_allowed":                         []string{},
+				"restricted|proprietary":              []string{},
 			},
 			expected: []string{},
 		},
 		{
-			name: "restrictedplus",
+			name:       "restrictedplus",
 			conditions: []string{"restricted", "restricted_with_classpath_exception", "restricted_allows_dynamic_linking"},
-			plus: &[]string{"permissive", "notice", "restricted", "proprietary"},
+			plus:       &[]string{"permissive", "notice", "restricted", "proprietary"},
 			matchingAny: map[string][]string{
-				"unencumbered":     []string{},
-				"permissive":     []string{"permissive"},
-				"notice":     []string{"notice"},
-				"restricted":     []string{"restricted"},
-				"restricted_with_classpath_exception":     []string{"restricted_with_classpath_exception"},
-				"restricted_allows_dynamic_linking":     []string{"restricted_allows_dynamic_linking"},
-				"proprietary":     []string{"proprietary"},
-				"restricted|proprietary":     []string{"restricted", "proprietary"},
-				"by_exception_only": []string{},
-				"proprietary|by_exception_only":     []string{"proprietary"},
+				"unencumbered":                        []string{},
+				"permissive":                          []string{"permissive"},
+				"notice":                              []string{"notice"},
+				"restricted":                          []string{"restricted"},
+				"restricted_with_classpath_exception": []string{"restricted_with_classpath_exception"},
+				"restricted_allows_dynamic_linking":   []string{"restricted_allows_dynamic_linking"},
+				"proprietary":                         []string{"proprietary"},
+				"restricted|proprietary":              []string{"restricted", "proprietary"},
+				"by_exception_only":                   []string{},
+				"proprietary|by_exception_only":       []string{"proprietary"},
 			},
 			expected: []string{"permissive", "notice", "restricted", "restricted_with_classpath_exception", "restricted_allows_dynamic_linking", "proprietary"},
 		},
@@ -363,7 +363,7 @@
 				}
 				actualConditions := actual.AsList()
 				if len(actualConditions) != len(expectedConditions) {
-					t.Errorf("len(MatchingAny(%d).AsList()):  got %d, want %d",
+					t.Errorf("len(MatchingAny(%s).AsList()):  got %d, want %d",
 						data, len(actualNames), len(expectedNames))
 				} else {
 					for i := 0; i < len(actualNames); i++ {
@@ -452,7 +452,7 @@
 				for i := 0; i < len(actualConditions); i++ {
 					if actualConditions[i] != expectedConditions[i] {
 						t.Errorf("actual.AsList()[%d]: got %s, want %s",
-							i, actualConditions[i], expectedConditions[i])
+							i, actualConditions[i].Name(), expectedConditions[i].Name())
 						break
 					}
 				}
@@ -552,7 +552,7 @@
 				for i := 0; i < len(actualConditions); i++ {
 					if actualConditions[i] != expectedConditions[i] {
 						t.Errorf("actual.AsList()[%d}: got %s, want %s",
-							i, actualConditions[i], expectedConditions[i])
+							i, actualConditions[i].Name(), expectedConditions[i].Name())
 						break
 					}
 				}
@@ -629,7 +629,7 @@
 			if checkExpected(cs, t) {
 				checkMatching(cs, t)
 			}
-			if checkExpectedSet(cs, t){
+			if checkExpectedSet(cs, t) {
 				checkMatchingSet(cs, t)
 			}
 		})
@@ -639,7 +639,7 @@
 			if checkExpected(cs, t) {
 				checkMatching(cs, t)
 			}
-			if checkExpectedSet(cs, t){
+			if checkExpectedSet(cs, t) {
 				checkMatchingSet(cs, t)
 			}
 		})
@@ -649,7 +649,7 @@
 			if checkExpected(cs, t) {
 				checkMatching(cs, t)
 			}
-			if checkExpectedSet(cs, t){
+			if checkExpectedSet(cs, t) {
 				checkMatchingSet(cs, t)
 			}
 		})
diff --git a/tools/compliance/go.mod b/tools/compliance/go.mod
new file mode 100644
index 0000000..61e2158
--- /dev/null
+++ b/tools/compliance/go.mod
@@ -0,0 +1,18 @@
+module android/soong/tools/compliance
+
+require google.golang.org/protobuf v0.0.0
+
+replace google.golang.org/protobuf v0.0.0 => ../../../../external/golang-protobuf
+
+require android/soong v0.0.0
+
+replace android/soong v0.0.0 => ../../../soong
+// Indirect deps from golang-protobuf
+exclude github.com/golang/protobuf v1.5.0
+
+replace github.com/google/go-cmp v0.5.5 => ../../../../external/go-cmp
+
+// Indirect dep from go-cmp
+exclude golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543
+
+go 1.18
diff --git a/tools/compliance/graph.go b/tools/compliance/graph.go
index 97fa657..e73ab46 100644
--- a/tools/compliance/graph.go
+++ b/tools/compliance/graph.go
@@ -148,7 +148,7 @@
 type TargetEdgeList []*TargetEdge
 
 // Len returns the count of the elmements in the list.
-func (l TargetEdgeList) Len() int      { return len(l) }
+func (l TargetEdgeList) Len() int { return len(l) }
 
 // Swap rearranges 2 elements so that each occupies the other's former position.
 func (l TargetEdgeList) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
@@ -171,7 +171,7 @@
 // edge with a context `ctx` defined by whatever process is creating the path.
 type TargetEdgePathSegment struct {
 	edge *TargetEdge
-	ctx interface{}
+	ctx  interface{}
 }
 
 // Target identifies the target that depends on the dependency.
@@ -245,6 +245,15 @@
 	*p = (*p)[:0]
 }
 
+// Copy makes a new path with the same value.
+func (p *TargetEdgePath) Copy() *TargetEdgePath {
+	result := make(TargetEdgePath, 0, len(*p))
+	for _, e := range *p {
+		result = append(result, e)
+	}
+	return &result
+}
+
 // String returns a string representation of the path: [n1 -> n2 -> ... -> nn].
 func (p *TargetEdgePath) String() string {
 	if p == nil {
@@ -357,6 +366,12 @@
 	return append([]string{}, tn.proto.Installed...)
 }
 
+// TargetFiles returns the list of files built or installed by the module or
+// target. (unordered)
+func (tn *TargetNode) TargetFiles() []string {
+	return append(tn.proto.Built, tn.proto.Installed...)
+}
+
 // InstallMap returns the list of path name transformations to make to move
 // files from their original location in the file system to their destination
 // inside a container. (unordered)
@@ -480,7 +495,7 @@
 type TargetNodeList []*TargetNode
 
 // Len returns the count of elements in the list.
-func (l TargetNodeList) Len() int      { return len(l) }
+func (l TargetNodeList) Len() int { return len(l) }
 
 // Swap rearranges 2 elements so that each occupies the other's former position.
 func (l TargetNodeList) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
diff --git a/tools/compliance/noticeindex.go b/tools/compliance/noticeindex.go
new file mode 100644
index 0000000..f082383
--- /dev/null
+++ b/tools/compliance/noticeindex.go
@@ -0,0 +1,697 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package compliance
+
+import (
+	"bufio"
+	"crypto/md5"
+	"fmt"
+	"io"
+	"io/fs"
+	"net/url"
+	"path/filepath"
+	"regexp"
+	"sort"
+	"strings"
+)
+
+const (
+	noProjectName = "\u2205"
+)
+
+var (
+	nameRegexp         = regexp.MustCompile(`^\s*name\s*:\s*"(.*)"\s*$`)
+	descRegexp         = regexp.MustCompile(`^\s*description\s*:\s*"(.*)"\s*$`)
+	versionRegexp      = regexp.MustCompile(`^\s*version\s*:\s*"(.*)"\s*$`)
+	licensesPathRegexp = regexp.MustCompile(`licen[cs]es?/`)
+)
+
+// NoticeIndex transforms license metadata into license text hashes, library
+// names, and install paths indexing them for fast lookup/iteration.
+type NoticeIndex struct {
+	// lg identifies the license graph to which the index applies.
+	lg *LicenseGraph
+	// rs identifies the set of resolutions upon which the index is based.
+	rs ResolutionSet
+	// shipped identifies the set of target nodes shipped directly or as derivative works.
+	shipped *TargetNodeSet
+	// rootFS locates the root of the file system from which to read the files.
+	rootFS fs.FS
+	// hash maps license text filenames to content hashes
+	hash map[string]hash
+	// text maps content hashes to content
+	text map[hash][]byte
+	// hashLibInstall maps hashes to libraries to install paths.
+	hashLibInstall map[hash]map[string]map[string]struct{}
+	// installHashLib maps install paths to libraries to hashes.
+	installHashLib map[string]map[hash]map[string]struct{}
+	// libHash maps libraries to hashes.
+	libHash map[string]map[hash]struct{}
+	// targetHash maps target nodes to hashes.
+	targetHashes map[*TargetNode]map[hash]struct{}
+	// projectName maps project directory names to project name text.
+	projectName map[string]string
+	// files lists all the files accessed during indexing
+	files []string
+}
+
+// IndexLicenseTexts creates a hashed index of license texts for `lg` and `rs`
+// using the files rooted at `rootFS`.
+func IndexLicenseTexts(rootFS fs.FS, lg *LicenseGraph, rs ResolutionSet) (*NoticeIndex, error) {
+	if rs == nil {
+		rs = ResolveNotices(lg)
+	}
+	ni := &NoticeIndex{
+		lg:             lg,
+		rs:             rs,
+		shipped:        ShippedNodes(lg),
+		rootFS:         rootFS,
+		hash:           make(map[string]hash),
+		text:           make(map[hash][]byte),
+		hashLibInstall: make(map[hash]map[string]map[string]struct{}),
+		installHashLib: make(map[string]map[hash]map[string]struct{}),
+		libHash:        make(map[string]map[hash]struct{}),
+		targetHashes:   make(map[*TargetNode]map[hash]struct{}),
+		projectName:    make(map[string]string),
+	}
+
+	// index adds all license texts for `tn` to the index.
+	index := func(tn *TargetNode) (map[hash]struct{}, error) {
+		if hashes, ok := ni.targetHashes[tn]; ok {
+			return hashes, nil
+		}
+		hashes := make(map[hash]struct{})
+		for _, text := range tn.LicenseTexts() {
+			fname := strings.SplitN(text, ":", 2)[0]
+			if _, ok := ni.hash[fname]; !ok {
+				err := ni.addText(fname)
+				if err != nil {
+					return nil, err
+				}
+			}
+			hash := ni.hash[fname]
+			if _, ok := hashes[hash]; !ok {
+				hashes[hash] = struct{}{}
+			}
+		}
+		ni.targetHashes[tn] = hashes
+		return hashes, nil
+	}
+
+	link := func(tn *TargetNode, hashes map[hash]struct{}, installPaths []string) {
+		for h := range hashes {
+			libName := ni.getLibName(tn, h)
+			if _, ok := ni.libHash[libName]; !ok {
+				ni.libHash[libName] = make(map[hash]struct{})
+			}
+			if _, ok := ni.hashLibInstall[h]; !ok {
+				ni.hashLibInstall[h] = make(map[string]map[string]struct{})
+			}
+			if _, ok := ni.libHash[libName][h]; !ok {
+				ni.libHash[libName][h] = struct{}{}
+			}
+			for _, installPath := range installPaths {
+				if _, ok := ni.installHashLib[installPath]; !ok {
+					ni.installHashLib[installPath] = make(map[hash]map[string]struct{})
+					ni.installHashLib[installPath][h] = make(map[string]struct{})
+					ni.installHashLib[installPath][h][libName] = struct{}{}
+				} else if _, ok = ni.installHashLib[installPath][h]; !ok {
+					ni.installHashLib[installPath][h] = make(map[string]struct{})
+					ni.installHashLib[installPath][h][libName] = struct{}{}
+				} else if _, ok = ni.installHashLib[installPath][h][libName]; !ok {
+					ni.installHashLib[installPath][h][libName] = struct{}{}
+				}
+				if _, ok := ni.hashLibInstall[h]; !ok {
+					ni.hashLibInstall[h] = make(map[string]map[string]struct{})
+					ni.hashLibInstall[h][libName] = make(map[string]struct{})
+					ni.hashLibInstall[h][libName][installPath] = struct{}{}
+				} else if _, ok = ni.hashLibInstall[h][libName]; !ok {
+					ni.hashLibInstall[h][libName] = make(map[string]struct{})
+					ni.hashLibInstall[h][libName][installPath] = struct{}{}
+				} else if _, ok = ni.hashLibInstall[h][libName][installPath]; !ok {
+					ni.hashLibInstall[h][libName][installPath] = struct{}{}
+				}
+			}
+		}
+	}
+
+	// returns error from walk below.
+	var err error
+
+	WalkTopDown(NoEdgeContext{}, lg, func(lg *LicenseGraph, tn *TargetNode, path TargetEdgePath) bool {
+		if err != nil {
+			return false
+		}
+		if !ni.shipped.Contains(tn) {
+			return false
+		}
+		installPaths := getInstallPaths(tn, path)
+		var hashes map[hash]struct{}
+		hashes, err = index(tn)
+		if err != nil {
+			return false
+		}
+		link(tn, hashes, installPaths)
+		if tn.IsContainer() {
+			return true
+		}
+
+		for _, r := range rs.Resolutions(tn) {
+			hashes, err = index(r.actsOn)
+			if err != nil {
+				return false
+			}
+			link(r.actsOn, hashes, installPaths)
+		}
+		return false
+	})
+
+	if err != nil {
+		return nil, err
+	}
+
+	return ni, nil
+}
+
+// Hashes returns an ordered channel of the hashed license texts.
+func (ni *NoticeIndex) Hashes() chan hash {
+	c := make(chan hash)
+	go func() {
+		libs := make([]string, 0, len(ni.libHash))
+		for libName := range ni.libHash {
+			libs = append(libs, libName)
+		}
+		sort.Strings(libs)
+		hashes := make(map[hash]struct{})
+		for _, libName := range libs {
+			hl := make([]hash, 0, len(ni.libHash[libName]))
+			for h := range ni.libHash[libName] {
+				if _, ok := hashes[h]; ok {
+					continue
+				}
+				hashes[h] = struct{}{}
+				hl = append(hl, h)
+			}
+			if len(hl) > 0 {
+				sort.Sort(hashList{ni, libName, "", &hl})
+				for _, h := range hl {
+					c <- h
+				}
+			}
+		}
+		close(c)
+	}()
+	return c
+}
+
+// InputNoticeFiles returns the list of files that were hashed during IndexLicenseTexts.
+func (ni *NoticeIndex) InputNoticeFiles() []string {
+	files := append([]string(nil), ni.files...)
+	sort.Strings(files)
+	return files
+}
+
+// HashLibs returns the ordered array of library names using the license text
+// hashed as `h`.
+func (ni *NoticeIndex) HashLibs(h hash) []string {
+	libs := make([]string, 0, len(ni.hashLibInstall[h]))
+	for libName := range ni.hashLibInstall[h] {
+		libs = append(libs, libName)
+	}
+	sort.Strings(libs)
+	return libs
+}
+
+// HashLibInstalls returns the ordered array of install paths referencing
+// library `libName` using the license text hashed as `h`.
+func (ni *NoticeIndex) HashLibInstalls(h hash, libName string) []string {
+	installs := make([]string, 0, len(ni.hashLibInstall[h][libName]))
+	for installPath := range ni.hashLibInstall[h][libName] {
+		installs = append(installs, installPath)
+	}
+	sort.Strings(installs)
+	return installs
+}
+
+// InstallPaths returns the ordered channel of indexed install paths.
+func (ni *NoticeIndex) InstallPaths() chan string {
+	c := make(chan string)
+	go func() {
+		paths := make([]string, 0, len(ni.installHashLib))
+		for path := range ni.installHashLib {
+			paths = append(paths, path)
+		}
+		sort.Strings(paths)
+		for _, installPath := range paths {
+			c <- installPath
+		}
+		close(c)
+	}()
+	return c
+}
+
+// InstallHashes returns the ordered array of hashes attached to `installPath`.
+func (ni *NoticeIndex) InstallHashes(installPath string) []hash {
+	result := make([]hash, 0, len(ni.installHashLib[installPath]))
+	for h := range ni.installHashLib[installPath] {
+		result = append(result, h)
+	}
+	if len(result) > 0 {
+		sort.Sort(hashList{ni, "", installPath, &result})
+	}
+	return result
+}
+
+// InstallHashLibs returns the ordered array of library names attached to
+// `installPath` as hash `h`.
+func (ni *NoticeIndex) InstallHashLibs(installPath string, h hash) []string {
+	result := make([]string, 0, len(ni.installHashLib[installPath][h]))
+	for libName := range ni.installHashLib[installPath][h] {
+		result = append(result, libName)
+	}
+	sort.Strings(result)
+	return result
+}
+
+// Libraries returns the ordered channel of indexed library names.
+func (ni *NoticeIndex) Libraries() chan string {
+	c := make(chan string)
+	go func() {
+		libs := make([]string, 0, len(ni.libHash))
+		for lib := range ni.libHash {
+			libs = append(libs, lib)
+		}
+		sort.Strings(libs)
+		for _, lib := range libs {
+			c <- lib
+		}
+		close(c)
+	}()
+	return c
+}
+
+// HashText returns the file content of the license text hashed as `h`.
+func (ni *NoticeIndex) HashText(h hash) []byte {
+	return ni.text[h]
+}
+
+// getLibName returns the name of the library associated with `noticeFor`.
+func (ni *NoticeIndex) getLibName(noticeFor *TargetNode, h hash) string {
+	for _, text := range noticeFor.LicenseTexts() {
+		if !strings.Contains(text, ":") {
+			if ni.hash[text].key != h.key {
+				continue
+			}
+			ln := ni.checkMetadataForLicenseText(noticeFor, text)
+			if len(ln) > 0 {
+				return ln
+			}
+			continue
+		}
+
+		fields := strings.SplitN(text, ":", 2)
+		fname, pname := fields[0], fields[1]
+		if ni.hash[fname].key != h.key {
+			continue
+		}
+
+		ln, err := url.QueryUnescape(pname)
+		if err != nil {
+			continue
+		}
+		return ln
+	}
+	// use name from METADATA if available
+	ln := ni.checkMetadata(noticeFor)
+	if len(ln) > 0 {
+		return ln
+	}
+	// use package_name: from license{} module if available
+	pn := noticeFor.PackageName()
+	if len(pn) > 0 {
+		return pn
+	}
+	for _, p := range noticeFor.Projects() {
+		if strings.HasPrefix(p, "prebuilts/") {
+			for _, licenseText := range noticeFor.LicenseTexts() {
+				if !strings.HasPrefix(licenseText, "prebuilts/") {
+					continue
+				}
+				if !strings.Contains(licenseText, ":") {
+					if ni.hash[licenseText].key != h.key {
+						continue
+					}
+				} else {
+					fields := strings.SplitN(licenseText, ":", 2)
+					fname := fields[0]
+					if ni.hash[fname].key != h.key {
+						continue
+					}
+				}
+				for r, prefix := range SafePrebuiltPrefixes {
+					match := r.FindString(licenseText)
+					if len(match) == 0 {
+						continue
+					}
+					strip := SafePathPrefixes[prefix]
+					if strip {
+						// strip entire prefix
+						match = licenseText[len(match):]
+					} else {
+						// strip from prebuilts/ until safe prefix
+						match = licenseText[len(match)-len(prefix):]
+					}
+					// remove LICENSE or NOTICE or other filename
+					li := strings.LastIndex(match, "/")
+					if li > 0 {
+						match = match[:li]
+					}
+					// remove *licenses/ path segment and subdirectory if in path
+					if offsets := licensesPathRegexp.FindAllStringIndex(match, -1); offsets != nil && offsets[len(offsets)-1][0] > 0 {
+						match = match[:offsets[len(offsets)-1][0]]
+						li = strings.LastIndex(match, "/")
+						if li > 0 {
+							match = match[:li]
+						}
+					}
+					return match
+				}
+				break
+			}
+		}
+		for prefix, strip := range SafePathPrefixes {
+			if strings.HasPrefix(p, prefix) {
+				if strip {
+					return p[len(prefix):]
+				} else {
+					return p
+				}
+			}
+		}
+	}
+	// strip off [./]meta_lic from license metadata path and extract base name
+	n := noticeFor.name[:len(noticeFor.name)-9]
+	li := strings.LastIndex(n, "/")
+	if li > 0 {
+		n = n[li+1:]
+	}
+	fi := strings.Index(n, "@")
+	if fi > 0 {
+		n = n[:fi]
+	}
+	return n
+}
+
+// checkMetadata tries to look up a library name from a METADATA file associated with `noticeFor`.
+func (ni *NoticeIndex) checkMetadata(noticeFor *TargetNode) string {
+	for _, p := range noticeFor.Projects() {
+		if name, ok := ni.projectName[p]; ok {
+			if name == noProjectName {
+				continue
+			}
+			return name
+		}
+		name, err := ni.checkMetadataFile(filepath.Join(p, "METADATA"))
+		if err != nil {
+			ni.projectName[p] = noProjectName
+			continue
+		}
+		if len(name) == 0 {
+			ni.projectName[p] = noProjectName
+			continue
+		}
+		ni.projectName[p] = name
+		return name
+	}
+	return ""
+}
+
+// checkMetadataForLicenseText tries to look up a library name from a METADATA file in the project containing `licenseText`.
+func (ni *NoticeIndex) checkMetadataForLicenseText(noticeFor *TargetNode, licenseText string) string {
+	p := ""
+	for _, proj := range noticeFor.Projects() {
+		if strings.HasPrefix(licenseText, proj) {
+			p = proj
+		}
+	}
+	if len(p) == 0 {
+		p = filepath.Dir(licenseText)
+		for {
+			fi, err := fs.Stat(ni.rootFS, filepath.Join(p, ".git"))
+			if err == nil && fi.IsDir() {
+				break
+			}
+			if strings.Contains(p, "/") && p != "/" {
+				p = filepath.Dir(p)
+				continue
+			}
+			return ""
+		}
+	}
+	if name, ok := ni.projectName[p]; ok {
+		if name == noProjectName {
+			return ""
+		}
+		return name
+	}
+	name, err := ni.checkMetadataFile(filepath.Join(p, "METADATA"))
+	if err == nil && len(name) > 0 {
+		ni.projectName[p] = name
+		return name
+	}
+	ni.projectName[p] = noProjectName
+	return ""
+}
+
+// checkMetadataFile tries to look up a library name from a METADATA file at `path`.
+func (ni *NoticeIndex) checkMetadataFile(path string) (string, error) {
+	f, err := ni.rootFS.Open(path)
+	if err != nil {
+		return "", err
+	}
+	name := ""
+	description := ""
+	version := ""
+	s := bufio.NewScanner(f)
+	for s.Scan() {
+		line := s.Text()
+		m := nameRegexp.FindStringSubmatch(line)
+		if m != nil {
+			if 1 < len(m) && m[1] != "" {
+				name = m[1]
+			}
+			if version != "" {
+				break
+			}
+			continue
+		}
+		m = versionRegexp.FindStringSubmatch(line)
+		if m != nil {
+			if 1 < len(m) && m[1] != "" {
+				version = m[1]
+			}
+			if name != "" {
+				break
+			}
+			continue
+		}
+		m = descRegexp.FindStringSubmatch(line)
+		if m != nil {
+			if 1 < len(m) && m[1] != "" {
+				description = m[1]
+			}
+		}
+	}
+	_ = s.Err()
+	_ = f.Close()
+	if name != "" {
+		if version != "" {
+			if version[0] == 'v' || version[0] == 'V' {
+				return name + "_" + version, nil
+			} else {
+				return name + "_v_" + version, nil
+			}
+		}
+		return name, nil
+	}
+	if description != "" {
+		return description, nil
+	}
+	return "", nil
+}
+
+// addText reads and indexes the content of a license text file.
+func (ni *NoticeIndex) addText(file string) error {
+	f, err := ni.rootFS.Open(filepath.Clean(file))
+	if err != nil {
+		return fmt.Errorf("error opening license text file %q: %w", file, err)
+	}
+
+	// read the file
+	text, err := io.ReadAll(f)
+	if err != nil {
+		return fmt.Errorf("error reading license text file %q: %w", file, err)
+	}
+
+	hash := hash{fmt.Sprintf("%x", md5.Sum(text))}
+	ni.hash[file] = hash
+	if _, alreadyPresent := ni.text[hash]; !alreadyPresent {
+		ni.text[hash] = text
+	}
+
+	ni.files = append(ni.files, file)
+
+	return nil
+}
+
+// getInstallPaths returns the names of the used dependencies mapped to their
+// installed locations.
+func getInstallPaths(attachesTo *TargetNode, path TargetEdgePath) []string {
+	if len(path) == 0 {
+		installs := attachesTo.Installed()
+		if 0 == len(installs) {
+			installs = attachesTo.Built()
+		}
+		return installs
+	}
+
+	var getInstalls func(path TargetEdgePath) []string
+
+	getInstalls = func(path TargetEdgePath) []string {
+		// deps contains the output targets from the dependencies in the path
+		var deps []string
+		if len(path) > 1 {
+			// recursively get the targets from the sub-path skipping 1 path segment
+			deps = getInstalls(path[1:])
+		} else {
+			// stop recursion at 1 path segment
+			deps = path[0].Dependency().TargetFiles()
+		}
+		size := 0
+		prefixes := path[0].Target().TargetFiles()
+		installMap := path[0].Target().InstallMap()
+		sources := path[0].Target().Sources()
+		for _, dep := range deps {
+			found := false
+			for _, source := range sources {
+				if strings.HasPrefix(dep, source) {
+					found = true
+					break
+				}
+			}
+			if !found {
+				continue
+			}
+			for _, im := range installMap {
+				if strings.HasPrefix(dep, im.FromPath) {
+					size += len(prefixes)
+					break
+				}
+			}
+		}
+
+		installs := make([]string, 0, size)
+		for _, dep := range deps {
+			found := false
+			for _, source := range sources {
+				if strings.HasPrefix(dep, source) {
+					found = true
+					break
+				}
+			}
+			if !found {
+				continue
+			}
+			for _, im := range installMap {
+				if strings.HasPrefix(dep, im.FromPath) {
+					for _, prefix := range prefixes {
+						installs = append(installs, prefix+im.ContainerPath+dep[len(im.FromPath):])
+					}
+					break
+				}
+			}
+		}
+		return installs
+	}
+	allInstalls := getInstalls(path)
+	installs := path[0].Target().Installed()
+	if len(installs) == 0 {
+		return allInstalls
+	}
+	result := make([]string, 0, len(allInstalls))
+	for _, install := range allInstalls {
+		for _, prefix := range installs {
+			if strings.HasPrefix(install, prefix) {
+				result = append(result, install)
+			}
+		}
+	}
+	return result
+}
+
+// hash is an opaque string derived from md5sum.
+type hash struct {
+	key string
+}
+
+// String returns the hexadecimal representation of the hash.
+func (h hash) String() string {
+	return h.key
+}
+
+// hashList orders an array of hashes
+type hashList struct {
+	ni          *NoticeIndex
+	libName     string
+	installPath string
+	hashes      *[]hash
+}
+
+// Len returns the count of elements in the slice.
+func (l hashList) Len() int { return len(*l.hashes) }
+
+// Swap rearranges 2 elements of the slice so that each occupies the other's
+// former position.
+func (l hashList) Swap(i, j int) { (*l.hashes)[i], (*l.hashes)[j] = (*l.hashes)[j], (*l.hashes)[i] }
+
+// Less returns true when the `i`th element is lexicographically less than
+// the `j`th element.
+func (l hashList) Less(i, j int) bool {
+	var insti, instj int
+	if len(l.libName) > 0 {
+		insti = len(l.ni.hashLibInstall[(*l.hashes)[i]][l.libName])
+		instj = len(l.ni.hashLibInstall[(*l.hashes)[j]][l.libName])
+	} else {
+		libsi := l.ni.InstallHashLibs(l.installPath, (*l.hashes)[i])
+		libsj := l.ni.InstallHashLibs(l.installPath, (*l.hashes)[j])
+		libsis := strings.Join(libsi, " ")
+		libsjs := strings.Join(libsj, " ")
+		if libsis != libsjs {
+			return libsis < libsjs
+		}
+	}
+	if insti == instj {
+		leni := len(l.ni.text[(*l.hashes)[i]])
+		lenj := len(l.ni.text[(*l.hashes)[j]])
+		if leni == lenj {
+			// all else equal, just order by hash value
+			return (*l.hashes)[i].key < (*l.hashes)[j].key
+		}
+		// put shortest texts first within same # of installs
+		return leni < lenj
+	}
+	// reverse order of # installs so that most popular appears first
+	return instj < insti
+}
diff --git a/tools/compliance/policy/policy.go b/tools/compliance/policy_policy.go
similarity index 86%
rename from tools/compliance/policy/policy.go
rename to tools/compliance/policy_policy.go
index 581912a..60bdf48 100644
--- a/tools/compliance/policy/policy.go
+++ b/tools/compliance/policy_policy.go
@@ -29,6 +29,31 @@
 		"toolchain": "toolchain",
 	}
 
+	// SafePathPrefixes maps the path prefixes presumed not to contain any
+	// proprietary or confidential pathnames to whether to strip the prefix
+	// from the path when used as the library name for notices.
+	SafePathPrefixes = map[string]bool{
+		"external/":    true,
+		"art/":         false,
+		"build/":       false,
+		"cts/":         false,
+		"dalvik/":      false,
+		"developers/":  false,
+		"development/": false,
+		"frameworks/":  false,
+		"packages/":    true,
+		"prebuilts/":   false,
+		"sdk/":         false,
+		"system/":      false,
+		"test/":        false,
+		"toolchain/":   false,
+		"tools/":       false,
+	}
+
+	// SafePrebuiltPrefixes maps the regular expression to match a prebuilt
+	// containing the path of a safe prefix to the safe prefix.
+	SafePrebuiltPrefixes = make(map[*regexp.Regexp]string)
+
 	// ImpliesUnencumbered lists the condition names representing an author attempt to disclaim copyright.
 	ImpliesUnencumbered = LicenseConditionSet(UnencumberedCondition)
 
@@ -37,8 +62,8 @@
 
 	// ImpliesNotice lists the condition names implying a notice or attribution policy.
 	ImpliesNotice = LicenseConditionSet(UnencumberedCondition | PermissiveCondition | NoticeCondition | ReciprocalCondition |
-			RestrictedCondition | RestrictedClasspathExceptionCondition | WeaklyRestrictedCondition |
-			ProprietaryCondition | ByExceptionOnlyCondition)
+		RestrictedCondition | RestrictedClasspathExceptionCondition | WeaklyRestrictedCondition |
+		ProprietaryCondition | ByExceptionOnlyCondition)
 
 	// ImpliesReciprocal lists the condition names implying a local source-sharing policy.
 	ImpliesReciprocal = LicenseConditionSet(ReciprocalCondition)
@@ -66,6 +91,15 @@
 	ccBySa       = regexp.MustCompile(`^SPDX-license-identifier-CC-BY.*-SA.*`)
 )
 
+func init() {
+	for prefix := range SafePathPrefixes {
+		if prefix == "prebuilts/" {
+			continue
+		}
+		r := regexp.MustCompile("^prebuilts/[^ ]*/" + prefix)
+		SafePrebuiltPrefixes[r] = prefix
+	}
+}
 
 // LicenseConditionSetFromNames returns a set containing the recognized `names` and
 // silently ignoring or discarding the unrecognized `names`.
@@ -109,7 +143,6 @@
 	return cs
 }
 
-
 // Resolution happens in three phases:
 //
 // 1. A bottom-up traversal propagates (restricted) license conditions up to
@@ -148,7 +181,6 @@
 // Not all restricted licenses are create equal. Some have special rules or
 // exceptions. e.g. LGPL or "with classpath excption".
 
-
 // depConditionsPropagatingToTarget returns the conditions which propagate up an
 // edge from dependency to target.
 //
@@ -170,7 +202,7 @@
 	}
 
 	result |= depConditions & LicenseConditionSet(RestrictedCondition)
-	if 0 != (depConditions & LicenseConditionSet(RestrictedClasspathExceptionCondition)) && !edgeNodesAreIndependentModules(e) {
+	if 0 != (depConditions&LicenseConditionSet(RestrictedClasspathExceptionCondition)) && !edgeNodesAreIndependentModules(e) {
 		result |= LicenseConditionSet(RestrictedClasspathExceptionCondition)
 	}
 	return result
@@ -186,7 +218,7 @@
 // aggregation, per policy it ceases to be a pure aggregation in the context of
 // that derivative work. The `treatAsAggregate` parameter will be false for
 // non-aggregates and for aggregates in non-aggregate contexts.
-func targetConditionsPropagatingToDep(lg *LicenseGraph, e *TargetEdge, targetConditions LicenseConditionSet, treatAsAggregate bool) LicenseConditionSet {
+func targetConditionsPropagatingToDep(lg *LicenseGraph, e *TargetEdge, targetConditions LicenseConditionSet, treatAsAggregate bool, conditionsFn TraceConditions) LicenseConditionSet {
 	result := targetConditions
 
 	// reverse direction -- none of these apply to things depended-on, only to targets depending-on.
@@ -200,7 +232,7 @@
 	if treatAsAggregate {
 		// If the author of a pure aggregate licenses it restricted, apply restricted to immediate dependencies.
 		// Otherwise, restricted does not propagate back down to dependencies.
-		if !LicenseConditionSetFromNames(e.target, e.target.proto.LicenseConditions...).MatchesAnySet(ImpliesRestricted) {
+		if !conditionsFn(e.target).MatchesAnySet(ImpliesRestricted) {
 			result = result.Difference(ImpliesRestricted)
 		}
 		return result
@@ -230,13 +262,12 @@
 	}
 
 	result &= LicenseConditionSet(RestrictedCondition | RestrictedClasspathExceptionCondition)
-	if 0 != (result & LicenseConditionSet(RestrictedClasspathExceptionCondition)) && edgeNodesAreIndependentModules(e) {
+	if 0 != (result&LicenseConditionSet(RestrictedClasspathExceptionCondition)) && edgeNodesAreIndependentModules(e) {
 		result &= LicenseConditionSet(RestrictedCondition)
 	}
 	return result
 }
 
-
 // edgeIsDynamicLink returns true for edges representing shared libraries
 // linked dynamically at runtime.
 func edgeIsDynamicLink(e *TargetEdge) bool {
diff --git a/tools/compliance/policy/policy_test.go b/tools/compliance/policy_policy_test.go
similarity index 98%
rename from tools/compliance/policy/policy_test.go
rename to tools/compliance/policy_policy_test.go
index 09e831c..27ce16c 100644
--- a/tools/compliance/policy/policy_test.go
+++ b/tools/compliance/policy_policy_test.go
@@ -226,7 +226,7 @@
 			fs[tt.edge.dep] = []byte(meta[tt.edge.dep])
 			lg, err := ReadLicenseGraph(&fs, stderr, []string{tt.edge.target})
 			if err != nil {
-				t.Errorf("unexpected error reading graph: %w", err)
+				t.Errorf("unexpected error reading graph: %s", err)
 				return
 			}
 			edge := lg.Edges()[0]
@@ -282,7 +282,7 @@
 						targetConditions = targetConditions.Union(otn.licenseConditions)
 					}
 					t.Logf("calculate dep conditions for edge=%s, target conditions=%v, treatAsAggregate=%v", edge.String(), targetConditions.Names(), tt.treatAsAggregate)
-					cs := targetConditionsPropagatingToDep(lg, edge, targetConditions, tt.treatAsAggregate)
+					cs := targetConditionsPropagatingToDep(lg, edge, targetConditions, tt.treatAsAggregate, AllResolutions)
 					t.Logf("calculated dep conditions as %v", cs.Names())
 					actual := cs.Names()
 					sort.Strings(actual)
diff --git a/tools/compliance/policy/resolve.go b/tools/compliance/policy_resolve.go
similarity index 82%
rename from tools/compliance/policy/resolve.go
rename to tools/compliance/policy_resolve.go
index 336894a..d357aec 100644
--- a/tools/compliance/policy/resolve.go
+++ b/tools/compliance/policy_resolve.go
@@ -18,6 +18,16 @@
 	"sync"
 )
 
+var (
+	// AllResolutions is a TraceConditions function that resolves all
+	// unfiltered license conditions.
+	AllResolutions = TraceConditions(func(tn *TargetNode) LicenseConditionSet { return tn.licenseConditions })
+)
+
+// TraceConditions is a function that returns the conditions to trace for each
+// target node `tn`.
+type TraceConditions func(tn *TargetNode) LicenseConditionSet
+
 // ResolveBottomUpConditions performs a bottom-up walk of the LicenseGraph
 // propagating conditions up the graph as necessary according to the properties
 // of each edge and according to each license condition in question.
@@ -29,6 +39,14 @@
 // not resolve the library and its transitive closure, but the later top-down
 // walk will.
 func ResolveBottomUpConditions(lg *LicenseGraph) {
+	TraceBottomUpConditions(lg, AllResolutions)
+}
+
+// TraceBottomUpConditions performs a bottom-up walk of the LicenseGraph
+// propagating trace conditions from `conditionsFn` up the graph as necessary
+// according to the properties of each edge and according to each license
+// condition in question.
+func TraceBottomUpConditions(lg *LicenseGraph, conditionsFn TraceConditions) {
 
 	// short-cut if already walked and cached
 	lg.mu.Lock()
@@ -70,7 +88,7 @@
 				// needs to walk again in non-aggregate context
 				delete(cmap, target)
 			} else {
-				target.resolution |= target.licenseConditions
+				target.resolution |= conditionsFn(target)
 				amap[target] = struct{}{}
 			}
 			if treatAsAggregate {
@@ -123,6 +141,13 @@
 // dependency except restricted. For restricted, the policy is to share the
 // source of any libraries linked to restricted code and to provide notice.
 func ResolveTopDownConditions(lg *LicenseGraph) {
+	TraceTopDownConditions(lg, AllResolutions)
+}
+
+// TraceTopDownConditions performs a top-down walk of the LicenseGraph
+// propagating trace conditions returned by `conditionsFn` from target to
+// dependency.
+func TraceTopDownConditions(lg *LicenseGraph, conditionsFn TraceConditions) {
 
 	// short-cut if already walked and cached
 	lg.mu.Lock()
@@ -139,7 +164,7 @@
 	lg.mu.Unlock()
 
 	// start with the conditions propagated up the graph
-	ResolveBottomUpConditions(lg)
+	TraceBottomUpConditions(lg, conditionsFn)
 
 	// amap contains the set of targets already walked. (guarded by mu)
 	amap := make(map[*TargetNode]struct{})
@@ -156,7 +181,7 @@
 	walk = func(fnode *TargetNode, cs LicenseConditionSet, treatAsAggregate bool) {
 		defer wg.Done()
 		mu.Lock()
-		fnode.resolution |= fnode.licenseConditions
+		fnode.resolution |= conditionsFn(fnode)
 		fnode.resolution |= cs
 		amap[fnode] = struct{}{}
 		if treatAsAggregate {
@@ -168,7 +193,7 @@
 		for _, edge := range fnode.edges {
 			func(edge *TargetEdge) {
 				// dcs holds the dpendency conditions inherited from the target
-				dcs := targetConditionsPropagatingToDep(lg, edge, cs, treatAsAggregate)
+				dcs := targetConditionsPropagatingToDep(lg, edge, cs, treatAsAggregate, conditionsFn)
 				dnode := edge.dependency
 				mu.Lock()
 				defer mu.Unlock()
diff --git a/tools/compliance/policy/resolve_test.go b/tools/compliance/policy_resolve_test.go
similarity index 98%
rename from tools/compliance/policy/resolve_test.go
rename to tools/compliance/policy_resolve_test.go
index 09dd7dd..f98e4cc 100644
--- a/tools/compliance/policy/resolve_test.go
+++ b/tools/compliance/policy_resolve_test.go
@@ -332,7 +332,7 @@
 			stderr := &bytes.Buffer{}
 			lg, err := toGraph(stderr, tt.roots, tt.edges)
 			if err != nil {
-				t.Errorf("unexpected test data error: got %w, want no error", err)
+				t.Errorf("unexpected test data error: got %s, want no error", err)
 				return
 			}
 
@@ -643,7 +643,7 @@
 			stderr := &bytes.Buffer{}
 			lg, err := toGraph(stderr, tt.roots, tt.edges)
 			if err != nil {
-				t.Errorf("unexpected test data error: got %w, want no error", err)
+				t.Errorf("unexpected test data error: got %s, want no error", err)
 				return
 			}
 
diff --git a/tools/compliance/policy/resolvenotices.go b/tools/compliance/policy_resolvenotices.go
similarity index 100%
rename from tools/compliance/policy/resolvenotices.go
rename to tools/compliance/policy_resolvenotices.go
diff --git a/tools/compliance/policy/resolvenotices_test.go b/tools/compliance/policy_resolvenotices_test.go
similarity index 99%
rename from tools/compliance/policy/resolvenotices_test.go
rename to tools/compliance/policy_resolvenotices_test.go
index 275c0a5..cd9dd71 100644
--- a/tools/compliance/policy/resolvenotices_test.go
+++ b/tools/compliance/policy_resolvenotices_test.go
@@ -457,7 +457,7 @@
 			stderr := &bytes.Buffer{}
 			lg, err := toGraph(stderr, tt.roots, tt.edges)
 			if err != nil {
-				t.Errorf("unexpected test data error: got %w, want no error", err)
+				t.Errorf("unexpected test data error: got %s, want no error", err)
 				return
 			}
 			expectedRs := toResolutionSet(lg, tt.expectedResolutions)
diff --git a/tools/compliance/policy/resolveprivacy.go b/tools/compliance/policy_resolveprivacy.go
similarity index 100%
rename from tools/compliance/policy/resolveprivacy.go
rename to tools/compliance/policy_resolveprivacy.go
diff --git a/tools/compliance/policy/resolveprivacy_test.go b/tools/compliance/policy_resolveprivacy_test.go
similarity index 96%
rename from tools/compliance/policy/resolveprivacy_test.go
rename to tools/compliance/policy_resolveprivacy_test.go
index 2072d22..e8c953a 100644
--- a/tools/compliance/policy/resolveprivacy_test.go
+++ b/tools/compliance/policy_resolveprivacy_test.go
@@ -76,7 +76,7 @@
 			stderr := &bytes.Buffer{}
 			lg, err := toGraph(stderr, tt.roots, tt.edges)
 			if err != nil {
-				t.Errorf("unexpected test data error: got %w, want no error", err)
+				t.Errorf("unexpected test data error: got %s, want no error", err)
 				return
 			}
 			expectedRs := toResolutionSet(lg, tt.expectedResolutions)
diff --git a/tools/compliance/policy/resolveshare.go b/tools/compliance/policy_resolveshare.go
similarity index 100%
rename from tools/compliance/policy/resolveshare.go
rename to tools/compliance/policy_resolveshare.go
diff --git a/tools/compliance/policy/resolveshare_test.go b/tools/compliance/policy_resolveshare_test.go
similarity index 98%
rename from tools/compliance/policy/resolveshare_test.go
rename to tools/compliance/policy_resolveshare_test.go
index f73888d..c451b86 100644
--- a/tools/compliance/policy/resolveshare_test.go
+++ b/tools/compliance/policy_resolveshare_test.go
@@ -286,7 +286,7 @@
 			stderr := &bytes.Buffer{}
 			lg, err := toGraph(stderr, tt.roots, tt.edges)
 			if err != nil {
-				t.Errorf("unexpected test data error: got %w, want no error", err)
+				t.Errorf("unexpected test data error: got %s, want no error", err)
 				return
 			}
 			expectedRs := toResolutionSet(lg, tt.expectedResolutions)
diff --git a/tools/compliance/policy/shareprivacyconflicts.go b/tools/compliance/policy_shareprivacyconflicts.go
similarity index 100%
rename from tools/compliance/policy/shareprivacyconflicts.go
rename to tools/compliance/policy_shareprivacyconflicts.go
diff --git a/tools/compliance/policy/shareprivacyconflicts_test.go b/tools/compliance/policy_shareprivacyconflicts_test.go
similarity index 96%
rename from tools/compliance/policy/shareprivacyconflicts_test.go
rename to tools/compliance/policy_shareprivacyconflicts_test.go
index ad3f3f4..069daa2 100644
--- a/tools/compliance/policy/shareprivacyconflicts_test.go
+++ b/tools/compliance/policy_shareprivacyconflicts_test.go
@@ -24,7 +24,7 @@
 type byConflict []SourceSharePrivacyConflict
 
 // Len returns the count of elements in the slice.
-func (l byConflict) Len() int      { return len(l) }
+func (l byConflict) Len() int { return len(l) }
 
 // Swap rearranged 2 elements so that each occupies the other's former
 // position.
@@ -99,7 +99,7 @@
 			stderr := &bytes.Buffer{}
 			lg, err := toGraph(stderr, tt.roots, tt.edges)
 			if err != nil {
-				t.Errorf("unexpected test data error: got %w, want no error", err)
+				t.Errorf("unexpected test data error: got %s, want no error", err)
 				return
 			}
 			expectedConflicts := toConflictList(lg, tt.expectedConflicts)
diff --git a/tools/compliance/policy/shipped.go b/tools/compliance/policy_shipped.go
similarity index 100%
rename from tools/compliance/policy/shipped.go
rename to tools/compliance/policy_shipped.go
diff --git a/tools/compliance/policy/shipped_test.go b/tools/compliance/policy_shipped_test.go
similarity index 83%
rename from tools/compliance/policy/shipped_test.go
rename to tools/compliance/policy_shipped_test.go
index 718e56f..3ae9b46 100644
--- a/tools/compliance/policy/shipped_test.go
+++ b/tools/compliance/policy_shipped_test.go
@@ -29,30 +29,30 @@
 		expectedNodes []string
 	}{
 		{
-			name:      "singleton",
-			roots:     []string{"apacheLib.meta_lic"},
-			edges: []annotated{},
+			name:          "singleton",
+			roots:         []string{"apacheLib.meta_lic"},
+			edges:         []annotated{},
 			expectedNodes: []string{"apacheLib.meta_lic"},
 		},
 		{
-			name:      "simplebinary",
-			roots:     []string{"apacheBin.meta_lic"},
+			name:  "simplebinary",
+			roots: []string{"apacheBin.meta_lic"},
 			edges: []annotated{
 				{"apacheBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedNodes: []string{"apacheBin.meta_lic", "apacheLib.meta_lic"},
 		},
 		{
-			name:      "simpledynamic",
-			roots:     []string{"apacheBin.meta_lic"},
+			name:  "simpledynamic",
+			roots: []string{"apacheBin.meta_lic"},
 			edges: []annotated{
 				{"apacheBin.meta_lic", "lgplLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedNodes: []string{"apacheBin.meta_lic"},
 		},
 		{
-			name:      "container",
-			roots:     []string{"apacheContainer.meta_lic"},
+			name:  "container",
+			roots: []string{"apacheContainer.meta_lic"},
 			edges: []annotated{
 				{"apacheContainer.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 				{"apacheContainer.meta_lic", "gplLib.meta_lic", []string{"static"}},
@@ -64,8 +64,8 @@
 			},
 		},
 		{
-			name:      "binary",
-			roots:     []string{"apacheBin.meta_lic"},
+			name:  "binary",
+			roots: []string{"apacheBin.meta_lic"},
 			edges: []annotated{
 				{"apacheBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 				{"apacheBin.meta_lic", "gplLib.meta_lic", []string{"static"}},
@@ -77,8 +77,8 @@
 			},
 		},
 		{
-			name:      "binarydynamic",
-			roots:     []string{"apacheBin.meta_lic"},
+			name:  "binarydynamic",
+			roots: []string{"apacheBin.meta_lic"},
 			edges: []annotated{
 				{"apacheBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 				{"apacheBin.meta_lic", "gplLib.meta_lic", []string{"dynamic"}},
@@ -89,8 +89,8 @@
 			},
 		},
 		{
-			name:      "containerdeep",
-			roots:     []string{"apacheContainer.meta_lic"},
+			name:  "containerdeep",
+			roots: []string{"apacheContainer.meta_lic"},
 			edges: []annotated{
 				{"apacheContainer.meta_lic", "apacheBin.meta_lic", []string{"static"}},
 				{"apacheBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
@@ -108,7 +108,7 @@
 			stderr := &bytes.Buffer{}
 			lg, err := toGraph(stderr, tt.roots, tt.edges)
 			if err != nil {
-				t.Errorf("unexpected test data error: got %w, want no error", err)
+				t.Errorf("unexpected test data error: got %s, want no error", err)
 				return
 			}
 			t.Logf("graph:")
@@ -127,7 +127,7 @@
 
 			t.Logf("sorted nodes: [%s]", strings.Join(actualNodes, ", "))
 			t.Logf("expected nodes: [%s]", strings.Join(expectedNodes, ", "))
-                        if len(expectedNodes) != len(actualNodes) {
+			if len(expectedNodes) != len(actualNodes) {
 				t.Errorf("unexpected number of shipped nodes: %d nodes, want %d nodes",
 					len(actualNodes), len(expectedNodes))
 				return
diff --git a/tools/compliance/policy/walk.go b/tools/compliance/policy_walk.go
similarity index 99%
rename from tools/compliance/policy/walk.go
rename to tools/compliance/policy_walk.go
index 3e73088..f4d7bba 100644
--- a/tools/compliance/policy/walk.go
+++ b/tools/compliance/policy_walk.go
@@ -83,7 +83,7 @@
 // specific set of conditions.
 type resolutionKey struct {
 	target *TargetNode
-	cs LicenseConditionSet
+	cs     LicenseConditionSet
 }
 
 // WalkResolutionsForCondition performs a top-down walk of the LicenseGraph
diff --git a/tools/compliance/policy/walk_test.go b/tools/compliance/policy_walk_test.go
similarity index 99%
rename from tools/compliance/policy/walk_test.go
rename to tools/compliance/policy_walk_test.go
index a2ec6e7..92867f9 100644
--- a/tools/compliance/policy/walk_test.go
+++ b/tools/compliance/policy_walk_test.go
@@ -620,7 +620,7 @@
 			stderr := &bytes.Buffer{}
 			lg, err := toGraph(stderr, tt.roots, tt.edges)
 			if err != nil {
-				t.Errorf("unexpected test data error: got %w, want no error", err)
+				t.Errorf("unexpected test data error: got %s, want no error", err)
 				return
 			}
 			expectedRs := toResolutionSet(lg, tt.expectedResolutions)
@@ -1228,7 +1228,7 @@
 			stderr := &bytes.Buffer{}
 			lg, err := toGraph(stderr, tt.roots, tt.edges)
 			if err != nil {
-				t.Errorf("unexpected test data error: got %w, want no error", err)
+				t.Errorf("unexpected test data error: got %s, want no error", err)
 				return
 			}
 			expectedAs := toActionSet(lg, tt.expectedActions)
diff --git a/tools/compliance/readgraph.go b/tools/compliance/readgraph.go
index c809a96..6f91e1c 100644
--- a/tools/compliance/readgraph.go
+++ b/tools/compliance/readgraph.go
@@ -185,7 +185,7 @@
 
 // addDependencies converts the proto AnnotatedDependencies into `edges`
 func addDependencies(lg *LicenseGraph, tn *TargetNode) error {
-	tn.edges = make(TargetEdgeList, 0,len(tn.proto.Deps))
+	tn.edges = make(TargetEdgeList, 0, len(tn.proto.Deps))
 	for _, ad := range tn.proto.Deps {
 		dependency := ad.GetFile()
 		if len(dependency) == 0 {
diff --git a/tools/compliance/readgraph_test.go b/tools/compliance/readgraph_test.go
index 6ff7a6c..bcf9f39 100644
--- a/tools/compliance/readgraph_test.go
+++ b/tools/compliance/readgraph_test.go
@@ -88,13 +88,13 @@
 			lg, err := ReadLicenseGraph(tt.fs, stderr, tt.roots)
 			if err != nil {
 				if len(tt.expectedError) == 0 {
-					t.Errorf("unexpected error: got %w, want no error", err)
+					t.Errorf("unexpected error: got %s, want no error", err)
 				} else if !strings.Contains(err.Error(), tt.expectedError) {
-					t.Errorf("unexpected error: got %w, want %q", err, tt.expectedError)
+					t.Errorf("unexpected error: got %s, want %q", err, tt.expectedError)
 				}
 				return
 			}
-			if 0 < len(tt.expectedError) {
+			if len(tt.expectedError) > 0 {
 				t.Errorf("unexpected success: got no error, want %q err", tt.expectedError)
 				return
 			}
diff --git a/tools/compliance/resolution.go b/tools/compliance/resolution.go
index 6f15ca3..acc61e2 100644
--- a/tools/compliance/resolution.go
+++ b/tools/compliance/resolution.go
@@ -66,7 +66,7 @@
 type ResolutionList []Resolution
 
 // Len returns the count of elements in the list.
-func (l ResolutionList) Len() int      { return len(l) }
+func (l ResolutionList) Len() int { return len(l) }
 
 // Swap rearranges 2 elements so that each occupies the other's former position.
 func (l ResolutionList) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
diff --git a/tools/compliance/resolutionset.go b/tools/compliance/resolutionset.go
index 893ef26..7c8f333 100644
--- a/tools/compliance/resolutionset.go
+++ b/tools/compliance/resolutionset.go
@@ -65,7 +65,6 @@
 	return result
 }
 
-
 // AttachesToTarget returns true if the set contains conditions that
 // are `attachedTo`.
 func (rs ResolutionSet) AttachesToTarget(target *TargetNode) bool {
@@ -73,7 +72,6 @@
 	return isPresent
 }
 
-
 // Resolutions returns the list of resolutions that `attachedTo`
 // target must resolve. Returns empty list if no conditions apply.
 func (rs ResolutionSet) Resolutions(attachesTo *TargetNode) ResolutionList {
@@ -88,6 +86,22 @@
 	return result
 }
 
+// AllActions returns the set of actions required to resolve the set omitting
+// the attachment.
+func (rs ResolutionSet) AllActions() ActionSet {
+	result := make(ActionSet)
+	for _, as := range rs {
+		for actsOn, cs := range as {
+			if _, ok := result[actsOn]; ok {
+				result[actsOn] = cs.Union(result[actsOn])
+			} else {
+				result[actsOn] = cs
+			}
+		}
+	}
+	return result
+}
+
 // String returns a human-readable string representation of the set.
 func (rs ResolutionSet) String() string {
 	var sb strings.Builder
diff --git a/tools/compliance/test_util.go b/tools/compliance/test_util.go
index 8f4088a..26d7461 100644
--- a/tools/compliance/test_util.go
+++ b/tools/compliance/test_util.go
@@ -33,56 +33,56 @@
 
 	// GPL starts a test metadata file for GPL 2.0 licensing.
 	GPL = `` +
-`package_name: "Free Software"
+		`package_name: "Free Software"
 license_kinds: "SPDX-license-identifier-GPL-2.0"
 license_conditions: "restricted"
 `
 
 	// Classpath starts a test metadata file for GPL 2.0 with classpath exception licensing.
 	Classpath = `` +
-`package_name: "Free Software"
+		`package_name: "Free Software"
 license_kinds: "SPDX-license-identifier-GPL-2.0-with-classpath-exception"
 license_conditions: "restricted"
 `
 
 	// DependentModule starts a test metadata file for a module in the same package as `Classpath`.
 	DependentModule = `` +
-`package_name: "Free Software"
+		`package_name: "Free Software"
 license_kinds: "SPDX-license-identifier-MIT"
 license_conditions: "notice"
 `
 
 	// LGPL starts a test metadata file for a module with LGPL 2.0 licensing.
 	LGPL = `` +
-`package_name: "Free Library"
+		`package_name: "Free Library"
 license_kinds: "SPDX-license-identifier-LGPL-2.0"
 license_conditions: "restricted"
 `
 
 	// MPL starts a test metadata file for a module with MPL 2.0 reciprical licensing.
 	MPL = `` +
-`package_name: "Reciprocal"
+		`package_name: "Reciprocal"
 license_kinds: "SPDX-license-identifier-MPL-2.0"
 license_conditions: "reciprocal"
 `
 
 	// MIT starts a test metadata file for a module with generic notice (MIT) licensing.
 	MIT = `` +
-`package_name: "Android"
+		`package_name: "Android"
 license_kinds: "SPDX-license-identifier-MIT"
 license_conditions: "notice"
 `
 
 	// Proprietary starts a test metadata file for a module with proprietary licensing.
 	Proprietary = `` +
-`package_name: "Android"
+		`package_name: "Android"
 license_kinds: "legacy_proprietary"
 license_conditions: "proprietary"
 `
 
 	// ByException starts a test metadata file for a module with by_exception_only licensing.
 	ByException = `` +
-`package_name: "Special"
+		`package_name: "Special"
 license_kinds: "legacy_by_exception_only"
 license_conditions: "by_exception_only"
 `
@@ -91,22 +91,22 @@
 var (
 	// meta maps test file names to metadata file content without dependencies.
 	meta = map[string]string{
-		"apacheBin.meta_lic": AOSP,
-		"apacheLib.meta_lic": AOSP,
-		"apacheContainer.meta_lic": AOSP + "is_container: true\n",
-		"dependentModule.meta_lic": DependentModule,
+		"apacheBin.meta_lic":                 AOSP,
+		"apacheLib.meta_lic":                 AOSP,
+		"apacheContainer.meta_lic":           AOSP + "is_container: true\n",
+		"dependentModule.meta_lic":           DependentModule,
 		"gplWithClasspathException.meta_lic": Classpath,
-		"gplBin.meta_lic": GPL,
-		"gplLib.meta_lic": GPL,
-		"gplContainer.meta_lic": GPL + "is_container: true\n",
-		"lgplBin.meta_lic": LGPL,
-		"lgplLib.meta_lic": LGPL,
-		"mitBin.meta_lic": MIT,
-		"mitLib.meta_lic": MIT,
-		"mplBin.meta_lic": MPL,
-		"mplLib.meta_lic": MPL,
-		"proprietary.meta_lic": Proprietary,
-		"by_exception.meta_lic": ByException,
+		"gplBin.meta_lic":                    GPL,
+		"gplLib.meta_lic":                    GPL,
+		"gplContainer.meta_lic":              GPL + "is_container: true\n",
+		"lgplBin.meta_lic":                   LGPL,
+		"lgplLib.meta_lic":                   LGPL,
+		"mitBin.meta_lic":                    MIT,
+		"mitLib.meta_lic":                    MIT,
+		"mplBin.meta_lic":                    MPL,
+		"mplLib.meta_lic":                    MPL,
+		"proprietary.meta_lic":               Proprietary,
+		"by_exception.meta_lic":              ByException,
 	}
 )
 
@@ -204,7 +204,7 @@
 type byEdge []edge
 
 // Len returns the count of elements in the slice.
-func (l byEdge) Len() int      { return len(l) }
+func (l byEdge) Len() int { return len(l) }
 
 // Swap rearranges 2 elements of the slice so that each occupies the other's
 // former position.
@@ -219,7 +219,6 @@
 	return l[i].target < l[j].target
 }
 
-
 // annotated describes annotated test data edges to define test graphs.
 type annotated struct {
 	target, dep string
@@ -240,7 +239,7 @@
 	if e.dep != other.dep {
 		return false
 	}
-        if len(e.annotations) != len(other.annotations) {
+	if len(e.annotations) != len(other.annotations) {
 		return false
 	}
 	a1 := append([]string{}, e.annotations...)
@@ -401,7 +400,7 @@
 }
 
 // Len returns the count of elements in the slice.
-func (l actionList) Len() int      { return len(l) }
+func (l actionList) Len() int { return len(l) }
 
 // Swap rearranges 2 elements of the slice so that each occupies the other's
 // former position.
@@ -467,10 +466,10 @@
 		oprivacy := fields[0]
 		cprivacy := fields[1]
 		result = append(result, SourceSharePrivacyConflict{
-				newTestNode(lg, c.sourceNode),
-				newTestCondition(lg, oshare, cshare),
-				newTestCondition(lg, oprivacy, cprivacy),
-			})
+			newTestNode(lg, c.sourceNode),
+			newTestCondition(lg, oshare, cshare),
+			newTestCondition(lg, oprivacy, cprivacy),
+		})
 	}
 	return result
 }
diff --git a/tools/droiddoc/Android.bp b/tools/droiddoc/Android.bp
index efd30c1..71d4939 100644
--- a/tools/droiddoc/Android.bp
+++ b/tools/droiddoc/Android.bp
@@ -14,15 +14,22 @@
 
 package {
     // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "build_make_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    //   SPDX-license-identifier-BSD
-    //   SPDX-license-identifier-CC-BY
-    //   SPDX-license-identifier-GPL
-    //   SPDX-license-identifier-MIT
-    default_applicable_licenses: ["build_make_license"],
+    default_applicable_licenses: [
+        "Android-Apache-2.0",
+        "build_make_tools_droiddoc_license",
+    ],
+}
+
+license {
+    name: "build_make_tools_droiddoc_license",
+    package_name: "Android Droiddoc Templates",
+    license_kinds: [
+        "SPDX-license-identifier-BSD",
+        "SPDX-license-identifier-CC-BY-2.5",
+        "SPDX-license-identifier-GPL-3.0",
+        "SPDX-license-identifier-MIT",
+    ],
+    license_text: ["LICENSE"],
 }
 
 droiddoc_exported_dir {
diff --git a/tools/droiddoc/LICENSE b/tools/droiddoc/LICENSE
new file mode 100644
index 0000000..b591dde
--- /dev/null
+++ b/tools/droiddoc/LICENSE
@@ -0,0 +1,1095 @@
+-----------------------------------------------------
+microtemplate.js
+
+// Simple JavaScript Templating
+// John Resig - http://ejohn.org/ - MIT Licensed
+
+-----------------------------------------------------
+jquery-history.js
+
+/**
+ * jQuery history event v0.1
+ * Copyright (c) 2008 Tom Rodenberg <tarodenberg gmail com>
+ * Licensed under the GPL (http://www.gnu.org/licenses/gpl.html) license.
+ */
+
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+  The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works.  By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.  We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors.  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+  To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights.  Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received.  You must make sure that they, too, receive
+or can get the source code.  And you must show them these terms so they
+know their rights.
+
+  Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+  For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software.  For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+  Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so.  This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software.  The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable.  Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products.  If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+  Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary.  To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+                       TERMS AND CONDITIONS
+
+  0. Definitions.
+
+  "This License" refers to version 3 of the GNU General Public License.
+
+  "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+  "The Program" refers to any copyrightable work licensed under this
+License.  Each licensee is addressed as "you".  "Licensees" and
+"recipients" may be individuals or organizations.
+
+  To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy.  The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+  A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+  To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy.  Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+  To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies.  Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+  An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License.  If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+  1. Source Code.
+
+  The "source code" for a work means the preferred form of the work
+for making modifications to it.  "Object code" means any non-source
+form of a work.
+
+  A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+  The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form.  A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+  The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities.  However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work.  For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+  The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+  The Corresponding Source for a work in source code form is that
+same work.
+
+  2. Basic Permissions.
+
+  All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met.  This License explicitly affirms your unlimited
+permission to run the unmodified Program.  The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work.  This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+  You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force.  You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright.  Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+  Conveying under any other circumstances is permitted solely under
+the conditions stated below.  Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+  3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+  No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+  When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+  4. Conveying Verbatim Copies.
+
+  You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+  You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+  5. Conveying Modified Source Versions.
+
+  You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+    a) The work must carry prominent notices stating that you modified
+    it, and giving a relevant date.
+
+    b) The work must carry prominent notices stating that it is
+    released under this License and any conditions added under section
+    7.  This requirement modifies the requirement in section 4 to
+    "keep intact all notices".
+
+    c) You must license the entire work, as a whole, under this
+    License to anyone who comes into possession of a copy.  This
+    License will therefore apply, along with any applicable section 7
+    additional terms, to the whole of the work, and all its parts,
+    regardless of how they are packaged.  This License gives no
+    permission to license the work in any other way, but it does not
+    invalidate such permission if you have separately received it.
+
+    d) If the work has interactive user interfaces, each must display
+    Appropriate Legal Notices; however, if the Program has interactive
+    interfaces that do not display Appropriate Legal Notices, your
+    work need not make them do so.
+
+  A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit.  Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+  6. Conveying Non-Source Forms.
+
+  You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+    a) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by the
+    Corresponding Source fixed on a durable physical medium
+    customarily used for software interchange.
+
+    b) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by a
+    written offer, valid for at least three years and valid for as
+    long as you offer spare parts or customer support for that product
+    model, to give anyone who possesses the object code either (1) a
+    copy of the Corresponding Source for all the software in the
+    product that is covered by this License, on a durable physical
+    medium customarily used for software interchange, for a price no
+    more than your reasonable cost of physically performing this
+    conveying of source, or (2) access to copy the
+    Corresponding Source from a network server at no charge.
+
+    c) Convey individual copies of the object code with a copy of the
+    written offer to provide the Corresponding Source.  This
+    alternative is allowed only occasionally and noncommercially, and
+    only if you received the object code with such an offer, in accord
+    with subsection 6b.
+
+    d) Convey the object code by offering access from a designated
+    place (gratis or for a charge), and offer equivalent access to the
+    Corresponding Source in the same way through the same place at no
+    further charge.  You need not require recipients to copy the
+    Corresponding Source along with the object code.  If the place to
+    copy the object code is a network server, the Corresponding Source
+    may be on a different server (operated by you or a third party)
+    that supports equivalent copying facilities, provided you maintain
+    clear directions next to the object code saying where to find the
+    Corresponding Source.  Regardless of what server hosts the
+    Corresponding Source, you remain obligated to ensure that it is
+    available for as long as needed to satisfy these requirements.
+
+    e) Convey the object code using peer-to-peer transmission, provided
+    you inform other peers where the object code and Corresponding
+    Source of the work are being offered to the general public at no
+    charge under subsection 6d.
+
+  A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+  A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling.  In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage.  For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product.  A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+  "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source.  The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+  If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information.  But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+  The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed.  Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+  Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+  7. Additional Terms.
+
+  "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law.  If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+  When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it.  (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.)  You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+  Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+    a) Disclaiming warranty or limiting liability differently from the
+    terms of sections 15 and 16 of this License; or
+
+    b) Requiring preservation of specified reasonable legal notices or
+    author attributions in that material or in the Appropriate Legal
+    Notices displayed by works containing it; or
+
+    c) Prohibiting misrepresentation of the origin of that material, or
+    requiring that modified versions of such material be marked in
+    reasonable ways as different from the original version; or
+
+    d) Limiting the use for publicity purposes of names of licensors or
+    authors of the material; or
+
+    e) Declining to grant rights under trademark law for use of some
+    trade names, trademarks, or service marks; or
+
+    f) Requiring indemnification of licensors and authors of that
+    material by anyone who conveys the material (or modified versions of
+    it) with contractual assumptions of liability to the recipient, for
+    any liability that these contractual assumptions directly impose on
+    those licensors and authors.
+
+  All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10.  If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term.  If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+  If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+  Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+  8. Termination.
+
+  You may not propagate or modify a covered work except as expressly
+provided under this License.  Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+  However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+  Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+  Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License.  If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+  9. Acceptance Not Required for Having Copies.
+
+  You are not required to accept this License in order to receive or
+run a copy of the Program.  Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance.  However,
+nothing other than this License grants you permission to propagate or
+modify any covered work.  These actions infringe copyright if you do
+not accept this License.  Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+  10. Automatic Licensing of Downstream Recipients.
+
+  Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License.  You are not responsible
+for enforcing compliance by third parties with this License.
+
+  An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations.  If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+  You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License.  For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+  11. Patents.
+
+  A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based.  The
+work thus licensed is called the contributor's "contributor version".
+
+  A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version.  For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+  Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+  In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement).  To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+  If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients.  "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+  If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+  A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License.  You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+  Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+  12. No Surrender of Others' Freedom.
+
+  If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all.  For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+  13. Use with the GNU Affero General Public License.
+
+  Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work.  The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+  14. Revised Versions of this License.
+
+  The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+  Each version is given a distinguishing version number.  If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation.  If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+  If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+  Later license versions may give you additional or different
+permissions.  However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+  15. Disclaimer of Warranty.
+
+  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. Limitation of Liability.
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<http://www.gnu.org/philosophy/why-not-lgpl.html>.
+
+-----------------------------------------------------
+yui-3.3.0-reset-min.css
+
+/*
+Copyright (c) 2010, Yahoo! Inc. All rights reserved.
+Code licensed under the BSD License:
+http://developer.yahoo.com/yui/license.html
+version: 3.3.0
+build: 3167
+*/
+
+
+Software License Agreement (BSD License)
+Copyright (c) 2010, Yahoo! Inc.
+All rights reserved.
+
+Redistribution and use of this software in source and binary forms, with or
+without modification, are permitted provided that the following conditions are
+met:
+
+    Redistributions of source code must retain the above copyright notice, this
+    list of conditions and the following disclaimer.
+    Redistributions in binary form must reproduce the above copyright notice,
+    this list of conditions and the following disclaimer in the documentation
+    and/or other materials provided with the distribution.
+    Neither the name of Yahoo! Inc. nor the names of its contributors may be
+    used to endorse or promote products derived from this software without
+    specific prior written permission of Yahoo! Inc.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Sources of Intellectual Property Included in the YUI Library
+
+YUI is issued by Yahoo! under the BSD license above. Below is a list of certain
+publicly available software that is the source of intellectual property in YUI,
+along with the licensing terms that pertain to thosesources of IP. This list is
+for informational purposes only and is not intended to represent an exhaustive
+list of third party contributions to the YUI.
+
+    Douglas Crockford's JSON parsing and stringifying methods: In the JSON
+    Utility, Douglas Crockford's JSON parsing and stringifying methods are
+    adapted from work published at JSON.org. The adapted work is in the public
+    domain.
+
+    Robert Penner's animation-easing algorithms: In the Animation Utility, YUI
+    makes use of Robert Penner's algorithms for easing.
+
+    Geoff Stearns's SWFObject: In the Charts Control and the Uploader versions
+    through 2.7.0, YUI makes use of Geoff Stearns's SWFObject v1.5 for Flash
+    Player detection and embedding. More information on SWFObject can be found
+    here (http://blog.deconcept.com/swfobject/). SWFObject is (c) 2007 Geoff
+    Stearns and is released under the MIT License
+    (http://www.opensource.org/licenses/mit-license.php).
+
+    Diego Perini's IEContentLoaded technique: The Event Utility employs a
+    technique developed by Diego Perini and licensed under GPL. YUI's use of
+    this technique is included under our BSD license with the author's
+    permission.
+
+
+From MIT license link above:
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+-----------------------------------------------------
+customizations.cs
+
+  Except as noted, this content is 
+  licensed under <a href="http://creativecommons.org/licenses/by/2.5/">
+  Creative Commons Attribution 2.5</a>.
+
+
+Creative Commons
+Creative Commons Legal Code
+
+Attribution 2.5
+CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE LEGAL
+SERVICES. DISTRIBUTION OF THIS LICENSE DOES NOT CREATE AN ATTORNEY-CLIENT
+RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS INFORMATION ON AN "AS-IS" BASIS.
+CREATIVE COMMONS MAKES NO WARRANTIES REGARDING THE INFORMATION PROVIDED, AND
+DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM ITS USE.
+
+License
+
+THE WORK (AS DEFINED BELOW) IS PROVIDED UNDER THE TERMS OF THIS CREATIVE
+COMMONS PUBLIC LICENSE ("CCPL" OR "LICENSE"). THE WORK IS PROTECTED BY
+COPYRIGHT AND/OR OTHER APPLICABLE LAW. ANY USE OF THE WORK OTHER THAN AS
+AUTHORIZED UNDER THIS LICENSE OR COPYRIGHT LAW IS PROHIBITED.
+
+BY EXERCISING ANY RIGHTS TO THE WORK PROVIDED HERE, YOU ACCEPT AND AGREE TO BE
+BOUND BY THE TERMS OF THIS LICENSE. THE LICENSOR GRANTS YOU THE RIGHTS
+CONTAINED HERE IN CONSIDERATION OF YOUR ACCEPTANCE OF SUCH TERMS AND
+CONDITIONS.
+
+1. Definitions
+
+    "Collective Work" means a work, such as a periodical issue, anthology or
+    encyclopedia, in which the Work in its entirety in unmodified form, along
+    with a number of other contributions, constituting separate and independent
+    works in themselves, are assembled into a collective whole. A work that
+    constitutes a Collective Work will not be considered a Derivative Work (as
+    defined below) for the purposes of this License.
+
+    "Derivative Work" means a work based upon the Work or upon the Work and
+    other pre-existing works, such as a translation, musical arrangement,
+    dramatization, fictionalization, motion picture version, sound recording,
+    art reproduction, abridgment, condensation, or any other form in which the
+    Work may be recast, transformed, or adapted, except that a work that
+    constitutes a Collective Work will not be considered a Derivative Work for
+    the purpose of this License. For the avoidance of doubt, where the Work is
+    a musical composition or sound recording, the synchronization of the Work
+    in timed-relation with a moving image ("synching") will be considered a
+    Derivative Work for the purpose of this License.
+
+    "Licensor" means the individual or entity that offers the Work under the
+    terms of this License.
+
+    "Original Author" means the individual or entity who created the Work.
+
+    "Work" means the copyrightable work of authorship offered under the terms
+    of this License.
+
+    "You" means an individual or entity exercising rights under this License
+    who has not previously violated the terms of this License with respect to
+    the Work, or who has received express permission from the Licensor to
+    exercise rights under this License despite a previous violation.
+
+2. Fair Use Rights. Nothing in this license is intended to reduce, limit, or
+restrict any rights arising from fair use, first sale or other limitations on
+the exclusive rights of the copyright owner under copyright law or other
+applicable laws.
+
+3. License Grant. Subject to the terms and conditions of this License, Licensor
+hereby grants You a worldwide, royalty-free, non-exclusive, perpetual (for the
+duration of the applicable copyright) license to exercise the rights in the
+Work as stated below:
+
+    to reproduce the Work, to incorporate the Work into one or more Collective
+    Works, and to reproduce the Work as incorporated in the Collective Works;
+
+    to create and reproduce Derivative Works;
+
+    to distribute copies or phonorecords of, display publicly, perform
+    publicly, and perform publicly by means of a digital audio transmission the
+    Work including as incorporated in Collective Works;
+
+    to distribute copies or phonorecords of, display publicly, perform
+    publicly, and perform publicly by means of a digital audio transmission
+    Derivative Works.
+
+    For the avoidance of doubt, where the work is a musical composition:
+        Performance Royalties Under Blanket Licenses. Licensor waives the
+        exclusive right to collect, whether individually or via a performance
+        rights society (e.g. ASCAP, BMI, SESAC), royalties for the public
+        performance or public digital performance (e.g. webcast) of the Work.
+
+        Mechanical Rights and Statutory Royalties. Licensor waives the
+        exclusive right to collect, whether individually or via a music rights
+        agency or designated agent (e.g. Harry Fox Agency), royalties for any
+        phonorecord You create from the Work ("cover version") and distribute,
+        subject to the compulsory license created by 17 USC Section 115 of the
+        US Copyright Act (or the equivalent in other jurisdictions).
+
+    Webcasting Rights and Statutory Royalties. For the avoidance of doubt,
+    where the Work is a sound recording, Licensor waives the exclusive right to
+    collect, whether individually or via a performance-rights society (e.g.
+    SoundExchange), royalties for the public digital performance (e.g. webcast)
+    of the Work, subject to the compulsory license created by 17 USC Section
+    114 of the US Copyright Act (or the equivalent in other jurisdictions).
+
+The above rights may be exercised in all media and formats whether now known or
+hereafter devised. The above rights include the right to make such
+modifications as are technically necessary to exercise the rights in other
+media and formats. All rights not expressly granted by Licensor are hereby
+reserved.
+
+4. Restrictions.The license granted in Section 3 above is expressly made
+subject to and limited by the following restrictions:
+
+    You may distribute, publicly display, publicly perform, or publicly
+    digitally perform the Work only under the terms of this License, and You
+    must include a copy of, or the Uniform Resource Identifier for, this
+    License with every copy or phonorecord of the Work You distribute, publicly
+    display, publicly perform, or publicly digitally perform. You may not offer
+    or impose any terms on the Work that alter or restrict the terms of this
+    License or the recipients' exercise of the rights granted hereunder. You
+    may not sublicense the Work. You must keep intact all notices that refer to
+    this License and to the disclaimer of warranties. You may not distribute,
+    publicly display, publicly perform, or publicly digitally perform the Work
+    with any technological measures that control access or use of the Work in a
+    manner inconsistent with the terms of this License Agreement. The above
+    applies to the Work as incorporated in a Collective Work, but this does not
+    require the Collective Work apart from the Work itself to be made subject
+    to the terms of this License. If You create a Collective Work, upon notice
+    from any Licensor You must, to the extent practicable, remove from the
+    Collective Work any credit as required by clause 4(b), as requested. If You
+    create a Derivative Work, upon notice from any Licensor You must, to the
+    extent practicable, remove from the Derivative Work any credit as required
+    by clause 4(b), as requested.
+
+    If you distribute, publicly display, publicly perform, or publicly
+    digitally perform the Work or any Derivative Works or Collective Works, You
+    must keep intact all copyright notices for the Work and provide, reasonable
+    to the medium or means You are utilizing: (i) the name of the Original
+    Author (or pseudonym, if applicable) if supplied, and/or (ii) if the
+    Original Author and/or Licensor designate another party or parties (e.g. a
+    sponsor institute, publishing entity, journal) for attribution in
+    Licensor's copyright notice, terms of service or by other reasonable means,
+    the name of such party or parties; the title of the Work if supplied; to
+    the extent reasonably practicable, the Uniform Resource Identifier, if any,
+    that Licensor specifies to be associated with the Work, unless such URI
+    does not refer to the copyright notice or licensing information for the
+    Work; and in the case of a Derivative Work, a credit identifying the use of
+    the Work in the Derivative Work (e.g., "French translation of the Work by
+    Original Author," or "Screenplay based on original Work by Original
+    Author"). Such credit may be implemented in any reasonable manner;
+    provided, however, that in the case of a Derivative Work or Collective
+    Work, at a minimum such credit will appear where any other comparable
+    authorship credit appears and in a manner at least as prominent as such
+    other comparable authorship credit.
+
+5. Representations, Warranties and Disclaimer
+
+UNLESS OTHERWISE MUTUALLY AGREED TO BY THE PARTIES IN WRITING, LICENSOR OFFERS
+THE WORK AS-IS AND MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND
+CONCERNING THE WORK, EXPRESS, IMPLIED, STATUTORY OR OTHERWISE, INCLUDING,
+WITHOUT LIMITATION, WARRANTIES OF TITLE, MERCHANTIBILITY, FITNESS FOR A
+PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS,
+ACCURACY, OR THE PRESENCE OF ABSENCE OF ERRORS, WHETHER OR NOT DISCOVERABLE.
+SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OF IMPLIED WARRANTIES, SO SUCH
+EXCLUSION MAY NOT APPLY TO YOU.
+
+6. Limitation on Liability. EXCEPT TO THE EXTENT REQUIRED BY APPLICABLE LAW, IN
+NO EVENT WILL LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY FOR ANY SPECIAL,
+INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR EXEMPLARY DAMAGES ARISING OUT OF THIS
+LICENSE OR THE USE OF THE WORK, EVEN IF LICENSOR HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+7. Termination
+
+    This License and the rights granted hereunder will terminate automatically
+    upon any breach by You of the terms of this License. Individuals or
+    entities who have received Derivative Works or Collective Works from You
+    under this License, however, will not have their licenses terminated
+    provided such individuals or entities remain in full compliance with those
+    licenses. Sections 1, 2, 5, 6, 7, and 8 will survive any termination of
+    this License.
+
+    Subject to the above terms and conditions, the license granted here is
+    perpetual (for the duration of the applicable copyright in the Work).
+    Notwithstanding the above, Licensor reserves the right to release the Work
+    under different license terms or to stop distributing the Work at any time;
+    provided, however that any such election will not serve to withdraw this
+    License (or any other license that has been, or is required to be, granted
+    under the terms of this License), and this License will continue in full
+    force and effect unless terminated as stated above.
+
+8. Miscellaneous
+
+    Each time You distribute or publicly digitally perform the Work or a
+    Collective Work, the Licensor offers to the recipient a license to the Work
+    on the same terms and conditions as the license granted to You under this
+    License.
+
+    Each time You distribute or publicly digitally perform a Derivative Work,
+    Licensor offers to the recipient a license to the original Work on the same
+    terms and conditions as the license granted to You under this License.
+
+    If any provision of this License is invalid or unenforceable under
+    applicable law, it shall not affect the validity or enforceability of the
+    remainder of the terms of this License, and without further action by the
+    parties to this agreement, such provision shall be reformed to the minimum
+    extent necessary to make such provision valid and enforceable.
+
+    No term or provision of this License shall be deemed waived and no breach
+    consented to unless such waiver or consent shall be in writing and signed
+    by the party to be charged with such waiver or consent.
+
+    This License constitutes the entire agreement between the parties with
+    respect to the Work licensed here. There are no understandings, agreements
+    or representations with respect to the Work not specified here. Licensor
+    shall not be bound by any additional provisions that may appear in any
+    communication from You. This License may not be modified without the mutual
+    written agreement of the Licensor and You.
+
+Creative Commons is not a party to this License, and makes no warranty
+whatsoever in connection with the Work. Creative Commons will not be liable to
+You or any party on any legal theory for any damages whatsoever, including
+without limitation any general, special, incidental or consequential damages
+arising in connection to this license. Notwithstanding the foregoing two (2)
+sentences, if Creative Commons has expressly identified itself as the Licensor
+hereunder, it shall have all rights and obligations of Licensor.
+
+Except for the limited purpose of indicating to the public that the Work is
+licensed under the CCPL, neither party will use the trademark "Creative
+Commons" or any related trademark or logo of Creative Commons without the prior
+written consent of Creative Commons. Any permitted use will be in compliance
+with Creative Commons' then-current trademark usage guidelines, as may be
+published on its website or otherwise made available upon request from time to
+time.
+
+Creative Commons may be contacted at https://creativecommons.org/.
+
+-----------------------------------------------------
+jquery-resizable.min.js
+
+/*
+ * jQuery JavaScript Library v1.3.2
+ * http://jquery.com/
+ *
+ * Copyright (c) 2009 John Resig
+ * Dual licensed under the MIT and GPL licenses.
+ * http://docs.jquery.com/License
+ *
+ * Date: 2009-02-19 17:34:21 -0500 (Thu, 19 Feb 2009)
+ * Revision: 6246
+ */
+
+The MIT License (MIT)
+
+Copyright (c) 2009 John Resig
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+-----------------------------------------------------
+jquery-1.6.2.min.js
+
+/*!
+ * jQuery JavaScript Library v1.6.2
+ * http://jquery.com/
+ *
+ * Copyright 2011, John Resig
+ * Dual licensed under the MIT or GPL Version 2 licenses.
+ * http://jquery.org/license
+ *
+ * Includes Sizzle.js
+ * http://sizzlejs.com/
+ * Copyright 2011, The Dojo Foundation
+ * Released under the MIT, BSD, and GPL Licenses.
+ *
+ * Date: Thu Jun 30 14:16:56 2011 -0400
+ */
+
+The MIT License (MIT)
+
+Copyright (c) 2011 John Resig, and The Dojo Foundation
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
diff --git a/tools/fs_config/Android.bp b/tools/fs_config/Android.bp
index 4544e07..8891a0a 100644
--- a/tools/fs_config/Android.bp
+++ b/tools/fs_config/Android.bp
@@ -14,11 +14,7 @@
 
 package {
     // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "build_make_license"
-    // to get the below license kinds:
-    //   legacy_restricted
-    default_applicable_licenses: ["build_make_license"],
+    default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
 bootstrap_go_package {
diff --git a/tools/fs_config/Android.mk b/tools/fs_config/Android.mk
index 63cb4eb..c36c3aa 100644
--- a/tools/fs_config/Android.mk
+++ b/tools/fs_config/Android.mk
@@ -42,13 +42,14 @@
 vendor_capability_header := $(system_capability_header)
 endif
 
-# List of supported vendor, oem, odm, vendor_dlkm and odm_dlkm Partitions
+# List of supported vendor, oem, odm, vendor_dlkm, odm_dlkm, and system_dlkm Partitions
 fs_config_generate_extra_partition_list := $(strip \
   $(if $(BOARD_USES_VENDORIMAGE)$(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE),vendor) \
   $(if $(BOARD_USES_OEMIMAGE)$(BOARD_OEMIMAGE_FILE_SYSTEM_TYPE),oem) \
   $(if $(BOARD_USES_ODMIMAGE)$(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE),odm) \
   $(if $(BOARD_USES_VENDOR_DLKMIMAGE)$(BOARD_VENDOR_DLKMIMAGE_FILE_SYSTEM_TYPE),vendor_dlkm) \
   $(if $(BOARD_USES_ODM_DLKMIMAGE)$(BOARD_ODM_DLKMIMAGE_FILE_SYSTEM_TYPE),odm_dlkm) \
+  $(if $(BOARD_USES_SYSTEM_DLKMIMAGE)$(BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE),system_dlkm) \
 )
 
 ##################################
@@ -57,8 +58,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_dirs
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_REQUIRED_MODULES := \
   fs_config_dirs_system \
   fs_config_dirs_system_ext \
@@ -72,8 +74,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_files
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_REQUIRED_MODULES := \
   fs_config_files_system \
   fs_config_files_system_ext \
@@ -88,8 +91,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_dirs_system_ext
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_SYSTEM_EXTIMAGE)$(BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE),_fs_config_dirs_system_ext)
 include $(BUILD_PHONY_PACKAGE)
 
@@ -100,8 +104,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_files_system_ext
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_SYSTEM_EXTIMAGE)$(BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE),_fs_config_files_system_ext)
 include $(BUILD_PHONY_PACKAGE)
 
@@ -112,8 +117,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_dirs_product
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_PRODUCTIMAGE)$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),_fs_config_dirs_product)
 include $(BUILD_PHONY_PACKAGE)
 
@@ -124,8 +130,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_files_product
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_PRODUCTIMAGE)$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),_fs_config_files_product)
 include $(BUILD_PHONY_PACKAGE)
 
@@ -136,8 +143,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_dirs_nonsystem
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),_fs_config_dirs_$(t))
 include $(BUILD_PHONY_PACKAGE)
 
@@ -148,8 +156,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_files_nonsystem
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),_fs_config_files_$(t))
 include $(BUILD_PHONY_PACKAGE)
 
@@ -160,8 +169,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_dirs_system
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 include $(BUILD_SYSTEM)/base_rules.mk
@@ -187,8 +197,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_files_system
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 include $(BUILD_SYSTEM)/base_rules.mk
@@ -215,8 +226,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_dirs_vendor
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
@@ -241,8 +253,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_files_vendor
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
@@ -270,8 +283,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_dirs_oem
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
@@ -296,8 +310,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_files_oem
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
@@ -325,8 +340,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_dirs_odm
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
@@ -351,8 +367,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_files_odm
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
@@ -380,8 +397,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_dirs_vendor_dlkm
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_DLKM)/etc
@@ -406,8 +424,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_files_vendor_dlkm
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_DLKM)/etc
@@ -435,8 +454,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_dirs_odm_dlkm
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_ODM_DLKM)/etc
@@ -461,8 +481,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_files_odm_dlkm
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_ODM_DLKM)/etc
@@ -482,6 +503,63 @@
 
 endif
 
+ifneq ($(filter system_dlkm,$(fs_config_generate_extra_partition_list)),)
+##################################
+# Generate the system_dlkm/etc/fs_config_dirs binary file for the target
+# Add fs_config_dirs or fs_config_dirs_nonsystem to PRODUCT_PACKAGES
+# in the device make file to enable
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := _fs_config_dirs_system_dlkm
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
+LOCAL_MODULE_PATH := $(TARGET_OUT_SYSTEM_DLKM)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
+	@mkdir -p $(dir $@)
+	$< fsconfig \
+	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
+	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
+	   --partition system_dlkm \
+	   --dirs \
+	   --out_file $@ \
+	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
+
+##################################
+# Generate the system_dlkm/etc/fs_config_files binary file for the target
+# Add fs_config_files or fs_config_files_nonsystem to PRODUCT_PACKAGES
+# in the device make file to enable
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := _fs_config_files_system_dlkm
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_files
+LOCAL_MODULE_PATH := $(TARGET_OUT_SYSTEM_DLKM)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(vendor_android_filesystem_config)
+$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_CAP_HDR := $(vendor_capability_header)
+$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
+$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(vendor_android_filesystem_config) $(vendor_capability_header)
+	@mkdir -p $(dir $@)
+	$< fsconfig \
+	   --aid-header $(PRIVATE_ANDROID_FS_HDR) \
+	   --capability-header $(PRIVATE_ANDROID_CAP_HDR) \
+	   --partition system_dlkm \
+	   --files \
+	   --out_file $@ \
+	   $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null)
+
+endif
+
 ifneq ($(BOARD_USES_PRODUCTIMAGE)$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),)
 ##################################
 # Generate the product/etc/fs_config_dirs binary file for the target
@@ -490,8 +568,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_dirs_product
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_PRODUCT)/etc
@@ -516,8 +595,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_files_product
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_PRODUCT)/etc
@@ -544,8 +624,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_dirs_system_ext
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_SYSTEM_EXT)/etc
@@ -570,8 +651,9 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_files_system_ext
-LOCAL_LICENSE_KINDS := legacy_restricted
-LOCAL_LICENSE_CONDITIONS := restricted
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_SYSTEM_EXT)/etc
diff --git a/tools/fs_get_stats/Android.bp b/tools/fs_get_stats/Android.bp
index 9457de4..0697999 100644
--- a/tools/fs_get_stats/Android.bp
+++ b/tools/fs_get_stats/Android.bp
@@ -1,10 +1,6 @@
 package {
     // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "build_make_license"
-    // to get the below license kinds:
-    //   legacy_restricted
-    default_applicable_licenses: ["build_make_license"],
+    default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
 cc_binary_host {
diff --git a/tools/libhost/Android.bp b/tools/libhost/Android.bp
index a83f2e7..cd99af8 100644
--- a/tools/libhost/Android.bp
+++ b/tools/libhost/Android.bp
@@ -1,10 +1,6 @@
 package {
     // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "build_make_license"
-    // to get the below license kinds:
-    //   legacy_restricted
-    default_applicable_licenses: ["build_make_license"],
+    default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
 cc_library_host_static {
diff --git a/tools/post_process_props.py b/tools/post_process_props.py
index efbf614..38d17a8 100755
--- a/tools/post_process_props.py
+++ b/tools/post_process_props.py
@@ -38,11 +38,6 @@
       else:
         val = val + ",adb"
       prop_list.put("persist.sys.usb.config", val)
-  # UsbDeviceManager expects a value here.  If it doesn't get it, it will
-  # default to "adb". That might not the right policy there, but it's better
-  # to be explicit.
-  if not prop_list.get_value("persist.sys.usb.config"):
-    prop_list.put("persist.sys.usb.config", "none")
 
 def validate_grf_props(prop_list, sdk_version):
   """Validate GRF properties if exist.
diff --git a/tools/product_debug.py b/tools/product_debug.py
deleted file mode 100755
index ff2657c..0000000
--- a/tools/product_debug.py
+++ /dev/null
@@ -1,159 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2012 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import re
-import sys
-
-def break_lines(key, val):
-  # these don't get split
-  if key in ("PRODUCT_MODEL"):
-    return (key,val)
-  return (key, "\n".join(val.split()))
-
-def split_line(line):
-  words = line.split("=", 1)
-  if len(words) == 1:
-    return (words[0], "")
-  else:
-    return (words[0], words[1])
-
-def sort_lines(text):
-  lines = text.split()
-  lines.sort()
-  return "\n".join(lines)
-
-def parse_variables(lines):
-  return [split_line(line) for line in lines if line.strip()]
-
-def render_variables(variables):
-  variables = dict(variables)
-  del variables["FILE"]
-  variables = list(variables.iteritems())
-  variables.sort(lambda a, b: cmp(a[0], b[0]))
-  return ("<table id='variables'>"
-      + "\n".join([ "<tr><th>%(key)s</th><td>%(val)s</td></tr>" % { "key": key, "val": val }
-        for key,val in variables])
-      +"</table>")
-
-def linkify_inherit(variables, text, func_name):
-  groups = re.split("(\\$\\(call " + func_name + ",.*\\))", text)
-  result = ""
-  for i in range(0,len(groups)/2):
-    i = i * 2
-    result = result + groups[i]
-    s = groups[i+1]
-    href = s.split(",", 1)[1].strip()[:-1]
-    href = href.replace("$(SRC_TARGET_DIR)", "build/target")
-    href = ("../" * variables["FILE"].count("/")) + href + ".html"
-    result = result + "<a href=\"%s\">%s</a>" % (href,s)
-  result = result + groups[-1]
-  return result
-
-def render_original(variables, text):
-  text = linkify_inherit(variables, text, "inherit-product")
-  text = linkify_inherit(variables, text, "inherit-product-if-exists")
-  return text
-
-def read_file(fn):
-  f = file(fn)
-  text = f.read()
-  f.close()
-  return text
-
-def main(argv):
-  # read the variables
-  lines = sys.stdin.readlines()
-  variables = parse_variables(lines)
-
-  # format the variables
-  variables = [break_lines(key,val) for key,val in variables]
-
-  # now it's a dict
-  variables = dict(variables)
-
-  sorted_vars = (
-      "PRODUCT_COPY_FILES",
-      "PRODUCT_PACKAGES",
-      "PRODUCT_LOCALES",
-      "PRODUCT_PROPERTY_OVERRIDES",
-    )
-
-  for key in sorted_vars:
-    variables[key] = sort_lines(variables[key])
-
-  # the original file
-  original = read_file(variables["FILE"])
-
-  # formatting
-  values = dict(variables)
-  values.update({
-    "variables": render_variables(variables),
-    "original": render_original(variables, original),
-  })
-  print """<html>
-
-
-<head>
-  <title>%(FILE)s</title>
-  <style type="text/css">
-    body {
-      font-family: Helvetica, Arial, sans-serif;
-      padding-bottom: 20px;
-    }
-    #variables {
-      border-collapse: collapse;
-    }
-    #variables th, #variables td {
-      vertical-align: top;
-      text-align: left;
-      border-top: 1px solid #c5cdde;
-      border-bottom: 1px solid #c5cdde;
-      padding: 2px 10px 2px 10px;
-    }
-    #variables th {
-      font-size: 10pt;
-      background-color: #e2ecff
-    }
-    #variables td {
-      background-color: #ebf2ff;
-      white-space: pre;
-      font-size: 10pt;
-    }
-    #original {
-      background-color: #ebf2ff;
-      border-top: 1px solid #c5cdde;
-      border-bottom: 1px solid #c5cdde;
-      padding: 2px 10px 2px 10px;
-      white-space: pre;
-      font-size: 10pt;
-    }
-  </style>
-</head>
-<body>
-<h1>%(FILE)s</h1>
-<a href="#Original">Original</a>
-<a href="#Variables">Variables</a>
-<h2><a name="Original"></a>Original</h2>
-<div id="original">%(original)s</div>
-<h2><a name="Variables"></a>Variables</h2>
-%(variables)s
-</body>
-</html>
-""" % values
-
-if __name__ == "__main__":
-  main(sys.argv)
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 7b2c290..d8e34b7 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -56,7 +56,9 @@
     required: [
         "blk_alloc_to_base_fs",
         "e2fsck",
-        "mkerofsimage.sh",
+        "fsck.erofs",
+        "img2simg",
+        "mkfs.erofs",
         "mkuserimg_mke2fs",
         "simg2img",
         "tune2fs",
@@ -438,46 +440,6 @@
 }
 
 python_binary_host {
-    name: "merge_builds",
-    defaults: ["releasetools_binary_defaults"],
-    srcs: [
-        "merge_builds.py",
-    ],
-    libs: [
-        "releasetools_build_super_image",
-        "releasetools_common",
-    ],
-}
-
-python_binary_host {
-    name: "merge_target_files",
-    defaults: ["releasetools_binary_defaults"],
-    srcs: [
-        "merge_target_files.py",
-    ],
-    libs: [
-        "releasetools_add_img_to_target_files",
-        "releasetools_build_super_image",
-        "releasetools_check_target_files_vintf",
-        "releasetools_common",
-        "releasetools_find_shareduid_violation",
-        "releasetools_img_from_target_files",
-        "releasetools_ota_from_target_files",
-    ],
-    required: [
-        "checkvintf",
-        "host_init_verifier",
-        "secilc",
-    ],
-    target: {
-        darwin: {
-            // libs dep "releasetools_ota_from_target_files" is disabled on darwin
-            enabled: false,
-        },
-    },
-}
-
-python_binary_host {
     name: "ota_from_target_files",
     defaults: [
         "releasetools_binary_defaults",
@@ -558,6 +520,7 @@
 
 python_binary_host {
     name: "fsverity_manifest_generator",
+    defaults: ["releasetools_binary_defaults"],
     srcs: [
         "fsverity_manifest_generator.py",
     ],
@@ -574,6 +537,7 @@
 
 python_binary_host {
     name: "fsverity_metadata_generator",
+    defaults: ["releasetools_binary_defaults"],
     srcs: [
         "fsverity_metadata_generator.py",
     ],
@@ -593,11 +557,12 @@
         "check_partition_sizes.py",
         "check_target_files_signatures.py",
         "make_recovery_patch.py",
-        "merge_target_files.py",
         "ota_package_parser.py",
         "sign_apex.py",
         "sign_target_files_apks.py",
         "validate_target_files.py",
+        ":releasetools_merge_sources",
+        ":releasetools_merge_tests",
 
         "test_*.py",
     ],
diff --git a/tools/releasetools/OWNERS b/tools/releasetools/OWNERS
index 9962836..59235e0 100644
--- a/tools/releasetools/OWNERS
+++ b/tools/releasetools/OWNERS
@@ -1,6 +1,3 @@
 elsk@google.com
 nhdo@google.com
-xunchang@google.com
-
-per-file merge_*.py = danielnorman@google.com
-
+zhangkelvin@google.com
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 0c39827..da7e11a 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -54,6 +54,7 @@
 import stat
 import sys
 import uuid
+import tempfile
 import zipfile
 
 import build_image
@@ -63,7 +64,7 @@
 import ota_metadata_pb2
 
 from apex_utils import GetApexInfoFromTargetFiles
-from common import AddCareMapForAbOta
+from common import AddCareMapForAbOta, ZipDelete
 
 if sys.hexversion < 0x02070000:
   print("Python 2.7 or newer is required.", file=sys.stderr)
@@ -104,9 +105,10 @@
     if self._output_zip:
       self._zip_name = os.path.join(*args)
 
-  def Write(self):
+  def Write(self, compress_type=None):
     if self._output_zip:
-      common.ZipWrite(self._output_zip, self.name, self._zip_name)
+      common.ZipWrite(self._output_zip, self.name,
+                      self._zip_name, compress_type=compress_type)
 
 
 def AddSystem(output_zip, recovery_img=None, boot_img=None):
@@ -134,12 +136,13 @@
       "board_uses_vendorimage") == "true"
 
   if (OPTIONS.rebuild_recovery and not board_uses_vendorimage and
-      recovery_img is not None and boot_img is not None):
+          recovery_img is not None and boot_img is not None):
     logger.info("Building new recovery patch on system at system/vendor")
     common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink, recovery_img,
                              boot_img, info_dict=OPTIONS.info_dict)
 
-  block_list = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "system.map")
+  block_list = OutputFile(output_zip, OPTIONS.input_tmp,
+                          "IMAGES", "system.map")
   CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict, "system", img,
               block_list=block_list)
   return img.name
@@ -167,27 +170,28 @@
     return img.name
 
   def output_sink(fn, data):
-    ofile = open(os.path.join(OPTIONS.input_tmp, "VENDOR", fn), "w")
-    ofile.write(data)
-    ofile.close()
+    output_file = os.path.join(OPTIONS.input_tmp, "VENDOR", fn)
+    with open(output_file, "wb") as ofile:
+      ofile.write(data)
 
     if output_zip:
       arc_name = "VENDOR/" + fn
       if arc_name in output_zip.namelist():
         OPTIONS.replace_updated_files_list.append(arc_name)
       else:
-        common.ZipWrite(output_zip, ofile.name, arc_name)
+        common.ZipWrite(output_zip, output_file, arc_name)
 
   board_uses_vendorimage = OPTIONS.info_dict.get(
       "board_uses_vendorimage") == "true"
 
   if (OPTIONS.rebuild_recovery and board_uses_vendorimage and
-      recovery_img is not None and boot_img is not None):
+          recovery_img is not None and boot_img is not None):
     logger.info("Building new recovery patch on vendor")
     common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink, recovery_img,
                              boot_img, info_dict=OPTIONS.info_dict)
 
-  block_list = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "vendor.map")
+  block_list = OutputFile(output_zip, OPTIONS.input_tmp,
+                          "IMAGES", "vendor.map")
   CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict, "vendor", img,
               block_list=block_list)
   return img.name
@@ -275,6 +279,21 @@
       block_list=block_list)
   return img.name
 
+def AddSystemDlkm(output_zip):
+  """Turn the contents of SystemDlkm into an system_dlkm image and store it in output_zip."""
+
+  img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "system_dlkm.img")
+  if os.path.exists(img.name):
+    logger.info("system_dlkm.img already exists; no need to rebuild...")
+    return img.name
+
+  block_list = OutputFile(
+      output_zip, OPTIONS.input_tmp, "IMAGES", "system_dlkm.map")
+  CreateImage(
+      OPTIONS.input_tmp, OPTIONS.info_dict, "system_dlkm", img,
+      block_list=block_list)
+  return img.name
+
 
 def AddDtbo(output_zip):
   """Adds the DTBO image.
@@ -374,15 +393,16 @@
       key_path, algorithm, extra_args)
 
   for img_name in OPTIONS.info_dict.get(
-      "avb_{}_image_list".format(partition_name)).split():
-    custom_image = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", img_name)
+          "avb_{}_image_list".format(partition_name)).split():
+    custom_image = OutputFile(
+        output_zip, OPTIONS.input_tmp, "IMAGES", img_name)
     if os.path.exists(custom_image.name):
       continue
 
     custom_image_prebuilt_path = os.path.join(
         OPTIONS.input_tmp, "PREBUILT_IMAGES", img_name)
     assert os.path.exists(custom_image_prebuilt_path), \
-      "Failed to find %s at %s" % (img_name, custom_image_prebuilt_path)
+        "Failed to find %s at %s" % (img_name, custom_image_prebuilt_path)
 
     shutil.copy(custom_image_prebuilt_path, custom_image.name)
 
@@ -484,7 +504,9 @@
   build_image.BuildImage(user_dir, image_props, img.name)
 
   common.CheckSize(img.name, "userdata.img", OPTIONS.info_dict)
-  img.Write()
+  # Always use compression for useradata image.
+  # As it's likely huge and consist of lots of 0s.
+  img.Write(zipfile.ZIP_DEFLATED)
 
 
 def AddVBMeta(output_zip, partitions, name, needed_partitions):
@@ -681,11 +703,11 @@
 
   return ((os.path.isdir(
       os.path.join(OPTIONS.input_tmp, partition_name.upper())) and
-           OPTIONS.info_dict.get(
-               "building_{}_image".format(partition_name)) == "true") or
-          os.path.exists(
-              os.path.join(OPTIONS.input_tmp, "IMAGES",
-                           "{}.img".format(partition_name))))
+      OPTIONS.info_dict.get(
+      "building_{}_image".format(partition_name)) == "true") or
+      os.path.exists(
+      os.path.join(OPTIONS.input_tmp, "IMAGES",
+                   "{}.img".format(partition_name))))
 
 
 def AddApexInfo(output_zip):
@@ -717,7 +739,7 @@
   boot_container = boot_images and (
       len(boot_images.split()) >= 2 or boot_images.split()[0] != 'boot.img')
   if (OPTIONS.info_dict.get("avb_enable") == "true" and not boot_container and
-      OPTIONS.info_dict.get("avb_building_vbmeta_image") == "true"):
+          OPTIONS.info_dict.get("avb_building_vbmeta_image") == "true"):
     avbtool = OPTIONS.info_dict["avb_avbtool"]
     digest = verity_utils.CalculateVbmetaDigest(OPTIONS.input_tmp, avbtool)
     vbmeta_digest_txt = os.path.join(OPTIONS.input_tmp, "META",
@@ -762,12 +784,13 @@
   has_init_boot = OPTIONS.info_dict.get("init_boot") == "true"
   has_vendor_boot = OPTIONS.info_dict.get("vendor_boot") == "true"
 
-  # {vendor,odm,product,system_ext,vendor_dlkm,odm_dlkm, system, system_other}.img
+  # {vendor,odm,product,system_ext,vendor_dlkm,odm_dlkm, system_dlkm, system, system_other}.img
   # can be built from source, or  dropped into target_files.zip as a prebuilt blob.
   has_vendor = HasPartition("vendor")
   has_odm = HasPartition("odm")
   has_vendor_dlkm = HasPartition("vendor_dlkm")
   has_odm_dlkm = HasPartition("odm_dlkm")
+  has_system_dlkm = HasPartition("system_dlkm")
   has_product = HasPartition("product")
   has_system_ext = HasPartition("system_ext")
   has_system = HasPartition("system")
@@ -804,7 +827,7 @@
     boot_images = OPTIONS.info_dict.get("boot_images")
     if boot_images is None:
       boot_images = "boot.img"
-    for index,b in enumerate(boot_images.split()):
+    for index, b in enumerate(boot_images.split()):
       # common.GetBootableImage() returns the image directly if present.
       boot_image = common.GetBootableImage(
           "IMAGES/" + b, b, OPTIONS.input_tmp, "BOOT")
@@ -825,7 +848,8 @@
     init_boot_image = common.GetBootableImage(
         "IMAGES/init_boot.img", "init_boot.img", OPTIONS.input_tmp, "INIT_BOOT")
     if init_boot_image:
-      partitions['init_boot'] = os.path.join(OPTIONS.input_tmp, "IMAGES", "init_boot.img")
+      partitions['init_boot'] = os.path.join(
+          OPTIONS.input_tmp, "IMAGES", "init_boot.img")
       if not os.path.exists(partitions['init_boot']):
         init_boot_image.WriteToDir(OPTIONS.input_tmp)
         if output_zip:
@@ -883,6 +907,7 @@
       ("odm", has_odm, AddOdm, []),
       ("vendor_dlkm", has_vendor_dlkm, AddVendorDlkm, []),
       ("odm_dlkm", has_odm_dlkm, AddOdmDlkm, []),
+      ("system_dlkm", has_system_dlkm, AddSystemDlkm, []),
       ("system_other", has_system_other, AddSystemOther, []),
   )
   for call in add_partition_calls:
@@ -951,7 +976,7 @@
 
   if OPTIONS.info_dict.get("build_super_partition") == "true":
     if OPTIONS.info_dict.get(
-        "build_retrofit_dynamic_partitions_ota_package") == "true":
+            "build_retrofit_dynamic_partitions_ota_package") == "true":
       banner("super split images")
       AddSuperSplit(output_zip)
 
@@ -988,6 +1013,36 @@
                           OPTIONS.replace_updated_files_list)
 
 
+def OptimizeCompressedEntries(zipfile_path):
+  """Convert files that do not compress well to uncompressed storage.
+
+  EROFS images tend to be compressed already, so compressing them again
+  yields little space savings. Leaving them uncompressed will make
+  downstream tooling's job easier, and save compute time.
+  """
+  if not zipfile.is_zipfile(zipfile_path):
+    return
+  entries_to_store = []
+  with tempfile.TemporaryDirectory() as tmpdir:
+    with zipfile.ZipFile(zipfile_path, "r", allowZip64=True) as zfp:
+      for zinfo in zfp.filelist:
+        if not zinfo.filename.startswith("IMAGES/") and not zinfo.filename.startswith("META"):
+          continue
+        # Don't try to store userdata.img uncompressed, it's usually huge.
+        if zinfo.filename.endswith("userdata.img"):
+          continue
+        if zinfo.compress_size > zinfo.file_size * 0.80 and zinfo.compress_type != zipfile.ZIP_STORED:
+          entries_to_store.append(zinfo)
+          zfp.extract(zinfo, tmpdir)
+    if len(entries_to_store) == 0:
+      return
+    # Remove these entries, then re-add them as ZIP_STORED
+    ZipDelete(zipfile_path, [entry.filename for entry in entries_to_store])
+    with zipfile.ZipFile(zipfile_path, "a", allowZip64=True) as zfp:
+      for entry in entries_to_store:
+        zfp.write(os.path.join(tmpdir, entry.filename), entry.filename, compress_type=zipfile.ZIP_STORED)
+
+
 def main(argv):
   def option_handler(o, a):
     if o in ("-a", "--add_missing"):
@@ -1019,8 +1074,10 @@
   common.InitLogging()
 
   AddImagesToTargetFiles(args[0])
+  OptimizeCompressedEntries(args[0])
   logger.info("done.")
 
+
 if __name__ == '__main__':
   try:
     common.CloseInheritedPipes()
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index ee0feae..3f13a4a 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -54,7 +54,7 @@
 class ApexApkSigner(object):
   """Class to sign the apk files and other files in an apex payload image and repack the apex"""
 
-  def __init__(self, apex_path, key_passwords, codename_to_api_level_map, avbtool=None, sign_tool=None):
+  def __init__(self, apex_path, key_passwords, codename_to_api_level_map, avbtool=None, sign_tool=None, fsverity_tool=None):
     self.apex_path = apex_path
     if not key_passwords:
       self.key_passwords = dict()
@@ -65,8 +65,9 @@
         OPTIONS.search_path, "bin", "debugfs_static")
     self.avbtool = avbtool if avbtool else "avbtool"
     self.sign_tool = sign_tool
+    self.fsverity_tool = fsverity_tool if fsverity_tool else "fsverity"
 
-  def ProcessApexFile(self, apk_keys, payload_key, signing_args=None):
+  def ProcessApexFile(self, apk_keys, payload_key, signing_args=None, is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None):
     """Scans and signs the payload files and repack the apex
 
     Args:
@@ -84,10 +85,14 @@
                 self.debugfs_path, 'list', self.apex_path]
     entries_names = common.RunAndCheckOutput(list_cmd).split()
     apk_entries = [name for name in entries_names if name.endswith('.apk')]
+    sepolicy_entries = []
+    if is_sepolicy:
+      sepolicy_entries = [name for name in entries_names if
+          name.startswith('./etc/SEPolicy') and name.endswith('.zip')]
 
     # No need to sign and repack, return the original apex path.
-    if not apk_entries and self.sign_tool is None:
-      logger.info('No apk file to sign in %s', self.apex_path)
+    if not apk_entries and not sepolicy_entries and self.sign_tool is None:
+      logger.info('No payload (apk or zip) file to sign in %s', self.apex_path)
       return self.apex_path
 
     for entry in apk_entries:
@@ -101,15 +106,16 @@
         logger.warning('Apk path does not contain the intended directory name:'
                        ' %s', entry)
 
-    payload_dir, has_signed_content = self.ExtractApexPayloadAndSignContents(
-        apk_entries, apk_keys, payload_key)
+    payload_dir, has_signed_content = self.ExtractApexPayloadAndSignContents(apk_entries,
+        apk_keys, payload_key, sepolicy_entries, sepolicy_key, sepolicy_cert, signing_args)
     if not has_signed_content:
       logger.info('No contents has been signed in %s', self.apex_path)
       return self.apex_path
 
     return self.RepackApexPayload(payload_dir, payload_key, signing_args)
 
-  def ExtractApexPayloadAndSignContents(self, apk_entries, apk_keys, payload_key):
+  def ExtractApexPayloadAndSignContents(self, apk_entries, apk_keys, payload_key,
+  sepolicy_entries, sepolicy_key, sepolicy_cert, signing_args):
     """Extracts the payload image and signs the containing apk files."""
     if not os.path.exists(self.debugfs_path):
       raise ApexSigningError(
@@ -141,14 +147,54 @@
           codename_to_api_level_map=self.codename_to_api_level_map)
       has_signed_content = True
 
+    for entry in sepolicy_entries:
+      sepolicy_key = sepolicy_key if sepolicy_key else payload_key
+      self.SignSePolicy(payload_dir, entry, sepolicy_key, sepolicy_cert)
+      has_signed_content = True
+
     if self.sign_tool:
       logger.info('Signing payload contents in apex %s with %s', self.apex_path, self.sign_tool)
-      cmd = [self.sign_tool, '--avbtool', self.avbtool, payload_key, payload_dir]
+      # Pass avbtool to the custom signing tool
+      cmd = [self.sign_tool, '--avbtool', self.avbtool]
+      # Pass signing_args verbatim which will be forwarded to avbtool (e.g. --signing_helper=...)
+      if signing_args:
+        cmd.extend(['--signing_args', '"{}"'.format(signing_args)])
+      cmd.extend([payload_key, payload_dir])
       common.RunAndCheckOutput(cmd)
       has_signed_content = True
 
     return payload_dir, has_signed_content
 
+  def SignSePolicy(self, payload_dir, sepolicy_zip, sepolicy_key, sepolicy_cert):
+    sepolicy_sig = sepolicy_zip + '.sig'
+    sepolicy_fsv_sig = sepolicy_zip + '.fsv_sig'
+
+    policy_zip_path = os.path.join(payload_dir, sepolicy_zip)
+    sig_out_path = os.path.join(payload_dir, sepolicy_sig)
+    sig_old = sig_out_path + '.old'
+    if os.path.exists(sig_out_path):
+      os.rename(sig_out_path, sig_old)
+    sign_cmd = ['openssl', 'dgst', '-sign', sepolicy_key, '-keyform', 'PEM', '-sha256',
+        '-out', sig_out_path, '-binary', policy_zip_path]
+    common.RunAndCheckOutput(sign_cmd)
+    if os.path.exists(sig_old):
+      os.remove(sig_old)
+
+    if not sepolicy_cert:
+      logger.info('No cert provided for SEPolicy, skipping fsverity sign')
+      return
+
+    fsv_sig_out_path = os.path.join(payload_dir, sepolicy_fsv_sig)
+    fsv_sig_old = fsv_sig_out_path + '.old'
+    if os.path.exists(fsv_sig_out_path):
+      os.rename(fsv_sig_out_path, fsv_sig_old)
+
+    fsverity_cmd = [self.fsverity_tool, 'sign', policy_zip_path, fsv_sig_out_path,
+        '--key=' + sepolicy_key, '--cert=' + sepolicy_cert]
+    common.RunAndCheckOutput(fsverity_cmd)
+    if os.path.exists(fsv_sig_old):
+      os.remove(fsv_sig_old)
+
   def RepackApexPayload(self, payload_dir, payload_key, signing_args=None):
     """Rebuilds the apex file with the updated payload directory."""
     apex_dir = common.MakeTempDir()
@@ -168,7 +214,7 @@
       if os.path.isfile(path):
         os.remove(path)
       elif os.path.isdir(path):
-        shutil.rmtree(path)
+        shutil.rmtree(path, ignore_errors=True)
 
     # TODO(xunchang) the signing process can be improved by using
     # '--unsigned_payload_only'. But we need to parse the vbmeta earlier for
@@ -319,7 +365,9 @@
 
 def SignUncompressedApex(avbtool, apex_file, payload_key, container_key,
                          container_pw, apk_keys, codename_to_api_level_map,
-                         no_hashtree, signing_args=None, sign_tool=None):
+                         no_hashtree, signing_args=None, sign_tool=None,
+                         is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None,
+                         fsverity_tool=None):
   """Signs the current uncompressed APEX with the given payload/container keys.
 
   Args:
@@ -332,6 +380,10 @@
     no_hashtree: Don't include hashtree in the signed APEX.
     signing_args: Additional args to be passed to the payload signer.
     sign_tool: A tool to sign the contents of the APEX.
+    is_sepolicy: Indicates if the apex is a sepolicy.apex
+    sepolicy_key: Key to sign a sepolicy zip.
+    sepolicy_cert: Cert to sign a sepolicy zip.
+    fsverity_tool: fsverity path to sign sepolicy zip.
 
   Returns:
     The path to the signed APEX file.
@@ -340,8 +392,9 @@
   # the apex file after signing.
   apk_signer = ApexApkSigner(apex_file, container_pw,
                              codename_to_api_level_map,
-                             avbtool, sign_tool)
-  apex_file = apk_signer.ProcessApexFile(apk_keys, payload_key, signing_args)
+                             avbtool, sign_tool, fsverity_tool)
+  apex_file = apk_signer.ProcessApexFile(
+      apk_keys, payload_key, signing_args, is_sepolicy, sepolicy_key, sepolicy_cert)
 
   # 2a. Extract and sign the APEX_PAYLOAD_IMAGE entry with the given
   # payload_key.
@@ -395,7 +448,9 @@
 
 def SignCompressedApex(avbtool, apex_file, payload_key, container_key,
                        container_pw, apk_keys, codename_to_api_level_map,
-                       no_hashtree, signing_args=None, sign_tool=None):
+                       no_hashtree, signing_args=None, sign_tool=None,
+                       is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None,
+                       fsverity_tool=None):
   """Signs the current compressed APEX with the given payload/container keys.
 
   Args:
@@ -407,6 +462,10 @@
     codename_to_api_level_map: A dict that maps from codename to API level.
     no_hashtree: Don't include hashtree in the signed APEX.
     signing_args: Additional args to be passed to the payload signer.
+    is_sepolicy: Indicates if the apex is a sepolicy.apex
+    sepolicy_key: Key to sign a sepolicy zip.
+    sepolicy_cert: Cert to sign a sepolicy zip.
+    fsverity_tool: fsverity path to sign sepolicy zip.
 
   Returns:
     The path to the signed APEX file.
@@ -433,7 +492,11 @@
       codename_to_api_level_map,
       no_hashtree,
       signing_args,
-      sign_tool)
+      sign_tool,
+      is_sepolicy,
+      sepolicy_key,
+      sepolicy_cert,
+      fsverity_tool)
 
   # 3. Compress signed original apex.
   compressed_apex_file = common.MakeTempFile(prefix='apex-container-',
@@ -461,7 +524,8 @@
 
 def SignApex(avbtool, apex_data, payload_key, container_key, container_pw,
              apk_keys, codename_to_api_level_map,
-             no_hashtree, signing_args=None, sign_tool=None):
+             no_hashtree, signing_args=None, sign_tool=None,
+             is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None, fsverity_tool=None):
   """Signs the current APEX with the given payload/container keys.
 
   Args:
@@ -473,6 +537,9 @@
     codename_to_api_level_map: A dict that maps from codename to API level.
     no_hashtree: Don't include hashtree in the signed APEX.
     signing_args: Additional args to be passed to the payload signer.
+    sepolicy_key: Key to sign a sepolicy zip.
+    sepolicy_cert: Cert to sign a sepolicy zip.
+    fsverity_tool: fsverity path to sign sepolicy zip.
 
   Returns:
     The path to the signed APEX file.
@@ -498,7 +565,11 @@
           no_hashtree=no_hashtree,
           apk_keys=apk_keys,
           signing_args=signing_args,
-          sign_tool=sign_tool)
+          sign_tool=sign_tool,
+          is_sepolicy=is_sepolicy,
+          sepolicy_key=sepolicy_key,
+          sepolicy_cert=sepolicy_cert,
+          fsverity_tool=fsverity_tool)
     elif apex_type == 'COMPRESSED':
       return SignCompressedApex(
           avbtool,
@@ -510,7 +581,11 @@
           no_hashtree=no_hashtree,
           apk_keys=apk_keys,
           signing_args=signing_args,
-          sign_tool=sign_tool)
+          sign_tool=sign_tool,
+          is_sepolicy=is_sepolicy,
+          sepolicy_key=sepolicy_key,
+          sepolicy_cert=sepolicy_cert,
+          fsverity_tool=fsverity_tool)
     else:
       # TODO(b/172912232): support signing compressed apex
       raise ApexInfoError('Unsupported apex type {}'.format(apex_type))
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index a4377c7..e33b581 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -268,18 +268,19 @@
   """
   build_command = []
   fs_type = prop_dict.get("fs_type", "")
-  run_e2fsck = False
+  run_fsck = None
   needs_projid = prop_dict.get("needs_projid", 0)
   needs_casefold = prop_dict.get("needs_casefold", 0)
   needs_compress = prop_dict.get("needs_compress", 0)
 
   disable_sparse = "disable_sparse" in prop_dict
+  manual_sparse = False
 
   if fs_type.startswith("ext"):
     build_command = [prop_dict["ext_mkuserimg"]]
     if "extfs_sparse_flag" in prop_dict and not disable_sparse:
       build_command.append(prop_dict["extfs_sparse_flag"])
-      run_e2fsck = True
+      run_fsck = RunE2fsck
     build_command.extend([in_dir, out_file, fs_type,
                           prop_dict["mount_point"]])
     build_command.append(prop_dict["image_size"])
@@ -320,17 +321,8 @@
     if "selinux_fc" in prop_dict:
       build_command.append(prop_dict["selinux_fc"])
   elif fs_type.startswith("erofs"):
-    build_command = ["mkerofsimage.sh"]
-    build_command.extend([in_dir, out_file])
-    if "erofs_sparse_flag" in prop_dict and not disable_sparse:
-      build_command.extend([prop_dict["erofs_sparse_flag"]])
-    build_command.extend(["-m", prop_dict["mount_point"]])
-    if target_out:
-      build_command.extend(["-d", target_out])
-    if fs_config:
-      build_command.extend(["-C", fs_config])
-    if "selinux_fc" in prop_dict:
-      build_command.extend(["-c", prop_dict["selinux_fc"]])
+    build_command = ["mkfs.erofs"]
+
     compressor = None
     if "erofs_default_compressor" in prop_dict:
       compressor = prop_dict["erofs_default_compressor"]
@@ -338,16 +330,30 @@
       compressor = prop_dict["erofs_compressor"]
     if compressor:
       build_command.extend(["-z", compressor])
+
+    build_command.extend(["--mount-point", prop_dict["mount_point"]])
+    if target_out:
+      build_command.extend(["--product-out", target_out])
+    if fs_config:
+      build_command.extend(["--fs-config-file", fs_config])
+    if "selinux_fc" in prop_dict:
+      build_command.extend(["--file-contexts", prop_dict["selinux_fc"]])
     if "timestamp" in prop_dict:
       build_command.extend(["-T", str(prop_dict["timestamp"])])
     if "uuid" in prop_dict:
       build_command.extend(["-U", prop_dict["uuid"]])
     if "block_list" in prop_dict:
-      build_command.extend(["-B", prop_dict["block_list"]])
+      build_command.extend(["--block-list-file", prop_dict["block_list"]])
     if "erofs_pcluster_size" in prop_dict:
-      build_command.extend(["-P", prop_dict["erofs_pcluster_size"]])
+      build_command.extend(["-C", prop_dict["erofs_pcluster_size"]])
     if "erofs_share_dup_blocks" in prop_dict:
-      build_command.extend(["-k", "4096"])
+      build_command.extend(["--chunksize", "4096"])
+
+    build_command.extend([out_file, in_dir])
+    if "erofs_sparse_flag" in prop_dict and not disable_sparse:
+      manual_sparse = True
+
+    run_fsck = RunErofsFsck
   elif fs_type.startswith("squash"):
     build_command = ["mksquashfsimage.sh"]
     build_command.extend([in_dir, out_file])
@@ -436,18 +442,38 @@
               int(prop_dict["partition_size"]) // BYTES_IN_MB))
     raise
 
-  if run_e2fsck and prop_dict.get("skip_fsck") != "true":
-    unsparse_image = UnsparseImage(out_file, replace=False)
+  if run_fsck and prop_dict.get("skip_fsck") != "true":
+    run_fsck(out_file)
 
-    # Run e2fsck on the inflated image file
-    e2fsck_command = ["e2fsck", "-f", "-n", unsparse_image]
-    try:
-      common.RunAndCheckOutput(e2fsck_command)
-    finally:
-      os.remove(unsparse_image)
+  if manual_sparse:
+    temp_file = out_file + ".sparse"
+    img2simg_argv = ["img2simg", out_file, temp_file]
+    common.RunAndCheckOutput(img2simg_argv)
+    os.rename(temp_file, out_file)
 
   return mkfs_output
 
+
+def RunE2fsck(out_file):
+  unsparse_image = UnsparseImage(out_file, replace=False)
+
+  # Run e2fsck on the inflated image file
+  e2fsck_command = ["e2fsck", "-f", "-n", unsparse_image]
+  try:
+    common.RunAndCheckOutput(e2fsck_command)
+  finally:
+    os.remove(unsparse_image)
+
+
+def RunErofsFsck(out_file):
+  fsck_command = ["fsck.erofs", "--extract", out_file]
+  try:
+    common.RunAndCheckOutput(fsck_command)
+  except:
+    print("Check failed for EROFS image {}".format(out_file))
+    raise
+
+
 def BuildImage(in_dir, prop_dict, out_file, target_out=None):
   """Builds an image for the files under in_dir and writes it to out_file.
 
@@ -651,6 +677,7 @@
       "oem",
       "product",
       "system",
+      "system_dlkm",
       "system_ext",
       "system_other",
       "vendor",
@@ -704,7 +731,7 @@
       if mount_point not in allowed_partitions:
           continue
 
-    if mount_point == "system_other":
+    if (mount_point == "system_other") and (dest_prop != "partition_size"):
       # Propagate system properties to system_other. They'll get overridden
       # after as needed.
       copy_prop(src_prop.format("system"), dest_prop)
@@ -773,6 +800,8 @@
     copy_prop("partition_size", "vendor_dlkm_size")
   elif mount_point == "odm_dlkm":
     copy_prop("partition_size", "odm_dlkm_size")
+  elif mount_point == "system_dlkm":
+    copy_prop("partition_size", "system_dlkm_size")
   elif mount_point == "product":
     copy_prop("partition_size", "product_size")
   elif mount_point == "system_ext":
@@ -816,6 +845,8 @@
       mount_point = "vendor_dlkm"
     elif image_filename == "odm_dlkm.img":
       mount_point = "odm_dlkm"
+    elif image_filename == "system_dlkm.img":
+      mount_point = "system_dlkm"
     elif image_filename == "oem.img":
       mount_point = "oem"
     elif image_filename == "product.img":
diff --git a/tools/releasetools/check_ota_package_signature.py b/tools/releasetools/check_ota_package_signature.py
index 58510a5..b395c19 100755
--- a/tools/releasetools/check_ota_package_signature.py
+++ b/tools/releasetools/check_ota_package_signature.py
@@ -181,8 +181,5 @@
 if __name__ == '__main__':
   try:
     main()
-  except AssertionError as err:
-    print('\n    ERROR: %s\n' % (err,))
-    sys.exit(1)
   finally:
     common.Cleanup()
diff --git a/tools/releasetools/check_partition_sizes.py b/tools/releasetools/check_partition_sizes.py
index eaed07e..738d77d 100644
--- a/tools/releasetools/check_partition_sizes.py
+++ b/tools/releasetools/check_partition_sizes.py
@@ -300,8 +300,5 @@
   try:
     common.CloseInheritedPipes()
     main(sys.argv[1:])
-  except common.ExternalError:
-    logger.exception("\n   ERROR:\n")
-    sys.exit(1)
   finally:
     common.Cleanup()
diff --git a/tools/releasetools/check_target_files_signatures.py b/tools/releasetools/check_target_files_signatures.py
index 0f56fb9..d935607 100755
--- a/tools/releasetools/check_target_files_signatures.py
+++ b/tools/releasetools/check_target_files_signatures.py
@@ -237,9 +237,11 @@
     # Signer #1 certificate DN: ...
     # Signer #1 certificate SHA-256 digest: ...
     # Signer #1 certificate SHA-1 digest: ...
+    # Signer (minSdkVersion=24, maxSdkVersion=32) certificate SHA-256 digest: 56be132b780656fe2444cd34326eb5d7aac91d2096abf0fe673a99270622ec87
+    # Signer (minSdkVersion=24, maxSdkVersion=32) certificate SHA-1 digest: 19da94896ce4078c38ca695701f1dec741ec6d67
     # ...
     certs_info = {}
-    certificate_regex = re.compile(r"(Signer #[0-9]+) (certificate .*):(.*)")
+    certificate_regex = re.compile(r"(Signer (?:#[0-9]+|\(.*\))) (certificate .*):(.*)")
     for line in output.splitlines():
       m = certificate_regex.match(line)
       if not m:
@@ -342,8 +344,8 @@
           apk = APK(fullname, displayname)
           self.apks[apk.filename] = apk
           self.apks_by_basename[os.path.basename(apk.filename)] = apk
-
-          self.max_pkg_len = max(self.max_pkg_len, len(apk.package))
+          if apk.package:
+            self.max_pkg_len = max(self.max_pkg_len, len(apk.package))
           self.max_fn_len = max(self.max_fn_len, len(apk.filename))
 
   def CheckSharedUids(self):
@@ -396,7 +398,8 @@
     by_digest = {}
     for apk in self.apks.values():
       for digest in apk.cert_digests:
-        by_digest.setdefault(digest, []).append((apk.package, apk))
+        if apk.package:
+          by_digest.setdefault(digest, []).append((apk.package, apk))
 
     order = [(-len(v), k) for (k, v) in by_digest.items()]
     order.sort()
diff --git a/tools/releasetools/check_target_files_vintf.py b/tools/releasetools/check_target_files_vintf.py
index 213ae21..4a2a905 100755
--- a/tools/releasetools/check_target_files_vintf.py
+++ b/tools/releasetools/check_target_files_vintf.py
@@ -46,7 +46,7 @@
     '/product': ('PRODUCT', 'SYSTEM/product'),
     '/odm': ('ODM', 'VENDOR/odm', 'SYSTEM/vendor/odm'),
     '/system_ext': ('SYSTEM_EXT', 'SYSTEM/system_ext'),
-    # vendor_dlkm and odm_dlkm does not have VINTF files.
+    # vendor_dlkm, odm_dlkm, and system_dlkm do not have VINTF files.
 }
 
 UNZIP_PATTERN = ['META/*', '*/build.prop']
@@ -164,7 +164,7 @@
   """
   def PathToPatterns(path):
     if path[-1] == '/':
-      path += '*'
+      path += '**'
 
     # Loop over all the entries in DIR_SEARCH_PATHS and find one where the key
     # is a prefix of path. In order to get find the correct prefix, sort the
@@ -286,8 +286,5 @@
   try:
     common.CloseInheritedPipes()
     main(sys.argv[1:])
-  except common.ExternalError:
-    logger.exception('\n   ERROR:\n')
-    sys.exit(1)
   finally:
     common.Cleanup()
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index e5c68bc..c2c6df1 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -114,7 +114,7 @@
 # accordingly.
 AVB_PARTITIONS = ('boot', 'init_boot', 'dtbo', 'odm', 'product', 'pvmfw', 'recovery',
                   'system', 'system_ext', 'vendor', 'vendor_boot',
-                  'vendor_dlkm', 'odm_dlkm')
+                  'vendor_dlkm', 'odm_dlkm', 'system_dlkm')
 
 # Chained VBMeta partitions.
 AVB_VBMETA_PARTITIONS = ('vbmeta_system', 'vbmeta_vendor')
@@ -128,6 +128,7 @@
     'odm',
     'vendor_dlkm',
     'odm_dlkm',
+    'system_dlkm',
 ]
 
 # Partitions with a build.prop file
@@ -470,10 +471,6 @@
   def oem_props(self):
     return self._oem_props
 
-  @property
-  def avb_enabled(self):
-    return self.get("avb_enable") == "true"
-
   def __getitem__(self, key):
     return self.info_dict[key]
 
@@ -497,8 +494,9 @@
   def GetPartitionBuildProp(self, prop, partition):
     """Returns the inquired build property for the provided partition."""
 
-    # Boot image uses ro.[product.]bootimage instead of boot.
-    prop_partition = "bootimage" if partition == "boot" else partition
+    # Boot image and init_boot image use ro.[product.]bootimage instead of boot.
+    # This comes from the generic ramdisk.
+    prop_partition = "bootimage" if partition == "boot" or partition == "init_boot" else partition
 
     # If provided a partition for this property, only look within that
     # partition's build.prop.
@@ -801,7 +799,7 @@
 
     # Redirect {partition}_base_fs_file for each of the named partitions.
     for part_name in ["system", "vendor", "system_ext", "product", "odm",
-                      "vendor_dlkm", "odm_dlkm"]:
+                      "vendor_dlkm", "odm_dlkm", "system_dlkm"]:
       key_name = part_name + "_base_fs_file"
       if key_name not in d:
         continue
@@ -1024,7 +1022,8 @@
 
     import_path = tokens[1]
     if not re.match(r'^/{}/.*\.prop$'.format(self.partition), import_path):
-      raise ValueError('Unrecognized import path {}'.format(line))
+      logger.warn('Unrecognized import path {}'.format(line))
+      return {}
 
     # We only recognize a subset of import statement that the init process
     # supports. And we can loose the restriction based on how the dynamic
@@ -1245,6 +1244,7 @@
           "VENDOR_DLKM", "VENDOR/vendor_dlkm", "SYSTEM/vendor/vendor_dlkm"
       ],
       "odm_dlkm": ["ODM_DLKM", "VENDOR/odm_dlkm", "SYSTEM/vendor/odm_dlkm"],
+      "system_dlkm": ["SYSTEM_DLKM", "SYSTEM/system_dlkm"],
   }
   partition_map = {}
   for partition, subdirs in possible_subdirs.items():
@@ -1401,7 +1401,7 @@
           "gki_signing_algorithm" in OPTIONS.info_dict)
 
 
-def _GenerateGkiCertificate(image, image_name, partition_name):
+def _GenerateGkiCertificate(image, image_name):
   key_path = OPTIONS.info_dict.get("gki_signing_key_path")
   algorithm = OPTIONS.info_dict.get("gki_signing_algorithm")
 
@@ -1430,8 +1430,7 @@
   if signature_args:
     cmd.extend(["--additional_avb_args", signature_args])
 
-  args = OPTIONS.info_dict.get(
-      "avb_" + partition_name + "_add_hash_footer_args", "")
+  args = OPTIONS.info_dict.get("avb_boot_add_hash_footer_args", "")
   args = args.strip()
   if args:
     cmd.extend(["--additional_avb_args", args])
@@ -1624,27 +1623,9 @@
   if args and args.strip():
     cmd.extend(shlex.split(args))
 
-  boot_signature = None
-  if _HasGkiCertificationArgs():
-    # Certify GKI images.
-    boot_signature_bytes = b''
-    if kernel_path is not None:
-      boot_signature_bytes += _GenerateGkiCertificate(
-          kernel_path, "generic_kernel", "boot")
-    if has_ramdisk:
-      boot_signature_bytes += _GenerateGkiCertificate(
-          ramdisk_img.name, "generic_ramdisk", "init_boot")
-
-    if len(boot_signature_bytes) > 0:
-      boot_signature = tempfile.NamedTemporaryFile()
-      boot_signature.write(boot_signature_bytes)
-      boot_signature.flush()
-      cmd.extend(["--boot_signature", boot_signature.name])
-  else:
-    # Certified GKI boot/init_boot image mustn't set 'mkbootimg_version_args'.
-    args = info_dict.get("mkbootimg_version_args")
-    if args and args.strip():
-      cmd.extend(shlex.split(args))
+  args = info_dict.get("mkbootimg_version_args")
+  if args and args.strip():
+    cmd.extend(shlex.split(args))
 
   if has_ramdisk:
     cmd.extend(["--ramdisk", ramdisk_img.name])
@@ -1666,6 +1647,29 @@
 
   RunAndCheckOutput(cmd)
 
+  if _HasGkiCertificationArgs():
+    if not os.path.exists(img.name):
+      raise ValueError("Cannot find GKI boot.img")
+    if kernel_path is None or not os.path.exists(kernel_path):
+      raise ValueError("Cannot find GKI kernel.img")
+
+    # Certify GKI images.
+    boot_signature_bytes = b''
+    boot_signature_bytes += _GenerateGkiCertificate(img.name, "boot")
+    boot_signature_bytes += _GenerateGkiCertificate(
+        kernel_path, "generic_kernel")
+
+    BOOT_SIGNATURE_SIZE = 16 * 1024
+    if len(boot_signature_bytes) > BOOT_SIGNATURE_SIZE:
+      raise ValueError(
+          f"GKI boot_signature size must be <= {BOOT_SIGNATURE_SIZE}")
+    boot_signature_bytes += (
+        b'\0' * (BOOT_SIGNATURE_SIZE - len(boot_signature_bytes)))
+    assert len(boot_signature_bytes) == BOOT_SIGNATURE_SIZE
+
+    with open(img.name, 'ab') as f:
+      f.write(boot_signature_bytes)
+
   if (info_dict.get("boot_signer") == "true" and
           info_dict.get("verity_key")):
     # Hard-code the path as "/boot" for two-step special recovery image (which
@@ -1726,9 +1730,6 @@
     ramdisk_img.close()
   img.close()
 
-  if boot_signature is not None:
-    boot_signature.close()
-
   return data
 
 
@@ -2242,8 +2243,8 @@
   stdoutdata, stderrdata = proc.communicate()
   if proc.returncode != 0:
     raise ExternalError(
-        "Failed to obtain minSdkVersion: aapt2 return code {}:\n{}\n{}".format(
-            proc.returncode, stdoutdata, stderrdata))
+        "Failed to obtain minSdkVersion for {}: aapt2 return code {}:\n{}\n{}".format(
+            apk_name, proc.returncode, stdoutdata, stderrdata))
 
   for line in stdoutdata.split("\n"):
     # Looking for lines such as sdkVersion:'23' or sdkVersion:'M'.
@@ -2816,6 +2817,9 @@
   """
   if isinstance(entries, str):
     entries = [entries]
+  # If list is empty, nothing to do
+  if not entries:
+    return
   cmd = ["zip", "-d", zip_filename] + entries
   RunAndCheckOutput(cmd)
 
diff --git a/tools/releasetools/fsverity_manifest_generator.py b/tools/releasetools/fsverity_manifest_generator.py
index 527cddb..b8184bc 100644
--- a/tools/releasetools/fsverity_manifest_generator.py
+++ b/tools/releasetools/fsverity_manifest_generator.py
@@ -55,6 +55,14 @@
       help='minimum supported sdk version of the generated manifest apk',
       required=True)
   p.add_argument(
+      '--version-code',
+      help='version code for the generated manifest apk',
+      required=True)
+  p.add_argument(
+      '--version-name',
+      help='version name for the generated manifest apk',
+      required=True)
+  p.add_argument(
       '--framework-res',
       help='path to framework-res.apk',
       required=True)
@@ -98,6 +106,8 @@
       "-A", os.path.join(temp_dir, "assets"),
       "-o", args.output,
       "--min-sdk-version", args.min_sdk_version,
+      "--version-code", args.version_code,
+      "--version-name", args.version_name,
       "-I", args.framework_res,
       "--manifest", args.apk_manifest_path])
   common.RunAndCheckOutput([args.apksigner_path, "sign", "--in", args.output,
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index 0b2b187..76da89c 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -251,8 +251,5 @@
   try:
     common.CloseInheritedPipes()
     main(sys.argv[1:])
-  except common.ExternalError as e:
-    logger.exception('\n   ERROR:\n')
-    sys.exit(1)
   finally:
     common.Cleanup()
diff --git a/tools/releasetools/merge/Android.bp b/tools/releasetools/merge/Android.bp
new file mode 100644
index 0000000..219acf8
--- /dev/null
+++ b/tools/releasetools/merge/Android.bp
@@ -0,0 +1,75 @@
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+filegroup {
+    name: "releasetools_merge_sources",
+    srcs: [
+        "merge_compatibility_checks.py",
+        "merge_dexopt.py",
+        "merge_meta.py",
+        "merge_target_files.py",
+        "merge_utils.py",
+    ],
+}
+
+filegroup {
+    name: "releasetools_merge_tests",
+    srcs: [
+        "test_merge_compatibility_checks.py",
+        "test_merge_meta.py",
+        "test_merge_utils.py",
+    ],
+}
+
+python_binary_host {
+    name: "merge_target_files",
+    defaults: ["releasetools_binary_defaults"],
+    srcs: [":releasetools_merge_sources"],
+    libs: [
+        "releasetools_add_img_to_target_files",
+        "releasetools_build_super_image",
+        "releasetools_check_target_files_vintf",
+        "releasetools_common",
+        "releasetools_find_shareduid_violation",
+        "releasetools_img_from_target_files",
+        "releasetools_ota_from_target_files",
+    ],
+    required: [
+        "checkvintf",
+        "host_init_verifier",
+        "secilc",
+    ],
+    target: {
+        darwin: {
+            // libs dep "releasetools_ota_from_target_files" is disabled on darwin
+            enabled: false,
+        },
+    },
+}
+
+python_binary_host {
+    name: "merge_builds",
+    defaults: ["releasetools_binary_defaults"],
+    srcs: [
+        "merge_builds.py",
+    ],
+    libs: [
+        "releasetools_build_super_image",
+        "releasetools_common",
+    ],
+}
diff --git a/tools/releasetools/merge/OWNERS b/tools/releasetools/merge/OWNERS
new file mode 100644
index 0000000..9012e3a
--- /dev/null
+++ b/tools/releasetools/merge/OWNERS
@@ -0,0 +1,3 @@
+danielnorman@google.com
+jgalmes@google.com
+rseymour@google.com
diff --git a/tools/releasetools/merge_builds.py b/tools/releasetools/merge/merge_builds.py
similarity index 100%
rename from tools/releasetools/merge_builds.py
rename to tools/releasetools/merge/merge_builds.py
diff --git a/tools/releasetools/merge/merge_compatibility_checks.py b/tools/releasetools/merge/merge_compatibility_checks.py
new file mode 100644
index 0000000..207abe2
--- /dev/null
+++ b/tools/releasetools/merge/merge_compatibility_checks.py
@@ -0,0 +1,206 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+"""Compatibility checks that should be performed on merged target_files."""
+
+import json
+import logging
+import os
+from xml.etree import ElementTree
+
+import apex_utils
+import check_target_files_vintf
+import common
+import find_shareduid_violation
+
+logger = logging.getLogger(__name__)
+OPTIONS = common.OPTIONS
+
+
+def CheckCompatibility(target_files_dir, partition_map):
+  """Runs various compatibility checks.
+
+  Returns a possibly-empty list of error messages.
+  """
+  errors = []
+
+  errors.extend(CheckVintf(target_files_dir))
+  errors.extend(CheckShareduidViolation(target_files_dir, partition_map))
+  errors.extend(CheckApexDuplicatePackages(target_files_dir, partition_map))
+
+  # The remaining checks only use the following partitions:
+  partition_map = {
+      partition: path
+      for partition, path in partition_map.items()
+      if partition in ('system', 'system_ext', 'product', 'vendor', 'odm')
+  }
+
+  errors.extend(CheckInitRcFiles(target_files_dir, partition_map))
+  errors.extend(CheckCombinedSepolicy(target_files_dir, partition_map))
+
+  return errors
+
+
+def CheckVintf(target_files_dir):
+  """Check for any VINTF issues using check_vintf."""
+  errors = []
+  try:
+    if not check_target_files_vintf.CheckVintf(target_files_dir):
+      errors.append('Incompatible VINTF.')
+  except RuntimeError as err:
+    errors.append(str(err))
+  return errors
+
+
+def CheckShareduidViolation(target_files_dir, partition_map):
+  """Check for any APK sharedUserId violations across partition sets.
+
+  Writes results to META/shareduid_violation_modules.json to help
+  with followup debugging.
+  """
+  errors = []
+  violation = find_shareduid_violation.FindShareduidViolation(
+      target_files_dir, partition_map)
+  shareduid_violation_modules = os.path.join(
+      target_files_dir, 'META', 'shareduid_violation_modules.json')
+  with open(shareduid_violation_modules, 'w') as f:
+    # Write the output to a file to enable debugging.
+    f.write(violation)
+
+    # Check for violations across the partition sets.
+    shareduid_errors = common.SharedUidPartitionViolations(
+        json.loads(violation),
+        [OPTIONS.framework_partition_set, OPTIONS.vendor_partition_set])
+    if shareduid_errors:
+      for error in shareduid_errors:
+        errors.append('APK sharedUserId error: %s' % error)
+      errors.append('See APK sharedUserId violations file: %s' %
+                    shareduid_violation_modules)
+  return errors
+
+
+def CheckInitRcFiles(target_files_dir, partition_map):
+  """Check for any init.rc issues using host_init_verifier."""
+  try:
+    common.RunHostInitVerifier(
+        product_out=target_files_dir, partition_map=partition_map)
+  except RuntimeError as err:
+    return [str(err)]
+  return []
+
+
+def CheckCombinedSepolicy(target_files_dir, partition_map, execute=True):
+  """Uses secilc to compile a split sepolicy file.
+
+  Depends on various */etc/selinux/* and */etc/vintf/* files within partitions.
+  """
+  errors = []
+
+  def get_file(partition, path):
+    if partition not in partition_map:
+      logger.warning('Cannot load SEPolicy files for missing partition %s',
+                     partition)
+      return None
+    file_path = os.path.join(target_files_dir, partition_map[partition], path)
+    if os.path.exists(file_path):
+      return file_path
+    return None
+
+  # Load the kernel sepolicy version from the FCM. This is normally provided
+  # directly to selinux.cpp as a build flag, but is also available in this file.
+  fcm_file = get_file('system', 'etc/vintf/compatibility_matrix.device.xml')
+  if not fcm_file:
+    errors.append('Missing required file for loading sepolicy: '
+                  '/system/etc/vintf/compatibility_matrix.device.xml')
+    return errors
+  kernel_sepolicy_version = ElementTree.parse(fcm_file).getroot().find(
+      'sepolicy/kernel-sepolicy-version').text
+
+  # Load the vendor's plat sepolicy version. This is the version used for
+  # locating sepolicy mapping files.
+  vendor_plat_version_file = get_file('vendor',
+                                      'etc/selinux/plat_sepolicy_vers.txt')
+  if not vendor_plat_version_file:
+    errors.append('Missing required sepolicy file %s' %
+                  vendor_plat_version_file)
+    return errors
+  with open(vendor_plat_version_file) as f:
+    vendor_plat_version = f.read().strip()
+
+  # Use the same flags and arguments as selinux.cpp OpenSplitPolicy().
+  cmd = ['secilc', '-m', '-M', 'true', '-G', '-N']
+  cmd.extend(['-c', kernel_sepolicy_version])
+  cmd.extend(['-o', os.path.join(target_files_dir, 'META/combined_sepolicy')])
+  cmd.extend(['-f', '/dev/null'])
+
+  required_policy_files = (
+      ('system', 'etc/selinux/plat_sepolicy.cil'),
+      ('system', 'etc/selinux/mapping/%s.cil' % vendor_plat_version),
+      ('vendor', 'etc/selinux/vendor_sepolicy.cil'),
+      ('vendor', 'etc/selinux/plat_pub_versioned.cil'),
+  )
+  for policy in (map(lambda partition_and_path: get_file(*partition_and_path),
+                     required_policy_files)):
+    if not policy:
+      errors.append('Missing required sepolicy file %s' % policy)
+      return errors
+    cmd.append(policy)
+
+  optional_policy_files = (
+      ('system', 'etc/selinux/mapping/%s.compat.cil' % vendor_plat_version),
+      ('system_ext', 'etc/selinux/system_ext_sepolicy.cil'),
+      ('system_ext', 'etc/selinux/mapping/%s.cil' % vendor_plat_version),
+      ('product', 'etc/selinux/product_sepolicy.cil'),
+      ('product', 'etc/selinux/mapping/%s.cil' % vendor_plat_version),
+      ('odm', 'etc/selinux/odm_sepolicy.cil'),
+  )
+  for policy in (map(lambda partition_and_path: get_file(*partition_and_path),
+                     optional_policy_files)):
+    if policy:
+      cmd.append(policy)
+
+  try:
+    if execute:
+      common.RunAndCheckOutput(cmd)
+    else:
+      return cmd
+  except RuntimeError as err:
+    errors.append(str(err))
+
+  return errors
+
+
+def CheckApexDuplicatePackages(target_files_dir, partition_map):
+  """Checks if the same APEX package name is provided by multiple partitions."""
+  errors = []
+
+  apex_packages = set()
+  for partition in partition_map.keys():
+    try:
+      apex_info = apex_utils.GetApexInfoFromTargetFiles(
+          target_files_dir, partition, compressed_only=False)
+    except RuntimeError as err:
+      errors.append(str(err))
+      apex_info = []
+    partition_apex_packages = set([info.package_name for info in apex_info])
+    duplicates = apex_packages.intersection(partition_apex_packages)
+    if duplicates:
+      errors.append(
+          'Duplicate APEX package_names found in multiple partitions: %s' %
+          ' '.join(duplicates))
+    apex_packages.update(partition_apex_packages)
+
+  return errors
diff --git a/tools/releasetools/merge/merge_dexopt.py b/tools/releasetools/merge/merge_dexopt.py
new file mode 100644
index 0000000..7bf9bd4
--- /dev/null
+++ b/tools/releasetools/merge/merge_dexopt.py
@@ -0,0 +1,323 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+"""Generates dexopt files for vendor apps, from a merged target_files.
+
+Expects items in OPTIONS prepared by merge_target_files.py.
+"""
+
+import glob
+import json
+import logging
+import os
+import shutil
+import subprocess
+
+import common
+import merge_utils
+
+logger = logging.getLogger(__name__)
+OPTIONS = common.OPTIONS
+
+
+def MergeDexopt(temp_dir, output_target_files_dir):
+  """If needed, generates dexopt files for vendor apps.
+
+  Args:
+    temp_dir: Location containing an 'output' directory where target files have
+      been extracted, e.g. <temp_dir>/output/SYSTEM, <temp_dir>/output/IMAGES,
+      etc.
+    output_target_files_dir: The name of a directory that will be used to create
+      the output target files package after all the special cases are processed.
+  """
+  # Load vendor and framework META/misc_info.txt.
+  if (OPTIONS.vendor_misc_info.get('building_with_vsdk') != 'true' or
+      OPTIONS.framework_dexpreopt_tools is None or
+      OPTIONS.framework_dexpreopt_config is None or
+      OPTIONS.vendor_dexpreopt_config is None):
+    return
+
+  logger.info('applying dexpreopt')
+
+  # The directory structure to apply dexpreopt is:
+  #
+  # <temp_dir>/
+  #     framework_meta/
+  #         META/
+  #     vendor_meta/
+  #         META/
+  #     output/
+  #         SYSTEM/
+  #         VENDOR/
+  #         IMAGES/
+  #         <other items extracted from system and vendor target files>
+  #     tools/
+  #         <contents of dexpreopt_tools.zip>
+  #     system_config/
+  #         <contents of system dexpreopt_config.zip>
+  #     vendor_config/
+  #         <contents of vendor dexpreopt_config.zip>
+  #     system -> output/SYSTEM
+  #     vendor -> output/VENDOR
+  #     apex -> output/SYSTEM/apex (only for flattened APEX builds)
+  #     apex/ (extracted updatable APEX)
+  #         <apex 1>/
+  #             ...
+  #         <apex 2>/
+  #             ...
+  #         ...
+  #     out/dex2oat_result/vendor/
+  #         <app>
+  #             oat/arm64/
+  #                 package.vdex
+  #                 package.odex
+  #         <priv-app>
+  #             oat/arm64/
+  #                 package.vdex
+  #                 package.odex
+  dexpreopt_tools_files_temp_dir = os.path.join(temp_dir, 'tools')
+  dexpreopt_framework_config_files_temp_dir = os.path.join(
+      temp_dir, 'system_config')
+  dexpreopt_vendor_config_files_temp_dir = os.path.join(temp_dir,
+                                                        'vendor_config')
+
+  merge_utils.ExtractItems(
+      input_zip=OPTIONS.framework_dexpreopt_tools,
+      output_dir=dexpreopt_tools_files_temp_dir,
+      extract_item_list=('*',))
+  merge_utils.ExtractItems(
+      input_zip=OPTIONS.framework_dexpreopt_config,
+      output_dir=dexpreopt_framework_config_files_temp_dir,
+      extract_item_list=('*',))
+  merge_utils.ExtractItems(
+      input_zip=OPTIONS.vendor_dexpreopt_config,
+      output_dir=dexpreopt_vendor_config_files_temp_dir,
+      extract_item_list=('*',))
+
+  os.symlink(
+      os.path.join(output_target_files_dir, 'SYSTEM'),
+      os.path.join(temp_dir, 'system'))
+  os.symlink(
+      os.path.join(output_target_files_dir, 'VENDOR'),
+      os.path.join(temp_dir, 'vendor'))
+
+  # The directory structure for flattened APEXes is:
+  #
+  # SYSTEM
+  #     apex
+  #         <APEX name, e.g., com.android.wifi>
+  #             apex_manifest.pb
+  #             apex_pubkey
+  #             etc/
+  #             javalib/
+  #             lib/
+  #             lib64/
+  #             priv-app/
+  #
+  # The directory structure for updatable APEXes is:
+  #
+  # SYSTEM
+  #     apex
+  #         com.android.adbd.apex
+  #         com.android.appsearch.apex
+  #         com.android.art.apex
+  #         ...
+  apex_root = os.path.join(output_target_files_dir, 'SYSTEM', 'apex')
+
+  # Check for flattened versus updatable APEX.
+  if OPTIONS.framework_misc_info.get('target_flatten_apex') == 'false':
+    # Extract APEX.
+    logging.info('extracting APEX')
+
+    apex_extract_root_dir = os.path.join(temp_dir, 'apex')
+    os.makedirs(apex_extract_root_dir)
+
+    for apex in (glob.glob(os.path.join(apex_root, '*.apex')) +
+                 glob.glob(os.path.join(apex_root, '*.capex'))):
+      logging.info('  apex: %s', apex)
+      # deapexer is in the same directory as the merge_target_files binary extracted
+      # from otatools.zip.
+      apex_json_info = subprocess.check_output(['deapexer', 'info', apex])
+      logging.info('    info: %s', apex_json_info)
+      apex_info = json.loads(apex_json_info)
+      apex_name = apex_info['name']
+      logging.info('    name: %s', apex_name)
+
+      apex_extract_dir = os.path.join(apex_extract_root_dir, apex_name)
+      os.makedirs(apex_extract_dir)
+
+      # deapexer uses debugfs_static, which is part of otatools.zip.
+      command = [
+          'deapexer',
+          '--debugfs_path',
+          'debugfs_static',
+          'extract',
+          apex,
+          apex_extract_dir,
+      ]
+      logging.info('    running %s', command)
+      subprocess.check_call(command)
+  else:
+    # Flattened APEXes don't need to be extracted since they have the necessary
+    # directory structure.
+    os.symlink(os.path.join(apex_root), os.path.join(temp_dir, 'apex'))
+
+  # Modify system config to point to the tools that have been extracted.
+  # Absolute or .. paths are not allowed by the dexpreopt_gen tool in
+  # dexpreopt_soong.config.
+  dexpreopt_framework_soon_config = os.path.join(
+      dexpreopt_framework_config_files_temp_dir, 'dexpreopt_soong.config')
+  with open(dexpreopt_framework_soon_config, 'w') as f:
+    dexpreopt_soong_config = {
+        'Profman': 'tools/profman',
+        'Dex2oat': 'tools/dex2oatd',
+        'Aapt': 'tools/aapt2',
+        'SoongZip': 'tools/soong_zip',
+        'Zip2zip': 'tools/zip2zip',
+        'ManifestCheck': 'tools/manifest_check',
+        'ConstructContext': 'tools/construct_context',
+    }
+    json.dump(dexpreopt_soong_config, f)
+
+  # TODO(b/188179859): Make *dex location configurable to vendor or system_other.
+  use_system_other_odex = False
+
+  if use_system_other_odex:
+    dex_img = 'SYSTEM_OTHER'
+  else:
+    dex_img = 'VENDOR'
+    # Open vendor_filesystem_config to append the items generated by dexopt.
+    vendor_file_system_config = open(
+        os.path.join(temp_dir, 'output', 'META',
+                     'vendor_filesystem_config.txt'), 'a')
+
+  # Dexpreopt vendor apps.
+  dexpreopt_config_suffix = '_dexpreopt.config'
+  for config in glob.glob(
+      os.path.join(dexpreopt_vendor_config_files_temp_dir,
+                   '*' + dexpreopt_config_suffix)):
+    app = os.path.basename(config)[:-len(dexpreopt_config_suffix)]
+    logging.info('dexpreopt config: %s %s', config, app)
+
+    apk_dir = 'app'
+    apk_path = os.path.join(temp_dir, 'vendor', apk_dir, app, app + '.apk')
+    if not os.path.exists(apk_path):
+      apk_dir = 'priv-app'
+      apk_path = os.path.join(temp_dir, 'vendor', apk_dir, app, app + '.apk')
+      if not os.path.exists(apk_path):
+        logging.warning(
+            'skipping dexpreopt for %s, no apk found in vendor/app '
+            'or vendor/priv-app', app)
+        continue
+
+    # Generate dexpreopting script. Note 'out_dir' is not the output directory
+    # where the script is generated, but the OUT_DIR at build time referenced
+    # in the dexpreopt config files, e.g., "out/.../core-oj.jar", so the tool knows
+    # how to adjust the path.
+    command = [
+        os.path.join(dexpreopt_tools_files_temp_dir, 'dexpreopt_gen'),
+        '-global',
+        os.path.join(dexpreopt_framework_config_files_temp_dir,
+                     'dexpreopt.config'),
+        '-global_soong',
+        os.path.join(dexpreopt_framework_config_files_temp_dir,
+                     'dexpreopt_soong.config'),
+        '-module',
+        config,
+        '-dexpreopt_script',
+        'dexpreopt_app.sh',
+        '-out_dir',
+        'out',
+        '-base_path',
+        '.',
+        '--uses_target_files',
+    ]
+
+    # Run the command from temp_dir so all tool paths are its descendants.
+    logging.info('running %s', command)
+    subprocess.check_call(command, cwd=temp_dir)
+
+    # Call the generated script.
+    command = ['sh', 'dexpreopt_app.sh', apk_path]
+    logging.info('running %s', command)
+    subprocess.check_call(command, cwd=temp_dir)
+
+    # Output files are in:
+    #
+    # <temp_dir>/out/dex2oat_result/vendor/priv-app/<app>/oat/arm64/package.vdex
+    # <temp_dir>/out/dex2oat_result/vendor/priv-app/<app>/oat/arm64/package.odex
+    # <temp_dir>/out/dex2oat_result/vendor/app/<app>/oat/arm64/package.vdex
+    # <temp_dir>/out/dex2oat_result/vendor/app/<app>/oat/arm64/package.odex
+    #
+    # Copy the files to their destination. The structure of system_other is:
+    #
+    # system_other/
+    #     system-other-odex-marker
+    #     system/
+    #         app/
+    #             <app>/oat/arm64/
+    #                 <app>.odex
+    #                 <app>.vdex
+    #             ...
+    #         priv-app/
+    #             <app>/oat/arm64/
+    #                 <app>.odex
+    #                 <app>.vdex
+    #             ...
+
+    # TODO(b/188179859): Support for other architectures.
+    arch = 'arm64'
+
+    dex_destination = os.path.join(temp_dir, 'output', dex_img, apk_dir, app,
+                                   'oat', arch)
+    os.makedirs(dex_destination)
+    dex2oat_path = os.path.join(temp_dir, 'out', 'dex2oat_result', 'vendor',
+                                apk_dir, app, 'oat', arch)
+    shutil.copy(
+        os.path.join(dex2oat_path, 'package.vdex'),
+        os.path.join(dex_destination, app + '.vdex'))
+    shutil.copy(
+        os.path.join(dex2oat_path, 'package.odex'),
+        os.path.join(dex_destination, app + '.odex'))
+
+    # Append entries to vendor_file_system_config.txt, such as:
+    #
+    # vendor/app/<app>/oat 0 2000 755 selabel=u:object_r:vendor_app_file:s0 capabilities=0x0
+    # vendor/app/<app>/oat/arm64 0 2000 755 selabel=u:object_r:vendor_app_file:s0 capabilities=0x0
+    # vendor/app/<app>/oat/arm64/<app>.odex 0 0 644 selabel=u:object_r:vendor_app_file:s0 capabilities=0x0
+    # vendor/app/<app>/oat/arm64/<app>.vdex 0 0 644 selabel=u:object_r:vendor_app_file:s0 capabilities=0x0
+    if not use_system_other_odex:
+      vendor_app_prefix = 'vendor/' + apk_dir + '/' + app + '/oat'
+      selabel = 'selabel=u:object_r:vendor_app_file:s0 capabilities=0x0'
+      vendor_file_system_config.writelines([
+          vendor_app_prefix + ' 0 2000 755 ' + selabel + '\n',
+          vendor_app_prefix + '/' + arch + ' 0 2000 755 ' + selabel + '\n',
+          vendor_app_prefix + '/' + arch + '/' + app + '.odex 0 0 644 ' +
+          selabel + '\n',
+          vendor_app_prefix + '/' + arch + '/' + app + '.vdex 0 0 644 ' +
+          selabel + '\n',
+      ])
+
+  if not use_system_other_odex:
+    vendor_file_system_config.close()
+    # Delete vendor.img so that it will be regenerated.
+    # TODO(b/188179859): Rebuilding a vendor image in GRF mode (e.g., T(framework)
+    #                    and S(vendor) may require logic similar to that in
+    #                    rebuild_image_with_sepolicy.
+    vendor_img = os.path.join(output_target_files_dir, 'IMAGES', 'vendor.img')
+    if os.path.exists(vendor_img):
+      logging.info('Deleting %s', vendor_img)
+      os.remove(vendor_img)
diff --git a/tools/releasetools/merge/merge_meta.py b/tools/releasetools/merge/merge_meta.py
new file mode 100644
index 0000000..580b3ce
--- /dev/null
+++ b/tools/releasetools/merge/merge_meta.py
@@ -0,0 +1,287 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+"""Functions for merging META/* files from partial builds.
+
+Expects items in OPTIONS prepared by merge_target_files.py.
+"""
+
+import logging
+import os
+import re
+import shutil
+
+import build_image
+import common
+import merge_utils
+import sparse_img
+import verity_utils
+
+from common import ExternalError
+
+logger = logging.getLogger(__name__)
+
+OPTIONS = common.OPTIONS
+
+# In apexkeys.txt or apkcerts.txt, we will find partition tags on each entry in
+# the file. We use these partition tags to filter the entries in those files
+# from the two different target files packages to produce a merged apexkeys.txt
+# or apkcerts.txt file. A partition tag (e.g., for the product partition) looks
+# like this: 'partition="product"'. We use the group syntax to grab the value of
+# the tag. We use non-greedy matching in case there are other fields on the
+# same line.
+
+PARTITION_TAG_PATTERN = re.compile(r'partition="(.*?)"')
+
+# The sorting algorithm for apexkeys.txt and apkcerts.txt does not include the
+# ".apex" or ".apk" suffix, so we use the following pattern to extract a key.
+
+MODULE_KEY_PATTERN = re.compile(r'name="(.+)\.(apex|apk)"')
+
+
+def MergeMetaFiles(temp_dir, merged_dir):
+  """Merges various files in META/*."""
+
+  framework_meta_dir = os.path.join(temp_dir, 'framework_meta', 'META')
+  merge_utils.ExtractItems(
+      input_zip=OPTIONS.framework_target_files,
+      output_dir=os.path.dirname(framework_meta_dir),
+      extract_item_list=('META/*',))
+
+  vendor_meta_dir = os.path.join(temp_dir, 'vendor_meta', 'META')
+  merge_utils.ExtractItems(
+      input_zip=OPTIONS.vendor_target_files,
+      output_dir=os.path.dirname(vendor_meta_dir),
+      extract_item_list=('META/*',))
+
+  merged_meta_dir = os.path.join(merged_dir, 'META')
+
+  # Merge META/misc_info.txt into OPTIONS.merged_misc_info,
+  # but do not write it yet. The following functions may further
+  # modify this dict.
+  OPTIONS.merged_misc_info = MergeMiscInfo(
+      framework_meta_dir=framework_meta_dir,
+      vendor_meta_dir=vendor_meta_dir,
+      merged_meta_dir=merged_meta_dir)
+
+  CopyNamedFileContexts(
+      framework_meta_dir=framework_meta_dir,
+      vendor_meta_dir=vendor_meta_dir,
+      merged_meta_dir=merged_meta_dir)
+
+  if OPTIONS.merged_misc_info.get('use_dynamic_partitions') == 'true':
+    MergeDynamicPartitionsInfo(
+        framework_meta_dir=framework_meta_dir,
+        vendor_meta_dir=vendor_meta_dir,
+        merged_meta_dir=merged_meta_dir)
+
+  if OPTIONS.merged_misc_info.get('ab_update') == 'true':
+    MergeAbPartitions(
+        framework_meta_dir=framework_meta_dir,
+        vendor_meta_dir=vendor_meta_dir,
+        merged_meta_dir=merged_meta_dir)
+    UpdateCareMapImageSizeProps(images_dir=os.path.join(merged_dir, 'IMAGES'))
+
+  for file_name in ('apkcerts.txt', 'apexkeys.txt'):
+    MergePackageKeys(
+        framework_meta_dir=framework_meta_dir,
+        vendor_meta_dir=vendor_meta_dir,
+        merged_meta_dir=merged_meta_dir,
+        file_name=file_name)
+
+  # Write the now-finalized OPTIONS.merged_misc_info.
+  merge_utils.WriteSortedData(
+      data=OPTIONS.merged_misc_info,
+      path=os.path.join(merged_meta_dir, 'misc_info.txt'))
+
+
+def MergeAbPartitions(framework_meta_dir, vendor_meta_dir, merged_meta_dir):
+  """Merges META/ab_partitions.txt.
+
+  The output contains the union of the partition names.
+  """
+  with open(os.path.join(framework_meta_dir, 'ab_partitions.txt')) as f:
+    framework_ab_partitions = f.read().splitlines()
+
+  with open(os.path.join(vendor_meta_dir, 'ab_partitions.txt')) as f:
+    vendor_ab_partitions = f.read().splitlines()
+
+  merge_utils.WriteSortedData(
+      data=set(framework_ab_partitions + vendor_ab_partitions),
+      path=os.path.join(merged_meta_dir, 'ab_partitions.txt'))
+
+
+def MergeMiscInfo(framework_meta_dir, vendor_meta_dir, merged_meta_dir):
+  """Merges META/misc_info.txt.
+
+  The output contains a combination of key=value pairs from both inputs.
+  Most pairs are taken from the vendor input, while some are taken from
+  the framework input.
+  """
+
+  OPTIONS.framework_misc_info = common.LoadDictionaryFromFile(
+      os.path.join(framework_meta_dir, 'misc_info.txt'))
+  OPTIONS.vendor_misc_info = common.LoadDictionaryFromFile(
+      os.path.join(vendor_meta_dir, 'misc_info.txt'))
+
+  # Merged misc info is a combination of vendor misc info plus certain values
+  # from the framework misc info.
+
+  merged_dict = OPTIONS.vendor_misc_info
+  for key in OPTIONS.framework_misc_info_keys:
+    if key in OPTIONS.framework_misc_info:
+      merged_dict[key] = OPTIONS.framework_misc_info[key]
+
+  # If AVB is enabled then ensure that we build vbmeta.img.
+  # Partial builds with AVB enabled may set PRODUCT_BUILD_VBMETA_IMAGE=false to
+  # skip building an incomplete vbmeta.img.
+  if merged_dict.get('avb_enable') == 'true':
+    merged_dict['avb_building_vbmeta_image'] = 'true'
+
+  return merged_dict
+
+
+def MergeDynamicPartitionsInfo(framework_meta_dir, vendor_meta_dir,
+                               merged_meta_dir):
+  """Merge META/dynamic_partitions_info.txt."""
+  framework_dynamic_partitions_dict = common.LoadDictionaryFromFile(
+      os.path.join(framework_meta_dir, 'dynamic_partitions_info.txt'))
+  vendor_dynamic_partitions_dict = common.LoadDictionaryFromFile(
+      os.path.join(vendor_meta_dir, 'dynamic_partitions_info.txt'))
+
+  merged_dynamic_partitions_dict = common.MergeDynamicPartitionInfoDicts(
+      framework_dict=framework_dynamic_partitions_dict,
+      vendor_dict=vendor_dynamic_partitions_dict)
+
+  merge_utils.WriteSortedData(
+      data=merged_dynamic_partitions_dict,
+      path=os.path.join(merged_meta_dir, 'dynamic_partitions_info.txt'))
+
+  # Merge misc info keys used for Dynamic Partitions.
+  OPTIONS.merged_misc_info.update(merged_dynamic_partitions_dict)
+  # Ensure that add_img_to_target_files rebuilds super split images for
+  # devices that retrofit dynamic partitions. This flag may have been set to
+  # false in the partial builds to prevent duplicate building of super.img.
+  OPTIONS.merged_misc_info['build_super_partition'] = 'true'
+
+
+def MergePackageKeys(framework_meta_dir, vendor_meta_dir, merged_meta_dir,
+                     file_name):
+  """Merges APK/APEX key list files."""
+
+  if file_name not in ('apkcerts.txt', 'apexkeys.txt'):
+    raise ExternalError(
+        'Unexpected file_name provided to merge_package_keys_txt: %s',
+        file_name)
+
+  def read_helper(d):
+    temp = {}
+    with open(os.path.join(d, file_name)) as f:
+      for line in f.read().splitlines():
+        line = line.strip()
+        if line:
+          name_search = MODULE_KEY_PATTERN.search(line.split()[0])
+          temp[name_search.group(1)] = line
+    return temp
+
+  framework_dict = read_helper(framework_meta_dir)
+  vendor_dict = read_helper(vendor_meta_dir)
+  merged_dict = {}
+
+  def filter_into_merged_dict(item_dict, partition_set):
+    for key, value in item_dict.items():
+      tag_search = PARTITION_TAG_PATTERN.search(value)
+
+      if tag_search is None:
+        raise ValueError('Entry missing partition tag: %s' % value)
+
+      partition_tag = tag_search.group(1)
+
+      if partition_tag in partition_set:
+        if key in merged_dict:
+          if OPTIONS.allow_duplicate_apkapex_keys:
+            # TODO(b/150582573) Always raise on duplicates.
+            logger.warning('Duplicate key %s' % key)
+            continue
+          else:
+            raise ValueError('Duplicate key %s' % key)
+
+        merged_dict[key] = value
+
+  # Prioritize framework keys first.
+  # Duplicate keys from vendor are an error, or ignored.
+  filter_into_merged_dict(framework_dict, OPTIONS.framework_partition_set)
+  filter_into_merged_dict(vendor_dict, OPTIONS.vendor_partition_set)
+
+  # The following code is similar to WriteSortedData, but different enough
+  # that we couldn't use that function. We need the output to be sorted by the
+  # basename of the apex/apk (without the ".apex" or ".apk" suffix). This
+  # allows the sort to be consistent with the framework/vendor input data and
+  # eases comparison of input data with merged data.
+  with open(os.path.join(merged_meta_dir, file_name), 'w') as output:
+    for key, value in sorted(merged_dict.items()):
+      output.write(value + '\n')
+
+
+def CopyNamedFileContexts(framework_meta_dir, vendor_meta_dir, merged_meta_dir):
+  """Creates named copies of each partial build's file_contexts.bin.
+
+  Used when regenerating images from the partial build.
+  """
+
+  def copy_fc_file(source_dir, file_name):
+    for name in (file_name, 'file_contexts.bin'):
+      fc_path = os.path.join(source_dir, name)
+      if os.path.exists(fc_path):
+        shutil.copyfile(fc_path, os.path.join(merged_meta_dir, file_name))
+        return
+    raise ValueError('Missing file_contexts file from %s: %s', source_dir,
+                     file_name)
+
+  copy_fc_file(framework_meta_dir, 'framework_file_contexts.bin')
+  copy_fc_file(vendor_meta_dir, 'vendor_file_contexts.bin')
+
+  # Replace <image>_selinux_fc values with framework or vendor file_contexts.bin
+  # depending on which dictionary the key came from.
+  # Only the file basename is required because all selinux_fc properties are
+  # replaced with the full path to the file under META/ when misc_info.txt is
+  # loaded from target files for repacking. See common.py LoadInfoDict().
+  for key in OPTIONS.vendor_misc_info:
+    if key.endswith('_selinux_fc'):
+      OPTIONS.merged_misc_info[key] = 'vendor_file_contexts.bin'
+  for key in OPTIONS.framework_misc_info:
+    if key.endswith('_selinux_fc'):
+      OPTIONS.merged_misc_info[key] = 'framework_file_contexts.bin'
+
+
+def UpdateCareMapImageSizeProps(images_dir):
+  """Sets <partition>_image_size props in misc_info.
+
+  add_images_to_target_files uses these props to generate META/care_map.pb.
+  Regenerated images will have this property set during regeneration.
+
+  However, images copied directly from input partial target files packages
+  need this value calculated here.
+  """
+  for partition in common.PARTITIONS_WITH_CARE_MAP:
+    image_path = os.path.join(images_dir, '{}.img'.format(partition))
+    if os.path.exists(image_path):
+      partition_size = sparse_img.GetImagePartitionSize(image_path)
+      image_props = build_image.ImagePropFromGlobalDict(
+          OPTIONS.merged_misc_info, partition)
+      verity_image_builder = verity_utils.CreateVerityImageBuilder(image_props)
+      image_size = verity_image_builder.CalculateMaxImageSize(partition_size)
+      OPTIONS.merged_misc_info['{}_image_size'.format(partition)] = image_size
diff --git a/tools/releasetools/merge/merge_target_files.py b/tools/releasetools/merge/merge_target_files.py
new file mode 100755
index 0000000..c06fd4c
--- /dev/null
+++ b/tools/releasetools/merge/merge_target_files.py
@@ -0,0 +1,611 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+"""This script merges two partial target files packages.
+
+One input package contains framework files, and the other contains vendor files.
+
+This script produces a complete, merged target files package:
+  - This package can be used to generate a flashable IMG package.
+    See --output-img.
+  - This package can be used to generate an OTA package. See --output-ota.
+  - The merged package is checked for compatibility between the two inputs.
+
+Usage: merge_target_files [args]
+
+  --framework-target-files framework-target-files-zip-archive
+      The input target files package containing framework bits. This is a zip
+      archive.
+
+  --framework-item-list framework-item-list-file
+      The optional path to a newline-separated config file of items that
+      are extracted as-is from the framework target files package.
+
+  --framework-misc-info-keys framework-misc-info-keys-file
+      The optional path to a newline-separated config file of keys to
+      extract from the framework META/misc_info.txt file.
+
+  --vendor-target-files vendor-target-files-zip-archive
+      The input target files package containing vendor bits. This is a zip
+      archive.
+
+  --vendor-item-list vendor-item-list-file
+      The optional path to a newline-separated config file of items that
+      are extracted as-is from the vendor target files package.
+
+  --output-target-files output-target-files-package
+      If provided, the output merged target files package. Also a zip archive.
+
+  --output-dir output-directory
+      If provided, the destination directory for saving merged files. Requires
+      the --output-item-list flag.
+      Can be provided alongside --output-target-files, or by itself.
+
+  --output-item-list output-item-list-file.
+      The optional path to a newline-separated config file that specifies the
+      file patterns to copy into the --output-dir. Required if providing
+      the --output-dir flag.
+
+  --output-ota output-ota-package
+      The output ota package. This is a zip archive. Use of this flag may
+      require passing the --path common flag; see common.py.
+
+  --output-img output-img-package
+      The output img package, suitable for use with 'fastboot update'. Use of
+      this flag may require passing the --path common flag; see common.py.
+
+  --output-super-empty output-super-empty-image
+      If provided, creates a super_empty.img file from the merged target
+      files package and saves it at this path.
+
+  --rebuild_recovery
+      Copy the recovery image used by non-A/B devices, used when
+      regenerating vendor images with --rebuild-sepolicy.
+
+  --allow-duplicate-apkapex-keys
+      If provided, duplicate APK/APEX keys are ignored and the value from the
+      framework is used.
+
+  --rebuild-sepolicy
+      If provided, rebuilds odm.img or vendor.img to include merged sepolicy
+      files. If odm is present then odm is preferred.
+
+  --vendor-otatools otatools.zip
+      If provided, use this otatools.zip when recompiling the odm or vendor
+      image to include sepolicy.
+
+  --keep-tmp
+      Keep temporary files for debugging purposes.
+
+  The following only apply when using the VSDK to perform dexopt on vendor apps:
+
+  --framework-dexpreopt-config
+      If provided, the location of framework's dexpreopt_config.zip.
+
+  --framework-dexpreopt-tools
+      If provided, the location of framework's dexpreopt_tools.zip.
+
+  --vendor-dexpreopt-config
+      If provided, the location of vendor's dexpreopt_config.zip.
+"""
+
+import logging
+import os
+import shutil
+import subprocess
+import sys
+import zipfile
+
+import add_img_to_target_files
+import build_image
+import build_super_image
+import common
+import img_from_target_files
+import merge_compatibility_checks
+import merge_dexopt
+import merge_meta
+import merge_utils
+import ota_from_target_files
+
+from common import ExternalError
+
+logger = logging.getLogger(__name__)
+
+OPTIONS = common.OPTIONS
+# Always turn on verbose logging.
+OPTIONS.verbose = True
+OPTIONS.framework_target_files = None
+OPTIONS.framework_item_list = []
+OPTIONS.framework_misc_info_keys = []
+OPTIONS.vendor_target_files = None
+OPTIONS.vendor_item_list = []
+OPTIONS.output_target_files = None
+OPTIONS.output_dir = None
+OPTIONS.output_item_list = []
+OPTIONS.output_ota = None
+OPTIONS.output_img = None
+OPTIONS.output_super_empty = None
+OPTIONS.rebuild_recovery = False
+# TODO(b/150582573): Remove this option.
+OPTIONS.allow_duplicate_apkapex_keys = False
+OPTIONS.vendor_otatools = None
+OPTIONS.rebuild_sepolicy = False
+OPTIONS.keep_tmp = False
+OPTIONS.framework_dexpreopt_config = None
+OPTIONS.framework_dexpreopt_tools = None
+OPTIONS.vendor_dexpreopt_config = None
+
+
+def create_merged_package(temp_dir):
+  """Merges two target files packages into one target files structure.
+
+  Returns:
+    Path to merged package under temp directory.
+  """
+  # Extract "as is" items from the input framework and vendor partial target
+  # files packages directly into the output temporary directory, since these items
+  # do not need special case processing.
+
+  output_target_files_temp_dir = os.path.join(temp_dir, 'output')
+  merge_utils.ExtractItems(
+      input_zip=OPTIONS.framework_target_files,
+      output_dir=output_target_files_temp_dir,
+      extract_item_list=OPTIONS.framework_item_list)
+  merge_utils.ExtractItems(
+      input_zip=OPTIONS.vendor_target_files,
+      output_dir=output_target_files_temp_dir,
+      extract_item_list=OPTIONS.vendor_item_list)
+
+  # Perform special case processing on META/* items.
+  # After this function completes successfully, all the files we need to create
+  # the output target files package are in place.
+  merge_meta.MergeMetaFiles(
+      temp_dir=temp_dir, merged_dir=output_target_files_temp_dir)
+
+  merge_dexopt.MergeDexopt(
+      temp_dir=temp_dir, output_target_files_dir=output_target_files_temp_dir)
+
+  return output_target_files_temp_dir
+
+
+def generate_missing_images(target_files_dir):
+  """Generate any missing images from target files."""
+
+  # Regenerate IMAGES in the target directory.
+
+  add_img_args = [
+      '--verbose',
+      '--add_missing',
+  ]
+  if OPTIONS.rebuild_recovery:
+    add_img_args.append('--rebuild_recovery')
+  add_img_args.append(target_files_dir)
+
+  add_img_to_target_files.main(add_img_args)
+
+
+def rebuild_image_with_sepolicy(target_files_dir):
+  """Rebuilds odm.img or vendor.img to include merged sepolicy files.
+
+  If odm is present then odm is preferred -- otherwise vendor is used.
+  """
+  partition = 'vendor'
+  if os.path.exists(os.path.join(target_files_dir, 'ODM')) or os.path.exists(
+      os.path.join(target_files_dir, 'IMAGES/odm.img')):
+    partition = 'odm'
+  partition_img = '{}.img'.format(partition)
+  partition_map = '{}.map'.format(partition)
+
+  logger.info('Recompiling %s using the merged sepolicy files.', partition_img)
+
+  # Copy the combined SEPolicy file and framework hashes to the image that is
+  # being rebuilt.
+  def copy_selinux_file(input_path, output_filename):
+    input_filename = os.path.join(target_files_dir, input_path)
+    if not os.path.exists(input_filename):
+      input_filename = input_filename.replace('SYSTEM_EXT/', 'SYSTEM/system_ext/') \
+          .replace('PRODUCT/', 'SYSTEM/product/')
+      if not os.path.exists(input_filename):
+        logger.info('Skipping copy_selinux_file for %s', input_filename)
+        return
+    shutil.copy(
+        input_filename,
+        os.path.join(target_files_dir, partition.upper(), 'etc/selinux',
+                     output_filename))
+
+  copy_selinux_file('META/combined_sepolicy', 'precompiled_sepolicy')
+  copy_selinux_file('SYSTEM/etc/selinux/plat_sepolicy_and_mapping.sha256',
+                    'precompiled_sepolicy.plat_sepolicy_and_mapping.sha256')
+  copy_selinux_file(
+      'SYSTEM_EXT/etc/selinux/system_ext_sepolicy_and_mapping.sha256',
+      'precompiled_sepolicy.system_ext_sepolicy_and_mapping.sha256')
+  copy_selinux_file('PRODUCT/etc/selinux/product_sepolicy_and_mapping.sha256',
+                    'precompiled_sepolicy.product_sepolicy_and_mapping.sha256')
+
+  if not OPTIONS.vendor_otatools:
+    # Remove the partition from the merged target-files archive. It will be
+    # rebuilt later automatically by generate_missing_images().
+    os.remove(os.path.join(target_files_dir, 'IMAGES', partition_img))
+    return
+
+  # TODO(b/192253131): Remove the need for vendor_otatools by fixing
+  # backwards-compatibility issues when compiling images across releases.
+  if not OPTIONS.vendor_target_files:
+    raise ValueError(
+        'Expected vendor_target_files if vendor_otatools is not None.')
+  logger.info(
+      '%s recompilation will be performed using the vendor otatools.zip',
+      partition_img)
+
+  # Unzip the vendor build's otatools.zip and target-files archive.
+  vendor_otatools_dir = common.MakeTempDir(
+      prefix='merge_target_files_vendor_otatools_')
+  vendor_target_files_dir = common.MakeTempDir(
+      prefix='merge_target_files_vendor_target_files_')
+  common.UnzipToDir(OPTIONS.vendor_otatools, vendor_otatools_dir)
+  common.UnzipToDir(OPTIONS.vendor_target_files, vendor_target_files_dir)
+
+  # Copy the partition contents from the merged target-files archive to the
+  # vendor target-files archive.
+  shutil.rmtree(os.path.join(vendor_target_files_dir, partition.upper()))
+  shutil.copytree(
+      os.path.join(target_files_dir, partition.upper()),
+      os.path.join(vendor_target_files_dir, partition.upper()),
+      symlinks=True)
+
+  # Delete then rebuild the partition.
+  os.remove(os.path.join(vendor_target_files_dir, 'IMAGES', partition_img))
+  rebuild_partition_command = [
+      os.path.join(vendor_otatools_dir, 'bin', 'add_img_to_target_files'),
+      '--verbose',
+      '--add_missing',
+  ]
+  if OPTIONS.rebuild_recovery:
+    rebuild_partition_command.append('--rebuild_recovery')
+  rebuild_partition_command.append(vendor_target_files_dir)
+  logger.info('Recompiling %s: %s', partition_img,
+              ' '.join(rebuild_partition_command))
+  common.RunAndCheckOutput(rebuild_partition_command, verbose=True)
+
+  # Move the newly-created image to the merged target files dir.
+  if not os.path.exists(os.path.join(target_files_dir, 'IMAGES')):
+    os.makedirs(os.path.join(target_files_dir, 'IMAGES'))
+  shutil.move(
+      os.path.join(vendor_target_files_dir, 'IMAGES', partition_img),
+      os.path.join(target_files_dir, 'IMAGES', partition_img))
+  shutil.move(
+      os.path.join(vendor_target_files_dir, 'IMAGES', partition_map),
+      os.path.join(target_files_dir, 'IMAGES', partition_map))
+
+  def copy_recovery_file(filename):
+    for subdir in ('VENDOR', 'SYSTEM/vendor'):
+      source = os.path.join(vendor_target_files_dir, subdir, filename)
+      if os.path.exists(source):
+        dest = os.path.join(target_files_dir, subdir, filename)
+        shutil.copy(source, dest)
+        return
+    logger.info('Skipping copy_recovery_file for %s, file not found', filename)
+
+  if OPTIONS.rebuild_recovery:
+    copy_recovery_file('etc/recovery.img')
+    copy_recovery_file('bin/install-recovery.sh')
+    copy_recovery_file('recovery-from-boot.p')
+
+
+def generate_super_empty_image(target_dir, output_super_empty):
+  """Generates super_empty image from target package.
+
+  Args:
+    target_dir: Path to the target file package which contains misc_info.txt for
+      detailed information for super image.
+    output_super_empty: If provided, copies a super_empty.img file from the
+      target files package to this path.
+  """
+  # Create super_empty.img using the merged misc_info.txt.
+
+  misc_info_txt = os.path.join(target_dir, 'META', 'misc_info.txt')
+
+  use_dynamic_partitions = common.LoadDictionaryFromFile(misc_info_txt).get(
+      'use_dynamic_partitions')
+
+  if use_dynamic_partitions != 'true' and output_super_empty:
+    raise ValueError(
+        'Building super_empty.img requires use_dynamic_partitions=true.')
+  elif use_dynamic_partitions == 'true':
+    super_empty_img = os.path.join(target_dir, 'IMAGES', 'super_empty.img')
+    build_super_image_args = [
+        misc_info_txt,
+        super_empty_img,
+    ]
+    build_super_image.main(build_super_image_args)
+
+    # Copy super_empty.img to the user-provided output_super_empty location.
+    if output_super_empty:
+      shutil.copyfile(super_empty_img, output_super_empty)
+
+
+def create_target_files_archive(output_zip, source_dir, temp_dir):
+  """Creates a target_files zip archive from the input source dir.
+
+  Args:
+    output_zip: The name of the zip archive target files package.
+    source_dir: The target directory contains package to be archived.
+    temp_dir: Path to temporary directory for any intermediate files.
+  """
+  output_target_files_list = os.path.join(temp_dir, 'output.list')
+  output_target_files_meta_dir = os.path.join(source_dir, 'META')
+
+  def files_from_path(target_path, extra_args=None):
+    """Gets files under the given path and return a sorted list."""
+    find_command = ['find', target_path] + (extra_args or [])
+    find_process = common.Run(
+        find_command, stdout=subprocess.PIPE, verbose=False)
+    return common.RunAndCheckOutput(['sort'],
+                                    stdin=find_process.stdout,
+                                    verbose=False)
+
+  # META content appears first in the zip. This is done by the
+  # standard build system for optimized extraction of those files,
+  # so we do the same step for merged target_files.zips here too.
+  meta_content = files_from_path(output_target_files_meta_dir)
+  other_content = files_from_path(
+      source_dir,
+      ['-path', output_target_files_meta_dir, '-prune', '-o', '-print'])
+
+  with open(output_target_files_list, 'w') as f:
+    f.write(meta_content)
+    f.write(other_content)
+
+  command = [
+      'soong_zip',
+      '-d',
+      '-o',
+      os.path.abspath(output_zip),
+      '-C',
+      source_dir,
+      '-r',
+      output_target_files_list,
+  ]
+
+  logger.info('creating %s', output_zip)
+  common.RunAndCheckOutput(command, verbose=True)
+  logger.info('finished creating %s', output_zip)
+
+
+def merge_target_files(temp_dir):
+  """Merges two target files packages together.
+
+  This function uses framework and vendor target files packages as input,
+  performs various file extractions, special case processing, and finally
+  creates a merged zip archive as output.
+
+  Args:
+    temp_dir: The name of a directory we use when we extract items from the
+      input target files packages, and also a scratch directory that we use for
+      temporary files.
+  """
+
+  logger.info('starting: merge framework %s and vendor %s into output %s',
+              OPTIONS.framework_target_files, OPTIONS.vendor_target_files,
+              OPTIONS.output_target_files)
+
+  output_target_files_temp_dir = create_merged_package(temp_dir)
+
+  partition_map = common.PartitionMapFromTargetFiles(
+      output_target_files_temp_dir)
+
+  compatibility_errors = merge_compatibility_checks.CheckCompatibility(
+      target_files_dir=output_target_files_temp_dir,
+      partition_map=partition_map)
+  if compatibility_errors:
+    for error in compatibility_errors:
+      logger.error(error)
+    raise ExternalError(
+        'Found incompatibilities in the merged target files package.')
+
+  # Include the compiled policy in an image if requested.
+  if OPTIONS.rebuild_sepolicy:
+    rebuild_image_with_sepolicy(output_target_files_temp_dir)
+
+  generate_missing_images(output_target_files_temp_dir)
+
+  generate_super_empty_image(output_target_files_temp_dir,
+                             OPTIONS.output_super_empty)
+
+  # Finally, create the output target files zip archive and/or copy the
+  # output items to the output target files directory.
+
+  if OPTIONS.output_dir:
+    merge_utils.CopyItems(output_target_files_temp_dir, OPTIONS.output_dir,
+                          OPTIONS.output_item_list)
+
+  if not OPTIONS.output_target_files:
+    return
+
+  create_target_files_archive(OPTIONS.output_target_files,
+                              output_target_files_temp_dir, temp_dir)
+
+  # Create the IMG package from the merged target files package.
+  if OPTIONS.output_img:
+    img_from_target_files.main(
+        [OPTIONS.output_target_files, OPTIONS.output_img])
+
+  # Create the OTA package from the merged target files package.
+
+  if OPTIONS.output_ota:
+    ota_from_target_files.main(
+        [OPTIONS.output_target_files, OPTIONS.output_ota])
+
+
+def main():
+  """The main function.
+
+  Process command line arguments, then call merge_target_files to
+  perform the heavy lifting.
+  """
+
+  common.InitLogging()
+
+  def option_handler(o, a):
+    if o == '--system-target-files':
+      logger.warning(
+          '--system-target-files has been renamed to --framework-target-files')
+      OPTIONS.framework_target_files = a
+    elif o == '--framework-target-files':
+      OPTIONS.framework_target_files = a
+    elif o == '--system-item-list':
+      logger.warning(
+          '--system-item-list has been renamed to --framework-item-list')
+      OPTIONS.framework_item_list = a
+    elif o == '--framework-item-list':
+      OPTIONS.framework_item_list = a
+    elif o == '--system-misc-info-keys':
+      logger.warning('--system-misc-info-keys has been renamed to '
+                     '--framework-misc-info-keys')
+      OPTIONS.framework_misc_info_keys = a
+    elif o == '--framework-misc-info-keys':
+      OPTIONS.framework_misc_info_keys = a
+    elif o == '--other-target-files':
+      logger.warning(
+          '--other-target-files has been renamed to --vendor-target-files')
+      OPTIONS.vendor_target_files = a
+    elif o == '--vendor-target-files':
+      OPTIONS.vendor_target_files = a
+    elif o == '--other-item-list':
+      logger.warning('--other-item-list has been renamed to --vendor-item-list')
+      OPTIONS.vendor_item_list = a
+    elif o == '--vendor-item-list':
+      OPTIONS.vendor_item_list = a
+    elif o == '--output-target-files':
+      OPTIONS.output_target_files = a
+    elif o == '--output-dir':
+      OPTIONS.output_dir = a
+    elif o == '--output-item-list':
+      OPTIONS.output_item_list = a
+    elif o == '--output-ota':
+      OPTIONS.output_ota = a
+    elif o == '--output-img':
+      OPTIONS.output_img = a
+    elif o == '--output-super-empty':
+      OPTIONS.output_super_empty = a
+    elif o == '--rebuild_recovery' or o == '--rebuild-recovery':
+      OPTIONS.rebuild_recovery = True
+    elif o == '--allow-duplicate-apkapex-keys':
+      OPTIONS.allow_duplicate_apkapex_keys = True
+    elif o == '--vendor-otatools':
+      OPTIONS.vendor_otatools = a
+    elif o == '--rebuild-sepolicy':
+      OPTIONS.rebuild_sepolicy = True
+    elif o == '--keep-tmp':
+      OPTIONS.keep_tmp = True
+    elif o == '--framework-dexpreopt-config':
+      OPTIONS.framework_dexpreopt_config = a
+    elif o == '--framework-dexpreopt-tools':
+      OPTIONS.framework_dexpreopt_tools = a
+    elif o == '--vendor-dexpreopt-config':
+      OPTIONS.vendor_dexpreopt_config = a
+    else:
+      return False
+    return True
+
+  args = common.ParseOptions(
+      sys.argv[1:],
+      __doc__,
+      extra_long_opts=[
+          'system-target-files=',
+          'framework-target-files=',
+          'system-item-list=',
+          'framework-item-list=',
+          'system-misc-info-keys=',
+          'framework-misc-info-keys=',
+          'other-target-files=',
+          'vendor-target-files=',
+          'other-item-list=',
+          'vendor-item-list=',
+          'output-target-files=',
+          'output-dir=',
+          'output-item-list=',
+          'output-ota=',
+          'output-img=',
+          'output-super-empty=',
+          'framework-dexpreopt-config=',
+          'framework-dexpreopt-tools=',
+          'vendor-dexpreopt-config=',
+          'rebuild_recovery',
+          'rebuild-recovery',
+          'allow-duplicate-apkapex-keys',
+          'vendor-otatools=',
+          'rebuild-sepolicy',
+          'keep-tmp',
+      ],
+      extra_option_handler=option_handler)
+
+  # pylint: disable=too-many-boolean-expressions
+  if (args or OPTIONS.framework_target_files is None or
+      OPTIONS.vendor_target_files is None or
+      (OPTIONS.output_target_files is None and OPTIONS.output_dir is None) or
+      (OPTIONS.output_dir is not None and not OPTIONS.output_item_list) or
+      (OPTIONS.rebuild_recovery and not OPTIONS.rebuild_sepolicy)):
+    common.Usage(__doc__)
+    sys.exit(1)
+
+  with zipfile.ZipFile(OPTIONS.framework_target_files, allowZip64=True) as fz:
+    framework_namelist = fz.namelist()
+  with zipfile.ZipFile(OPTIONS.vendor_target_files, allowZip64=True) as vz:
+    vendor_namelist = vz.namelist()
+
+  if OPTIONS.framework_item_list:
+    OPTIONS.framework_item_list = common.LoadListFromFile(
+        OPTIONS.framework_item_list)
+  else:
+    OPTIONS.framework_item_list = merge_utils.InferItemList(
+        input_namelist=framework_namelist, framework=True)
+  OPTIONS.framework_partition_set = merge_utils.ItemListToPartitionSet(
+      OPTIONS.framework_item_list)
+
+  if OPTIONS.framework_misc_info_keys:
+    OPTIONS.framework_misc_info_keys = common.LoadListFromFile(
+        OPTIONS.framework_misc_info_keys)
+  else:
+    OPTIONS.framework_misc_info_keys = merge_utils.InferFrameworkMiscInfoKeys(
+        input_namelist=framework_namelist)
+
+  if OPTIONS.vendor_item_list:
+    OPTIONS.vendor_item_list = common.LoadListFromFile(OPTIONS.vendor_item_list)
+  else:
+    OPTIONS.vendor_item_list = merge_utils.InferItemList(
+        input_namelist=vendor_namelist, framework=False)
+  OPTIONS.vendor_partition_set = merge_utils.ItemListToPartitionSet(
+      OPTIONS.vendor_item_list)
+
+  if OPTIONS.output_item_list:
+    OPTIONS.output_item_list = common.LoadListFromFile(OPTIONS.output_item_list)
+
+  if not merge_utils.ValidateConfigLists():
+    sys.exit(1)
+
+  temp_dir = common.MakeTempDir(prefix='merge_target_files_')
+  try:
+    merge_target_files(temp_dir)
+  finally:
+    if OPTIONS.keep_tmp:
+      logger.info('Keeping temp_dir %s', temp_dir)
+    else:
+      common.Cleanup()
+
+
+if __name__ == '__main__':
+  main()
diff --git a/tools/releasetools/merge/merge_utils.py b/tools/releasetools/merge/merge_utils.py
new file mode 100644
index 0000000..f623ad2
--- /dev/null
+++ b/tools/releasetools/merge/merge_utils.py
@@ -0,0 +1,237 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+#
+"""Common utility functions shared by merge_* scripts.
+
+Expects items in OPTIONS prepared by merge_target_files.py.
+"""
+
+import fnmatch
+import logging
+import os
+import re
+import shutil
+import zipfile
+
+import common
+
+logger = logging.getLogger(__name__)
+OPTIONS = common.OPTIONS
+
+
+def ExtractItems(input_zip, output_dir, extract_item_list):
+  """Extracts items in extract_item_list from a zip to a dir."""
+
+  # Filter the extract_item_list to remove any items that do not exist in the
+  # zip file. Otherwise, the extraction step will fail.
+
+  with zipfile.ZipFile(input_zip, allowZip64=True) as input_zipfile:
+    input_namelist = input_zipfile.namelist()
+
+  filtered_extract_item_list = []
+  for pattern in extract_item_list:
+    if fnmatch.filter(input_namelist, pattern):
+      filtered_extract_item_list.append(pattern)
+
+  common.UnzipToDir(input_zip, output_dir, filtered_extract_item_list)
+
+
+def CopyItems(from_dir, to_dir, patterns):
+  """Similar to ExtractItems() except uses an input dir instead of zip."""
+  file_paths = []
+  for dirpath, _, filenames in os.walk(from_dir):
+    file_paths.extend(
+        os.path.relpath(path=os.path.join(dirpath, filename), start=from_dir)
+        for filename in filenames)
+
+  filtered_file_paths = set()
+  for pattern in patterns:
+    filtered_file_paths.update(fnmatch.filter(file_paths, pattern))
+
+  for file_path in filtered_file_paths:
+    original_file_path = os.path.join(from_dir, file_path)
+    copied_file_path = os.path.join(to_dir, file_path)
+    copied_file_dir = os.path.dirname(copied_file_path)
+    if not os.path.exists(copied_file_dir):
+      os.makedirs(copied_file_dir)
+    if os.path.islink(original_file_path):
+      os.symlink(os.readlink(original_file_path), copied_file_path)
+    else:
+      shutil.copyfile(original_file_path, copied_file_path)
+
+
+def WriteSortedData(data, path):
+  """Writes the sorted contents of either a list or dict to file.
+
+  This function sorts the contents of the list or dict and then writes the
+  resulting sorted contents to a file specified by path.
+
+  Args:
+    data: The list or dict to sort and write.
+    path: Path to the file to write the sorted values to. The file at path will
+      be overridden if it exists.
+  """
+  with open(path, 'w') as output:
+    for entry in sorted(data):
+      out_str = '{}={}\n'.format(entry, data[entry]) if isinstance(
+          data, dict) else '{}\n'.format(entry)
+      output.write(out_str)
+
+
+def ValidateConfigLists():
+  """Performs validations on the merge config lists.
+
+  Returns:
+    False if a validation fails, otherwise true.
+  """
+  has_error = False
+
+  # Check that partitions only come from one input.
+  for partition in _FRAMEWORK_PARTITIONS.union(_VENDOR_PARTITIONS):
+    image_path = 'IMAGES/{}.img'.format(partition.lower().replace('/', ''))
+    in_framework = (
+        any(item.startswith(partition) for item in OPTIONS.framework_item_list)
+        or image_path in OPTIONS.framework_item_list)
+    in_vendor = (
+        any(item.startswith(partition) for item in OPTIONS.vendor_item_list) or
+        image_path in OPTIONS.vendor_item_list)
+    if in_framework and in_vendor:
+      logger.error(
+          'Cannot extract items from %s for both the framework and vendor'
+          ' builds. Please ensure only one merge config item list'
+          ' includes %s.', partition, partition)
+      has_error = True
+
+  if any([
+      key in OPTIONS.framework_misc_info_keys
+      for key in ('dynamic_partition_list', 'super_partition_groups')
+  ]):
+    logger.error('Dynamic partition misc info keys should come from '
+                 'the vendor instance of META/misc_info.txt.')
+    has_error = True
+
+  return not has_error
+
+
+# In an item list (framework or vendor), we may see entries that select whole
+# partitions. Such an entry might look like this 'SYSTEM/*' (e.g., for the
+# system partition). The following regex matches this and extracts the
+# partition name.
+
+_PARTITION_ITEM_PATTERN = re.compile(r'^([A-Z_]+)/\*$')
+
+
+def ItemListToPartitionSet(item_list):
+  """Converts a target files item list to a partition set.
+
+  The item list contains items that might look like 'SYSTEM/*' or 'VENDOR/*' or
+  'OTA/android-info.txt'. Items that end in '/*' are assumed to match entire
+  directories where 'SYSTEM' or 'VENDOR' is a directory name that identifies the
+  contents of a partition of the same name. Other items in the list, such as the
+  'OTA' example contain metadata. This function iterates such a list, returning
+  a set that contains the partition entries.
+
+  Args:
+    item_list: A list of items in a target files package.
+
+  Returns:
+    A set of partitions extracted from the list of items.
+  """
+
+  partition_set = set()
+
+  for item in item_list:
+    partition_match = _PARTITION_ITEM_PATTERN.search(item.strip())
+    partition_tag = partition_match.group(
+        1).lower() if partition_match else None
+
+    if partition_tag:
+      partition_set.add(partition_tag)
+
+  return partition_set
+
+
+# Partitions that are grabbed from the framework partial build by default.
+_FRAMEWORK_PARTITIONS = {
+    'system', 'product', 'system_ext', 'system_other', 'root', 'system_dlkm'
+}
+# Partitions that are grabbed from the vendor partial build by default.
+_VENDOR_PARTITIONS = {
+    'vendor', 'odm', 'oem', 'boot', 'vendor_boot', 'recovery',
+    'prebuilt_images', 'radio', 'data', 'vendor_dlkm', 'odm_dlkm'
+}
+
+
+def InferItemList(input_namelist, framework):
+  item_list = []
+
+  # Some META items are grabbed from partial builds directly.
+  # Others are combined in merge_meta.py.
+  if framework:
+    item_list.extend([
+        'META/liblz4.so',
+        'META/postinstall_config.txt',
+        'META/update_engine_config.txt',
+        'META/zucchini_config.txt',
+    ])
+  else:  # vendor
+    item_list.extend([
+        'META/kernel_configs.txt',
+        'META/kernel_version.txt',
+        'META/otakeys.txt',
+        'META/releasetools.py',
+        'OTA/android-info.txt',
+    ])
+
+  # Grab a set of items for the expected partitions in the partial build.
+  for partition in (_FRAMEWORK_PARTITIONS if framework else _VENDOR_PARTITIONS):
+    for namelist in input_namelist:
+      if namelist.startswith('%s/' % partition.upper()):
+        fs_config_prefix = '' if partition == 'system' else '%s_' % partition
+        item_list.extend([
+            '%s/*' % partition.upper(),
+            'IMAGES/%s.img' % partition,
+            'IMAGES/%s.map' % partition,
+            'META/%sfilesystem_config.txt' % fs_config_prefix,
+        ])
+        break
+
+  return sorted(item_list)
+
+
+def InferFrameworkMiscInfoKeys(input_namelist):
+  keys = [
+      'ab_update',
+      'avb_vbmeta_system',
+      'avb_vbmeta_system_algorithm',
+      'avb_vbmeta_system_key_path',
+      'avb_vbmeta_system_rollback_index_location',
+      'default_system_dev_certificate',
+  ]
+
+  for partition in _FRAMEWORK_PARTITIONS:
+    for namelist in input_namelist:
+      if namelist.startswith('%s/' % partition.upper()):
+        fs_type_prefix = '' if partition == 'system' else '%s_' % partition
+        keys.extend([
+            'avb_%s_hashtree_enable' % partition,
+            'avb_%s_add_hashtree_footer_args' % partition,
+            '%s_disable_sparse' % partition,
+            'building_%s_image' % partition,
+            '%sfs_type' % fs_type_prefix,
+        ])
+
+  return sorted(keys)
diff --git a/tools/releasetools/merge/test_merge_compatibility_checks.py b/tools/releasetools/merge/test_merge_compatibility_checks.py
new file mode 100644
index 0000000..0f319de
--- /dev/null
+++ b/tools/releasetools/merge/test_merge_compatibility_checks.py
@@ -0,0 +1,114 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os.path
+import shutil
+
+import common
+import merge_compatibility_checks
+import merge_target_files
+import test_utils
+
+
+class MergeCompatibilityChecksTest(test_utils.ReleaseToolsTestCase):
+
+  def setUp(self):
+    self.testdata_dir = test_utils.get_testdata_dir()
+    self.partition_map = {
+        'system': 'system',
+        'system_ext': 'system_ext',
+        'product': 'product',
+        'vendor': 'vendor',
+        'odm': 'odm',
+    }
+    self.OPTIONS = merge_target_files.OPTIONS
+    self.OPTIONS.framework_partition_set = set(
+        ['product', 'system', 'system_ext'])
+    self.OPTIONS.vendor_partition_set = set(['odm', 'vendor'])
+
+  def test_CheckCombinedSepolicy(self):
+    product_out_dir = common.MakeTempDir()
+
+    def write_temp_file(path, data=''):
+      full_path = os.path.join(product_out_dir, path)
+      if not os.path.exists(os.path.dirname(full_path)):
+        os.makedirs(os.path.dirname(full_path))
+      with open(full_path, 'w') as f:
+        f.write(data)
+
+    write_temp_file(
+        'system/etc/vintf/compatibility_matrix.device.xml', """
+      <compatibility-matrix>
+        <sepolicy>
+          <kernel-sepolicy-version>30</kernel-sepolicy-version>
+        </sepolicy>
+      </compatibility-matrix>""")
+    write_temp_file('vendor/etc/selinux/plat_sepolicy_vers.txt', '30.0')
+
+    write_temp_file('system/etc/selinux/plat_sepolicy.cil')
+    write_temp_file('system/etc/selinux/mapping/30.0.cil')
+    write_temp_file('product/etc/selinux/mapping/30.0.cil')
+    write_temp_file('vendor/etc/selinux/vendor_sepolicy.cil')
+    write_temp_file('vendor/etc/selinux/plat_pub_versioned.cil')
+
+    cmd = merge_compatibility_checks.CheckCombinedSepolicy(
+        product_out_dir, self.partition_map, execute=False)
+    self.assertEqual(' '.join(cmd),
+                     ('secilc -m -M true -G -N -c 30 '
+                      '-o {OTP}/META/combined_sepolicy -f /dev/null '
+                      '{OTP}/system/etc/selinux/plat_sepolicy.cil '
+                      '{OTP}/system/etc/selinux/mapping/30.0.cil '
+                      '{OTP}/vendor/etc/selinux/vendor_sepolicy.cil '
+                      '{OTP}/vendor/etc/selinux/plat_pub_versioned.cil '
+                      '{OTP}/product/etc/selinux/mapping/30.0.cil').format(
+                          OTP=product_out_dir))
+
+  def _copy_apex(self, source, output_dir, partition):
+    shutil.copy(
+        source,
+        os.path.join(output_dir, partition, 'apex', os.path.basename(source)))
+
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_CheckApexDuplicatePackages(self):
+    output_dir = common.MakeTempDir()
+    os.makedirs(os.path.join(output_dir, 'SYSTEM/apex'))
+    os.makedirs(os.path.join(output_dir, 'VENDOR/apex'))
+
+    self._copy_apex(
+        os.path.join(self.testdata_dir, 'has_apk.apex'), output_dir, 'SYSTEM')
+    self._copy_apex(
+        os.path.join(test_utils.get_current_dir(),
+                     'com.android.apex.compressed.v1.capex'), output_dir,
+        'VENDOR')
+    self.assertEqual(
+        len(
+            merge_compatibility_checks.CheckApexDuplicatePackages(
+                output_dir, self.partition_map)), 0)
+
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_CheckApexDuplicatePackages_RaisesOnPackageInMultiplePartitions(self):
+    output_dir = common.MakeTempDir()
+    os.makedirs(os.path.join(output_dir, 'SYSTEM/apex'))
+    os.makedirs(os.path.join(output_dir, 'VENDOR/apex'))
+
+    same_apex_package = os.path.join(self.testdata_dir, 'has_apk.apex')
+    self._copy_apex(same_apex_package, output_dir, 'SYSTEM')
+    self._copy_apex(same_apex_package, output_dir, 'VENDOR')
+    self.assertEqual(
+        merge_compatibility_checks.CheckApexDuplicatePackages(
+            output_dir, self.partition_map)[0],
+        'Duplicate APEX package_names found in multiple partitions: com.android.wifi'
+    )
diff --git a/tools/releasetools/merge/test_merge_meta.py b/tools/releasetools/merge/test_merge_meta.py
new file mode 100644
index 0000000..34fe580
--- /dev/null
+++ b/tools/releasetools/merge/test_merge_meta.py
@@ -0,0 +1,110 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os.path
+import shutil
+
+import common
+import merge_meta
+import merge_target_files
+import test_utils
+
+
+class MergeMetaTest(test_utils.ReleaseToolsTestCase):
+
+  def setUp(self):
+    self.testdata_dir = test_utils.get_testdata_dir()
+    self.OPTIONS = merge_target_files.OPTIONS
+    self.OPTIONS.framework_partition_set = set(
+        ['product', 'system', 'system_ext'])
+    self.OPTIONS.vendor_partition_set = set(['odm', 'vendor'])
+
+  def test_MergePackageKeys_ReturnsTrueIfNoConflicts(self):
+    output_meta_dir = common.MakeTempDir()
+
+    framework_meta_dir = common.MakeTempDir()
+    os.symlink(
+        os.path.join(self.testdata_dir, 'apexkeys_framework.txt'),
+        os.path.join(framework_meta_dir, 'apexkeys.txt'))
+
+    vendor_meta_dir = common.MakeTempDir()
+    os.symlink(
+        os.path.join(self.testdata_dir, 'apexkeys_vendor.txt'),
+        os.path.join(vendor_meta_dir, 'apexkeys.txt'))
+
+    merge_meta.MergePackageKeys(framework_meta_dir, vendor_meta_dir,
+                                output_meta_dir, 'apexkeys.txt')
+
+    merged_entries = []
+    merged_path = os.path.join(self.testdata_dir, 'apexkeys_merge.txt')
+
+    with open(merged_path) as f:
+      merged_entries = f.read().split('\n')
+
+    output_entries = []
+    output_path = os.path.join(output_meta_dir, 'apexkeys.txt')
+
+    with open(output_path) as f:
+      output_entries = f.read().split('\n')
+
+    return self.assertEqual(merged_entries, output_entries)
+
+  def test_MergePackageKeys_ReturnsFalseIfConflictsPresent(self):
+    output_meta_dir = common.MakeTempDir()
+
+    framework_meta_dir = common.MakeTempDir()
+    os.symlink(
+        os.path.join(self.testdata_dir, 'apexkeys_framework.txt'),
+        os.path.join(framework_meta_dir, 'apexkeys.txt'))
+
+    conflict_meta_dir = common.MakeTempDir()
+    os.symlink(
+        os.path.join(self.testdata_dir, 'apexkeys_framework_conflict.txt'),
+        os.path.join(conflict_meta_dir, 'apexkeys.txt'))
+
+    self.assertRaises(ValueError, merge_meta.MergePackageKeys,
+                      framework_meta_dir, conflict_meta_dir, output_meta_dir,
+                      'apexkeys.txt')
+
+  def test_MergePackageKeys_HandlesApkCertsSyntax(self):
+    output_meta_dir = common.MakeTempDir()
+
+    framework_meta_dir = common.MakeTempDir()
+    os.symlink(
+        os.path.join(self.testdata_dir, 'apkcerts_framework.txt'),
+        os.path.join(framework_meta_dir, 'apkcerts.txt'))
+
+    vendor_meta_dir = common.MakeTempDir()
+    os.symlink(
+        os.path.join(self.testdata_dir, 'apkcerts_vendor.txt'),
+        os.path.join(vendor_meta_dir, 'apkcerts.txt'))
+
+    merge_meta.MergePackageKeys(framework_meta_dir, vendor_meta_dir,
+                                output_meta_dir, 'apkcerts.txt')
+
+    merged_entries = []
+    merged_path = os.path.join(self.testdata_dir, 'apkcerts_merge.txt')
+
+    with open(merged_path) as f:
+      merged_entries = f.read().split('\n')
+
+    output_entries = []
+    output_path = os.path.join(output_meta_dir, 'apkcerts.txt')
+
+    with open(output_path) as f:
+      output_entries = f.read().split('\n')
+
+    return self.assertEqual(merged_entries, output_entries)
diff --git a/tools/releasetools/merge/test_merge_utils.py b/tools/releasetools/merge/test_merge_utils.py
new file mode 100644
index 0000000..1949050
--- /dev/null
+++ b/tools/releasetools/merge/test_merge_utils.py
@@ -0,0 +1,197 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os.path
+
+import common
+import merge_target_files
+import merge_utils
+import test_utils
+
+
+class MergeUtilsTest(test_utils.ReleaseToolsTestCase):
+
+  def setUp(self):
+    self.OPTIONS = merge_target_files.OPTIONS
+
+  def test_CopyItems_CopiesItemsMatchingPatterns(self):
+
+    def createEmptyFile(path):
+      if not os.path.exists(os.path.dirname(path)):
+        os.makedirs(os.path.dirname(path))
+      open(path, 'a').close()
+      return path
+
+    def createSymLink(source, dest):
+      os.symlink(source, dest)
+      return dest
+
+    def getRelPaths(start, filepaths):
+      return set(
+          os.path.relpath(path=filepath, start=start) for filepath in filepaths)
+
+    input_dir = common.MakeTempDir()
+    output_dir = common.MakeTempDir()
+    expected_copied_items = []
+    actual_copied_items = []
+    patterns = ['*.cpp', 'subdir/*.txt']
+
+    # Create various files that we expect to get copied because they
+    # match one of the patterns.
+    expected_copied_items.extend([
+        createEmptyFile(os.path.join(input_dir, 'a.cpp')),
+        createEmptyFile(os.path.join(input_dir, 'b.cpp')),
+        createEmptyFile(os.path.join(input_dir, 'subdir', 'c.txt')),
+        createEmptyFile(os.path.join(input_dir, 'subdir', 'd.txt')),
+        createEmptyFile(
+            os.path.join(input_dir, 'subdir', 'subsubdir', 'e.txt')),
+        createSymLink('a.cpp', os.path.join(input_dir, 'a_link.cpp')),
+    ])
+    # Create some more files that we expect to not get copied.
+    createEmptyFile(os.path.join(input_dir, 'a.h'))
+    createEmptyFile(os.path.join(input_dir, 'b.h'))
+    createEmptyFile(os.path.join(input_dir, 'subdir', 'subsubdir', 'f.gif'))
+    createSymLink('a.h', os.path.join(input_dir, 'a_link.h'))
+
+    # Copy items.
+    merge_utils.CopyItems(input_dir, output_dir, patterns)
+
+    # Assert the actual copied items match the ones we expected.
+    for dirpath, _, filenames in os.walk(output_dir):
+      actual_copied_items.extend(
+          os.path.join(dirpath, filename) for filename in filenames)
+    self.assertEqual(
+        getRelPaths(output_dir, actual_copied_items),
+        getRelPaths(input_dir, expected_copied_items))
+    self.assertEqual(
+        os.readlink(os.path.join(output_dir, 'a_link.cpp')), 'a.cpp')
+
+  def test_ValidateConfigLists_ReturnsFalseIfSharedExtractedPartition(self):
+    self.OPTIONS.system_item_list = [
+        'SYSTEM/*',
+    ]
+    self.OPTIONS.vendor_item_list = [
+        'SYSTEM/my_system_file',
+        'VENDOR/*',
+    ]
+    self.OPTIONS.vendor_item_list.append('SYSTEM/my_system_file')
+    self.assertFalse(merge_utils.ValidateConfigLists())
+
+  def test_ValidateConfigLists_ReturnsFalseIfSharedExtractedPartitionImage(
+      self):
+    self.OPTIONS.system_item_list = [
+        'SYSTEM/*',
+    ]
+    self.OPTIONS.vendor_item_list = [
+        'IMAGES/system.img',
+        'VENDOR/*',
+    ]
+    self.assertFalse(merge_utils.ValidateConfigLists())
+
+  def test_ValidateConfigLists_ReturnsFalseIfBadSystemMiscInfoKeys(self):
+    for bad_key in ['dynamic_partition_list', 'super_partition_groups']:
+      self.OPTIONS.framework_misc_info_keys = [bad_key]
+      self.assertFalse(merge_utils.ValidateConfigLists())
+
+  def test_ItemListToPartitionSet(self):
+    item_list = [
+        'META/apexkeys.txt',
+        'META/apkcerts.txt',
+        'META/filesystem_config.txt',
+        'PRODUCT/*',
+        'SYSTEM/*',
+        'SYSTEM_EXT/*',
+    ]
+    partition_set = merge_utils.ItemListToPartitionSet(item_list)
+    self.assertEqual(set(['product', 'system', 'system_ext']), partition_set)
+
+  def test_InferItemList_Framework(self):
+    zip_namelist = [
+        'SYSTEM/my_system_file',
+        'PRODUCT/my_product_file',
+    ]
+
+    item_list = merge_utils.InferItemList(zip_namelist, framework=True)
+
+    expected_framework_item_list = [
+        'IMAGES/product.img',
+        'IMAGES/product.map',
+        'IMAGES/system.img',
+        'IMAGES/system.map',
+        'META/filesystem_config.txt',
+        'META/liblz4.so',
+        'META/postinstall_config.txt',
+        'META/product_filesystem_config.txt',
+        'META/update_engine_config.txt',
+        'META/zucchini_config.txt',
+        'PRODUCT/*',
+        'SYSTEM/*',
+    ]
+
+    self.assertEqual(item_list, expected_framework_item_list)
+
+  def test_InferItemList_Vendor(self):
+    zip_namelist = [
+        'VENDOR/my_vendor_file',
+        'ODM/my_odm_file',
+    ]
+
+    item_list = merge_utils.InferItemList(zip_namelist, framework=False)
+
+    expected_vendor_item_list = [
+        'IMAGES/odm.img',
+        'IMAGES/odm.map',
+        'IMAGES/vendor.img',
+        'IMAGES/vendor.map',
+        'META/kernel_configs.txt',
+        'META/kernel_version.txt',
+        'META/odm_filesystem_config.txt',
+        'META/otakeys.txt',
+        'META/releasetools.py',
+        'META/vendor_filesystem_config.txt',
+        'ODM/*',
+        'OTA/android-info.txt',
+        'VENDOR/*',
+    ]
+    self.assertEqual(item_list, expected_vendor_item_list)
+
+  def test_InferFrameworkMiscInfoKeys(self):
+    zip_namelist = [
+        'SYSTEM/my_system_file',
+        'SYSTEM_EXT/my_system_ext_file',
+    ]
+
+    keys = merge_utils.InferFrameworkMiscInfoKeys(zip_namelist)
+
+    expected_keys = [
+        'ab_update',
+        'avb_system_add_hashtree_footer_args',
+        'avb_system_ext_add_hashtree_footer_args',
+        'avb_system_ext_hashtree_enable',
+        'avb_system_hashtree_enable',
+        'avb_vbmeta_system',
+        'avb_vbmeta_system_algorithm',
+        'avb_vbmeta_system_key_path',
+        'avb_vbmeta_system_rollback_index_location',
+        'building_system_ext_image',
+        'building_system_image',
+        'default_system_dev_certificate',
+        'fs_type',
+        'system_disable_sparse',
+        'system_ext_disable_sparse',
+        'system_ext_fs_type',
+    ]
+    self.assertEqual(keys, expected_keys)
diff --git a/tools/releasetools/merge_target_files.py b/tools/releasetools/merge_target_files.py
deleted file mode 100755
index 46ffdb7..0000000
--- a/tools/releasetools/merge_target_files.py
+++ /dev/null
@@ -1,1825 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2019 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not
-# use this file except in compliance with the License. You may obtain a copy of
-# the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations under
-# the License.
-#
-"""This script merges two partial target files packages.
-
-One input package contains framework files, and the other contains vendor files.
-
-This script produces a complete, merged target files package:
-  - This package can be used to generate a flashable IMG package.
-    See --output-img.
-  - This package can be used to generate an OTA package. See --output-ota.
-  - The merged package is checked for compatibility between the two inputs.
-
-Usage: merge_target_files [args]
-
-  --framework-target-files framework-target-files-zip-archive
-      The input target files package containing framework bits. This is a zip
-      archive.
-
-  --framework-item-list framework-item-list-file
-      The optional path to a newline-separated config file that replaces the
-      contents of DEFAULT_FRAMEWORK_ITEM_LIST if provided.
-
-  --framework-misc-info-keys framework-misc-info-keys-file
-      The optional path to a newline-separated config file that replaces the
-      contents of DEFAULT_FRAMEWORK_MISC_INFO_KEYS if provided.
-
-  --vendor-target-files vendor-target-files-zip-archive
-      The input target files package containing vendor bits. This is a zip
-      archive.
-
-  --vendor-item-list vendor-item-list-file
-      The optional path to a newline-separated config file that replaces the
-      contents of DEFAULT_VENDOR_ITEM_LIST if provided.
-
-  --output-target-files output-target-files-package
-      If provided, the output merged target files package. Also a zip archive.
-
-  --output-dir output-directory
-      If provided, the destination directory for saving merged files. Requires
-      the --output-item-list flag.
-      Can be provided alongside --output-target-files, or by itself.
-
-  --output-item-list output-item-list-file.
-      The optional path to a newline-separated config file that specifies the
-      file patterns to copy into the --output-dir. Required if providing
-      the --output-dir flag.
-
-  --output-ota output-ota-package
-      The output ota package. This is a zip archive. Use of this flag may
-      require passing the --path common flag; see common.py.
-
-  --output-img output-img-package
-      The output img package, suitable for use with 'fastboot update'. Use of
-      this flag may require passing the --path common flag; see common.py.
-
-  --output-super-empty output-super-empty-image
-      If provided, creates a super_empty.img file from the merged target
-      files package and saves it at this path.
-
-  --rebuild_recovery
-      Deprecated; does nothing.
-
-  --allow-duplicate-apkapex-keys
-      If provided, duplicate APK/APEX keys are ignored and the value from the
-      framework is used.
-
-  --rebuild-sepolicy
-      If provided, rebuilds odm.img or vendor.img to include merged sepolicy
-      files. If odm is present then odm is preferred.
-
-  --vendor-otatools otatools.zip
-      If provided, use this otatools.zip when recompiling the odm or vendor
-      image to include sepolicy.
-
-  --keep-tmp
-      Keep tempoary files for debugging purposes.
-
-  The following only apply when using the VSDK to perform dexopt on vendor apps:
-
-  --framework-dexpreopt-config
-      If provided, the location of framwework's dexpreopt_config.zip.
-
-  --framework-dexpreopt-tools
-      if provided, the location of framework's dexpreopt_tools.zip.
-
-  --vendor-dexpreopt-config
-      If provided, the location of vendor's dexpreopt_config.zip.
-"""
-
-from __future__ import print_function
-
-import fnmatch
-import glob
-import json
-import logging
-import os
-import re
-import shutil
-import subprocess
-import sys
-import zipfile
-from xml.etree import ElementTree
-
-import add_img_to_target_files
-import apex_utils
-import build_image
-import build_super_image
-import check_target_files_vintf
-import common
-import img_from_target_files
-import find_shareduid_violation
-import ota_from_target_files
-import sparse_img
-import verity_utils
-
-from common import AddCareMapForAbOta, ExternalError, PARTITIONS_WITH_CARE_MAP
-
-logger = logging.getLogger(__name__)
-
-OPTIONS = common.OPTIONS
-# Always turn on verbose logging.
-OPTIONS.verbose = True
-OPTIONS.framework_target_files = None
-OPTIONS.framework_item_list = None
-OPTIONS.framework_misc_info_keys = None
-OPTIONS.vendor_target_files = None
-OPTIONS.vendor_item_list = None
-OPTIONS.output_target_files = None
-OPTIONS.output_dir = None
-OPTIONS.output_item_list = None
-OPTIONS.output_ota = None
-OPTIONS.output_img = None
-OPTIONS.output_super_empty = None
-# TODO(b/132730255): Remove this option.
-OPTIONS.rebuild_recovery = False
-# TODO(b/150582573): Remove this option.
-OPTIONS.allow_duplicate_apkapex_keys = False
-OPTIONS.vendor_otatools = None
-OPTIONS.rebuild_sepolicy = False
-OPTIONS.keep_tmp = False
-OPTIONS.framework_dexpreopt_config = None
-OPTIONS.framework_dexpreopt_tools = None
-OPTIONS.vendor_dexpreopt_config = None
-
-# In an item list (framework or vendor), we may see entries that select whole
-# partitions. Such an entry might look like this 'SYSTEM/*' (e.g., for the
-# system partition). The following regex matches this and extracts the
-# partition name.
-
-PARTITION_ITEM_PATTERN = re.compile(r'^([A-Z_]+)/\*$')
-
-# In apexkeys.txt or apkcerts.txt, we will find partition tags on each entry in
-# the file. We use these partition tags to filter the entries in those files
-# from the two different target files packages to produce a merged apexkeys.txt
-# or apkcerts.txt file. A partition tag (e.g., for the product partition) looks
-# like this: 'partition="product"'. We use the group syntax grab the value of
-# the tag. We use non-greedy matching in case there are other fields on the
-# same line.
-
-PARTITION_TAG_PATTERN = re.compile(r'partition="(.*?)"')
-
-# The sorting algorithm for apexkeys.txt and apkcerts.txt does not include the
-# ".apex" or ".apk" suffix, so we use the following pattern to extract a key.
-
-MODULE_KEY_PATTERN = re.compile(r'name="(.+)\.(apex|apk)"')
-
-# DEFAULT_FRAMEWORK_ITEM_LIST is a list of items to extract from the partial
-# framework target files package as is, meaning these items will land in the
-# output target files package exactly as they appear in the input partial
-# framework target files package.
-
-DEFAULT_FRAMEWORK_ITEM_LIST = (
-    'META/apkcerts.txt',
-    'META/filesystem_config.txt',
-    'META/root_filesystem_config.txt',
-    'META/update_engine_config.txt',
-    'PRODUCT/*',
-    'ROOT/*',
-    'SYSTEM/*',
-)
-
-# DEFAULT_FRAMEWORK_MISC_INFO_KEYS is a list of keys to obtain from the
-# framework instance of META/misc_info.txt. The remaining keys should come
-# from the vendor instance.
-
-DEFAULT_FRAMEWORK_MISC_INFO_KEYS = (
-    'avb_system_hashtree_enable',
-    'avb_system_add_hashtree_footer_args',
-    'avb_system_key_path',
-    'avb_system_algorithm',
-    'avb_system_rollback_index_location',
-    'avb_product_hashtree_enable',
-    'avb_product_add_hashtree_footer_args',
-    'avb_system_ext_hashtree_enable',
-    'avb_system_ext_add_hashtree_footer_args',
-    'system_root_image',
-    'root_dir',
-    'ab_update',
-    'default_system_dev_certificate',
-    'system_size',
-    'building_system_image',
-    'building_system_ext_image',
-    'building_product_image',
-)
-
-# DEFAULT_VENDOR_ITEM_LIST is a list of items to extract from the partial
-# vendor target files package as is, meaning these items will land in the output
-# target files package exactly as they appear in the input partial vendor target
-# files package.
-
-DEFAULT_VENDOR_ITEM_LIST = (
-    'META/boot_filesystem_config.txt',
-    'META/otakeys.txt',
-    'META/releasetools.py',
-    'META/vendor_filesystem_config.txt',
-    'BOOT/*',
-    'DATA/*',
-    'ODM/*',
-    'OTA/android-info.txt',
-    'PREBUILT_IMAGES/*',
-    'RADIO/*',
-    'VENDOR/*',
-)
-
-# The merge config lists should not attempt to extract items from both
-# builds for any of the following partitions. The partitions in
-# SINGLE_BUILD_PARTITIONS should come entirely from a single build (either
-# framework or vendor, but not both).
-
-SINGLE_BUILD_PARTITIONS = (
-    'BOOT/',
-    'DATA/',
-    'ODM/',
-    'PRODUCT/',
-    'SYSTEM_EXT/',
-    'RADIO/',
-    'RECOVERY/',
-    'ROOT/',
-    'SYSTEM/',
-    'SYSTEM_OTHER/',
-    'VENDOR/',
-    'VENDOR_DLKM/',
-    'ODM_DLKM/',
-)
-
-
-def write_sorted_data(data, path):
-  """Writes the sorted contents of either a list or dict to file.
-
-  This function sorts the contents of the list or dict and then writes the
-  resulting sorted contents to a file specified by path.
-
-  Args:
-    data: The list or dict to sort and write.
-    path: Path to the file to write the sorted values to. The file at path will
-      be overridden if it exists.
-  """
-  with open(path, 'w') as output:
-    for entry in sorted(data):
-      out_str = '{}={}\n'.format(entry, data[entry]) if isinstance(
-          data, dict) else '{}\n'.format(entry)
-      output.write(out_str)
-
-
-def extract_items(target_files, target_files_temp_dir, extract_item_list):
-  """Extracts items from target files to temporary directory.
-
-  This function extracts from the specified target files zip archive into the
-  specified temporary directory, the items specified in the extract item list.
-
-  Args:
-    target_files: The target files zip archive from which to extract items.
-    target_files_temp_dir: The temporary directory where the extracted items
-      will land.
-    extract_item_list: A list of items to extract.
-  """
-
-  logger.info('extracting from %s', target_files)
-
-  # Filter the extract_item_list to remove any items that do not exist in the
-  # zip file. Otherwise, the extraction step will fail.
-
-  with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zipfile:
-    target_files_namelist = target_files_zipfile.namelist()
-
-  filtered_extract_item_list = []
-  for pattern in extract_item_list:
-    matching_namelist = fnmatch.filter(target_files_namelist, pattern)
-    if not matching_namelist:
-      logger.warning('no match for %s', pattern)
-    else:
-      filtered_extract_item_list.append(pattern)
-
-  # Extract from target_files into target_files_temp_dir the
-  # filtered_extract_item_list.
-
-  common.UnzipToDir(target_files, target_files_temp_dir,
-                    filtered_extract_item_list)
-
-
-def copy_items(from_dir, to_dir, patterns):
-  """Similar to extract_items() except uses an input dir instead of zip."""
-  file_paths = []
-  for dirpath, _, filenames in os.walk(from_dir):
-    file_paths.extend(
-        os.path.relpath(path=os.path.join(dirpath, filename), start=from_dir)
-        for filename in filenames)
-
-  filtered_file_paths = set()
-  for pattern in patterns:
-    filtered_file_paths.update(fnmatch.filter(file_paths, pattern))
-
-  for file_path in filtered_file_paths:
-    original_file_path = os.path.join(from_dir, file_path)
-    copied_file_path = os.path.join(to_dir, file_path)
-    copied_file_dir = os.path.dirname(copied_file_path)
-    if not os.path.exists(copied_file_dir):
-      os.makedirs(copied_file_dir)
-    if os.path.islink(original_file_path):
-      os.symlink(os.readlink(original_file_path), copied_file_path)
-    else:
-      shutil.copyfile(original_file_path, copied_file_path)
-
-
-def validate_config_lists(framework_item_list, framework_misc_info_keys,
-                          vendor_item_list):
-  """Performs validations on the merge config lists.
-
-  Args:
-    framework_item_list: The list of items to extract from the partial framework
-      target files package as is.
-    framework_misc_info_keys: A list of keys to obtain from the framework
-      instance of META/misc_info.txt. The remaining keys should come from the
-      vendor instance.
-    vendor_item_list: The list of items to extract from the partial vendor
-      target files package as is.
-
-  Returns:
-    False if a validation fails, otherwise true.
-  """
-  has_error = False
-
-  default_combined_item_set = set(DEFAULT_FRAMEWORK_ITEM_LIST)
-  default_combined_item_set.update(DEFAULT_VENDOR_ITEM_LIST)
-
-  combined_item_set = set(framework_item_list)
-  combined_item_set.update(vendor_item_list)
-
-  # Check that the merge config lists are not missing any item specified
-  # by the default config lists.
-  difference = default_combined_item_set.difference(combined_item_set)
-  if difference:
-    logger.error('Missing merge config items: %s', list(difference))
-    logger.error('Please ensure missing items are in either the '
-                 'framework-item-list or vendor-item-list files provided to '
-                 'this script.')
-    has_error = True
-
-  # Check that partitions only come from one input.
-  for partition in SINGLE_BUILD_PARTITIONS:
-    image_path = 'IMAGES/{}.img'.format(partition.lower().replace('/', ''))
-    in_framework = (
-        any(item.startswith(partition) for item in framework_item_list) or
-        image_path in framework_item_list)
-    in_vendor = (
-        any(item.startswith(partition) for item in vendor_item_list) or
-        image_path in vendor_item_list)
-    if in_framework and in_vendor:
-      logger.error(
-          'Cannot extract items from %s for both the framework and vendor'
-          ' builds. Please ensure only one merge config item list'
-          ' includes %s.', partition, partition)
-      has_error = True
-
-  if ('dynamic_partition_list'
-      in framework_misc_info_keys) or ('super_partition_groups'
-                                       in framework_misc_info_keys):
-    logger.error('Dynamic partition misc info keys should come from '
-                 'the vendor instance of META/misc_info.txt.')
-    has_error = True
-
-  return not has_error
-
-
-def process_ab_partitions_txt(framework_target_files_temp_dir,
-                              vendor_target_files_temp_dir,
-                              output_target_files_temp_dir):
-  """Performs special processing for META/ab_partitions.txt.
-
-  This function merges the contents of the META/ab_partitions.txt files from the
-  framework directory and the vendor directory, placing the merged result in the
-  output directory. The precondition in that the files are already extracted.
-  The post condition is that the output META/ab_partitions.txt contains the
-  merged content. The format for each ab_partitions.txt is one partition name
-  per line. The output file contains the union of the partition names.
-
-  Args:
-    framework_target_files_temp_dir: The name of a directory containing the
-      special items extracted from the framework target files package.
-    vendor_target_files_temp_dir: The name of a directory containing the special
-      items extracted from the vendor target files package.
-    output_target_files_temp_dir: The name of a directory that will be used to
-      create the output target files package after all the special cases are
-      processed.
-  """
-
-  framework_ab_partitions_txt = os.path.join(framework_target_files_temp_dir,
-                                             'META', 'ab_partitions.txt')
-
-  vendor_ab_partitions_txt = os.path.join(vendor_target_files_temp_dir, 'META',
-                                          'ab_partitions.txt')
-
-  with open(framework_ab_partitions_txt) as f:
-    framework_ab_partitions = f.read().splitlines()
-
-  with open(vendor_ab_partitions_txt) as f:
-    vendor_ab_partitions = f.read().splitlines()
-
-  output_ab_partitions = set(framework_ab_partitions + vendor_ab_partitions)
-
-  output_ab_partitions_txt = os.path.join(output_target_files_temp_dir, 'META',
-                                          'ab_partitions.txt')
-
-  write_sorted_data(data=output_ab_partitions, path=output_ab_partitions_txt)
-
-
-def process_misc_info_txt(framework_target_files_temp_dir,
-                          vendor_target_files_temp_dir,
-                          output_target_files_temp_dir,
-                          framework_misc_info_keys):
-  """Performs special processing for META/misc_info.txt.
-
-  This function merges the contents of the META/misc_info.txt files from the
-  framework directory and the vendor directory, placing the merged result in the
-  output directory. The precondition in that the files are already extracted.
-  The post condition is that the output META/misc_info.txt contains the merged
-  content.
-
-  Args:
-    framework_target_files_temp_dir: The name of a directory containing the
-      special items extracted from the framework target files package.
-    vendor_target_files_temp_dir: The name of a directory containing the special
-      items extracted from the vendor target files package.
-    output_target_files_temp_dir: The name of a directory that will be used to
-      create the output target files package after all the special cases are
-      processed.
-    framework_misc_info_keys: A list of keys to obtain from the framework
-      instance of META/misc_info.txt. The remaining keys should come from the
-      vendor instance.
-  """
-
-  misc_info_path = ['META', 'misc_info.txt']
-  framework_dict = common.LoadDictionaryFromFile(
-      os.path.join(framework_target_files_temp_dir, *misc_info_path))
-
-  # We take most of the misc info from the vendor target files.
-
-  merged_dict = common.LoadDictionaryFromFile(
-      os.path.join(vendor_target_files_temp_dir, *misc_info_path))
-
-  # Replace certain values in merged_dict with values from
-  # framework_dict.
-
-  for key in framework_misc_info_keys:
-    merged_dict[key] = framework_dict[key]
-
-  # Merge misc info keys used for Dynamic Partitions.
-  if (merged_dict.get('use_dynamic_partitions')
-      == 'true') and (framework_dict.get('use_dynamic_partitions') == 'true'):
-    merged_dynamic_partitions_dict = common.MergeDynamicPartitionInfoDicts(
-        framework_dict=framework_dict, vendor_dict=merged_dict)
-    merged_dict.update(merged_dynamic_partitions_dict)
-    # Ensure that add_img_to_target_files rebuilds super split images for
-    # devices that retrofit dynamic partitions. This flag may have been set to
-    # false in the partial builds to prevent duplicate building of super.img.
-    merged_dict['build_super_partition'] = 'true'
-
-  # If AVB is enabled then ensure that we build vbmeta.img.
-  # Partial builds with AVB enabled may set PRODUCT_BUILD_VBMETA_IMAGE=false to
-  # skip building an incomplete vbmeta.img.
-  if merged_dict.get('avb_enable') == 'true':
-    merged_dict['avb_building_vbmeta_image'] = 'true'
-
-  # Replace <image>_selinux_fc values with framework or vendor file_contexts.bin
-  # depending on which dictionary the key came from.
-  # Only the file basename is required because all selinux_fc properties are
-  # replaced with the full path to the file under META/ when misc_info.txt is
-  # loaded from target files for repacking. See common.py LoadInfoDict().
-  for key in merged_dict:
-    if key.endswith('_selinux_fc'):
-      merged_dict[key] = 'vendor_file_contexts.bin'
-  for key in framework_dict:
-    if key.endswith('_selinux_fc'):
-      merged_dict[key] = 'framework_file_contexts.bin'
-
-  output_misc_info_txt = os.path.join(output_target_files_temp_dir, 'META',
-                                      'misc_info.txt')
-  write_sorted_data(data=merged_dict, path=output_misc_info_txt)
-
-
-def process_dynamic_partitions_info_txt(framework_target_files_dir,
-                                        vendor_target_files_dir,
-                                        output_target_files_dir):
-  """Performs special processing for META/dynamic_partitions_info.txt.
-
-  This function merges the contents of the META/dynamic_partitions_info.txt
-  files from the framework directory and the vendor directory, placing the
-  merged result in the output directory.
-
-  This function does nothing if META/dynamic_partitions_info.txt from the vendor
-  directory does not exist.
-
-  Args:
-    framework_target_files_dir: The name of a directory containing the special
-      items extracted from the framework target files package.
-    vendor_target_files_dir: The name of a directory containing the special
-      items extracted from the vendor target files package.
-    output_target_files_dir: The name of a directory that will be used to create
-      the output target files package after all the special cases are processed.
-  """
-
-  if not os.path.exists(
-      os.path.join(vendor_target_files_dir, 'META',
-                   'dynamic_partitions_info.txt')):
-    return
-
-  dynamic_partitions_info_path = ['META', 'dynamic_partitions_info.txt']
-
-  framework_dynamic_partitions_dict = common.LoadDictionaryFromFile(
-      os.path.join(framework_target_files_dir, *dynamic_partitions_info_path))
-  vendor_dynamic_partitions_dict = common.LoadDictionaryFromFile(
-      os.path.join(vendor_target_files_dir, *dynamic_partitions_info_path))
-
-  merged_dynamic_partitions_dict = common.MergeDynamicPartitionInfoDicts(
-      framework_dict=framework_dynamic_partitions_dict,
-      vendor_dict=vendor_dynamic_partitions_dict)
-
-  output_dynamic_partitions_info_txt = os.path.join(
-      output_target_files_dir, 'META', 'dynamic_partitions_info.txt')
-  write_sorted_data(
-      data=merged_dynamic_partitions_dict,
-      path=output_dynamic_partitions_info_txt)
-
-
-def item_list_to_partition_set(item_list):
-  """Converts a target files item list to a partition set.
-
-  The item list contains items that might look like 'SYSTEM/*' or 'VENDOR/*' or
-  'OTA/android-info.txt'. Items that end in '/*' are assumed to match entire
-  directories where 'SYSTEM' or 'VENDOR' is a directory name that identifies the
-  contents of a partition of the same name. Other items in the list, such as the
-  'OTA' example contain metadata. This function iterates such a list, returning
-  a set that contains the partition entries.
-
-  Args:
-    item_list: A list of items in a target files package.
-
-  Returns:
-    A set of partitions extracted from the list of items.
-  """
-
-  partition_set = set()
-
-  for item in item_list:
-    match = PARTITION_ITEM_PATTERN.search(item.strip())
-    partition_tag = match.group(1).lower() if match else None
-
-    if partition_tag:
-      partition_set.add(partition_tag)
-
-  return partition_set
-
-
-def process_apex_keys_apk_certs_common(framework_target_files_dir,
-                                       vendor_target_files_dir,
-                                       output_target_files_dir,
-                                       framework_partition_set,
-                                       vendor_partition_set, file_name):
-  """Performs special processing for META/apexkeys.txt or META/apkcerts.txt.
-
-  This function merges the contents of the META/apexkeys.txt or
-  META/apkcerts.txt files from the framework directory and the vendor directory,
-  placing the merged result in the output directory. The precondition in that
-  the files are already extracted. The post condition is that the output
-  META/apexkeys.txt or META/apkcerts.txt contains the merged content.
-
-  Args:
-    framework_target_files_dir: The name of a directory containing the special
-      items extracted from the framework target files package.
-    vendor_target_files_dir: The name of a directory containing the special
-      items extracted from the vendor target files package.
-    output_target_files_dir: The name of a directory that will be used to create
-      the output target files package after all the special cases are processed.
-    framework_partition_set: Partitions that are considered framework
-      partitions. Used to filter apexkeys.txt and apkcerts.txt.
-    vendor_partition_set: Partitions that are considered vendor partitions. Used
-      to filter apexkeys.txt and apkcerts.txt.
-    file_name: The name of the file to merge. One of apkcerts.txt or
-      apexkeys.txt.
-  """
-
-  def read_helper(d):
-    temp = {}
-    file_path = os.path.join(d, 'META', file_name)
-    with open(file_path) as f:
-      for line in f:
-        if line.strip():
-          name = line.split()[0]
-          match = MODULE_KEY_PATTERN.search(name)
-          temp[match.group(1)] = line.strip()
-    return temp
-
-  framework_dict = read_helper(framework_target_files_dir)
-  vendor_dict = read_helper(vendor_target_files_dir)
-  merged_dict = {}
-
-  def filter_into_merged_dict(item_dict, partition_set):
-    for key, value in item_dict.items():
-      match = PARTITION_TAG_PATTERN.search(value)
-
-      if match is None:
-        raise ValueError('Entry missing partition tag: %s' % value)
-
-      partition_tag = match.group(1)
-
-      if partition_tag in partition_set:
-        if key in merged_dict:
-          if OPTIONS.allow_duplicate_apkapex_keys:
-            # TODO(b/150582573) Always raise on duplicates.
-            logger.warning('Duplicate key %s' % key)
-            continue
-          else:
-            raise ValueError('Duplicate key %s' % key)
-
-        merged_dict[key] = value
-
-  filter_into_merged_dict(framework_dict, framework_partition_set)
-  filter_into_merged_dict(vendor_dict, vendor_partition_set)
-
-  output_file = os.path.join(output_target_files_dir, 'META', file_name)
-
-  # The following code is similar to write_sorted_data, but different enough
-  # that we couldn't use that function. We need the output to be sorted by the
-  # basename of the apex/apk (without the ".apex" or ".apk" suffix). This
-  # allows the sort to be consistent with the framework/vendor input data and
-  # eases comparison of input data with merged data.
-  with open(output_file, 'w') as output:
-    for key in sorted(merged_dict.keys()):
-      out_str = merged_dict[key] + '\n'
-      output.write(out_str)
-
-
-def copy_file_contexts(framework_target_files_dir, vendor_target_files_dir,
-                       output_target_files_dir):
-  """Creates named copies of each build's file_contexts.bin in output META/."""
-  framework_fc_path = os.path.join(framework_target_files_dir, 'META',
-                                   'framework_file_contexts.bin')
-  if not os.path.exists(framework_fc_path):
-    framework_fc_path = os.path.join(framework_target_files_dir, 'META',
-                                     'file_contexts.bin')
-    if not os.path.exists(framework_fc_path):
-      raise ValueError('Missing framework file_contexts.bin.')
-  shutil.copyfile(
-      framework_fc_path,
-      os.path.join(output_target_files_dir, 'META',
-                   'framework_file_contexts.bin'))
-
-  vendor_fc_path = os.path.join(vendor_target_files_dir, 'META',
-                                'vendor_file_contexts.bin')
-  if not os.path.exists(vendor_fc_path):
-    vendor_fc_path = os.path.join(vendor_target_files_dir, 'META',
-                                  'file_contexts.bin')
-    if not os.path.exists(vendor_fc_path):
-      raise ValueError('Missing vendor file_contexts.bin.')
-  shutil.copyfile(
-      vendor_fc_path,
-      os.path.join(output_target_files_dir, 'META', 'vendor_file_contexts.bin'))
-
-
-def compile_split_sepolicy(product_out, partition_map):
-  """Uses secilc to compile a split sepolicy file.
-
-  Depends on various */etc/selinux/* and */etc/vintf/* files within partitions.
-
-  Args:
-    product_out: PRODUCT_OUT directory, containing partition directories.
-    partition_map: A map of partition name -> relative path within product_out.
-
-  Returns:
-    A command list that can be executed to create the compiled sepolicy.
-  """
-
-  def get_file(partition, path):
-    if partition not in partition_map:
-      logger.warning('Cannot load SEPolicy files for missing partition %s',
-                     partition)
-      return None
-    return os.path.join(product_out, partition_map[partition], path)
-
-  # Load the kernel sepolicy version from the FCM. This is normally provided
-  # directly to selinux.cpp as a build flag, but is also available in this file.
-  fcm_file = get_file('system', 'etc/vintf/compatibility_matrix.device.xml')
-  if not fcm_file or not os.path.exists(fcm_file):
-    raise ExternalError('Missing required file for loading sepolicy: %s', fcm)
-  kernel_sepolicy_version = ElementTree.parse(fcm_file).getroot().find(
-      'sepolicy/kernel-sepolicy-version').text
-
-  # Load the vendor's plat sepolicy version. This is the version used for
-  # locating sepolicy mapping files.
-  vendor_plat_version_file = get_file('vendor',
-                                      'etc/selinux/plat_sepolicy_vers.txt')
-  if not vendor_plat_version_file or not os.path.exists(
-      vendor_plat_version_file):
-    raise ExternalError('Missing required sepolicy file %s',
-                        vendor_plat_version_file)
-  with open(vendor_plat_version_file) as f:
-    vendor_plat_version = f.read().strip()
-
-  # Use the same flags and arguments as selinux.cpp OpenSplitPolicy().
-  cmd = ['secilc', '-m', '-M', 'true', '-G', '-N']
-  cmd.extend(['-c', kernel_sepolicy_version])
-  cmd.extend(['-o', os.path.join(product_out, 'META/combined_sepolicy')])
-  cmd.extend(['-f', '/dev/null'])
-
-  required_policy_files = (
-      ('system', 'etc/selinux/plat_sepolicy.cil'),
-      ('system', 'etc/selinux/mapping/%s.cil' % vendor_plat_version),
-      ('vendor', 'etc/selinux/vendor_sepolicy.cil'),
-      ('vendor', 'etc/selinux/plat_pub_versioned.cil'),
-  )
-  for policy in (map(lambda partition_and_path: get_file(*partition_and_path),
-                     required_policy_files)):
-    if not policy or not os.path.exists(policy):
-      raise ExternalError('Missing required sepolicy file %s', policy)
-    cmd.append(policy)
-
-  optional_policy_files = (
-      ('system', 'etc/selinux/mapping/%s.compat.cil' % vendor_plat_version),
-      ('system_ext', 'etc/selinux/system_ext_sepolicy.cil'),
-      ('system_ext', 'etc/selinux/mapping/%s.cil' % vendor_plat_version),
-      ('product', 'etc/selinux/product_sepolicy.cil'),
-      ('product', 'etc/selinux/mapping/%s.cil' % vendor_plat_version),
-      ('odm', 'etc/selinux/odm_sepolicy.cil'),
-  )
-  for policy in (map(lambda partition_and_path: get_file(*partition_and_path),
-                     optional_policy_files)):
-    if policy and os.path.exists(policy):
-      cmd.append(policy)
-
-  return cmd
-
-
-def validate_merged_apex_info(output_target_files_dir, partitions):
-  """Validates the APEX files in the merged target files directory.
-
-  Checks the APEX files in all possible preinstalled APEX directories.
-  Depends on the <partition>/apex/* APEX files within partitions.
-
-  Args:
-    output_target_files_dir: Output directory containing merged partition
-      directories.
-    partitions: A list of all the partitions in the output directory.
-
-  Raises:
-    RuntimeError: if apex_utils fails to parse any APEX file.
-    ExternalError: if the same APEX package is provided by multiple partitions.
-  """
-  apex_packages = set()
-
-  apex_partitions = ('system', 'system_ext', 'product', 'vendor')
-  for partition in filter(lambda p: p in apex_partitions, partitions):
-    apex_info = apex_utils.GetApexInfoFromTargetFiles(
-        output_target_files_dir, partition, compressed_only=False)
-    partition_apex_packages = set([info.package_name for info in apex_info])
-    duplicates = apex_packages.intersection(partition_apex_packages)
-    if duplicates:
-      raise ExternalError(
-          'Duplicate APEX packages found in multiple partitions: %s' %
-          ' '.join(duplicates))
-    apex_packages.update(partition_apex_packages)
-
-
-def generate_care_map(partitions, output_target_files_dir):
-  """Generates a merged META/care_map.pb file in the output target files dir.
-
-  Depends on the info dict from META/misc_info.txt, as well as built images
-  within IMAGES/.
-
-  Args:
-    partitions: A list of partitions to potentially include in the care map.
-    output_target_files_dir: The name of a directory that will be used to create
-      the output target files package after all the special cases are processed.
-  """
-  OPTIONS.info_dict = common.LoadInfoDict(output_target_files_dir)
-  partition_image_map = {}
-  for partition in partitions:
-    image_path = os.path.join(output_target_files_dir, 'IMAGES',
-                              '{}.img'.format(partition))
-    if os.path.exists(image_path):
-      partition_image_map[partition] = image_path
-      # Regenerated images should have their image_size property already set.
-      image_size_prop = '{}_image_size'.format(partition)
-      if image_size_prop not in OPTIONS.info_dict:
-        # Images copied directly from input target files packages will need
-        # their image sizes calculated.
-        partition_size = sparse_img.GetImagePartitionSize(image_path)
-        image_props = build_image.ImagePropFromGlobalDict(
-            OPTIONS.info_dict, partition)
-        verity_image_builder = verity_utils.CreateVerityImageBuilder(
-            image_props)
-        image_size = verity_image_builder.CalculateMaxImageSize(partition_size)
-        OPTIONS.info_dict[image_size_prop] = image_size
-
-  AddCareMapForAbOta(
-      os.path.join(output_target_files_dir, 'META', 'care_map.pb'),
-      PARTITIONS_WITH_CARE_MAP, partition_image_map)
-
-
-def process_special_cases(temp_dir, framework_meta, vendor_meta,
-                          output_target_files_temp_dir,
-                          framework_misc_info_keys, framework_partition_set,
-                          vendor_partition_set, framework_dexpreopt_tools,
-                          framework_dexpreopt_config, vendor_dexpreopt_config):
-  """Performs special-case processing for certain target files items.
-
-  Certain files in the output target files package require special-case
-  processing. This function performs all that special-case processing.
-
-  Args:
-    temp_dir: Location containing an 'output' directory where target files have
-      been extracted, e.g. <temp_dir>/output/SYSTEM, <temp_dir>/output/IMAGES, etc.
-    framework_meta: The name of a directory containing the special items
-      extracted from the framework target files package.
-    vendor_meta: The name of a directory containing the special items
-      extracted from the vendor target files package.
-    output_target_files_temp_dir: The name of a directory that will be used to
-      create the output target files package after all the special cases are
-      processed.
-    framework_misc_info_keys: A list of keys to obtain from the framework
-      instance of META/misc_info.txt. The remaining keys should come from the
-      vendor instance.
-    framework_partition_set: Partitions that are considered framework
-      partitions. Used to filter apexkeys.txt and apkcerts.txt.
-    vendor_partition_set: Partitions that are considered vendor partitions. Used
-      to filter apexkeys.txt and apkcerts.txt.
-
-    The following are only used if dexpreopt is applied:
-
-    framework_dexpreopt_tools: Location of dexpreopt_tools.zip.
-    framework_dexpreopt_config: Location of framework's dexpreopt_config.zip.
-    vendor_dexpreopt_config: Location of vendor's dexpreopt_config.zip.
-  """
-
-  if 'ab_update' in framework_misc_info_keys:
-    process_ab_partitions_txt(
-        framework_target_files_temp_dir=framework_meta,
-        vendor_target_files_temp_dir=vendor_meta,
-        output_target_files_temp_dir=output_target_files_temp_dir)
-
-  copy_file_contexts(
-      framework_target_files_dir=framework_meta,
-      vendor_target_files_dir=vendor_meta,
-      output_target_files_dir=output_target_files_temp_dir)
-
-  process_misc_info_txt(
-      framework_target_files_temp_dir=framework_meta,
-      vendor_target_files_temp_dir=vendor_meta,
-      output_target_files_temp_dir=output_target_files_temp_dir,
-      framework_misc_info_keys=framework_misc_info_keys)
-
-  process_dynamic_partitions_info_txt(
-      framework_target_files_dir=framework_meta,
-      vendor_target_files_dir=vendor_meta,
-      output_target_files_dir=output_target_files_temp_dir)
-
-  process_apex_keys_apk_certs_common(
-      framework_target_files_dir=framework_meta,
-      vendor_target_files_dir=vendor_meta,
-      output_target_files_dir=output_target_files_temp_dir,
-      framework_partition_set=framework_partition_set,
-      vendor_partition_set=vendor_partition_set,
-      file_name='apkcerts.txt')
-
-  process_apex_keys_apk_certs_common(
-      framework_target_files_dir=framework_meta,
-      vendor_target_files_dir=vendor_meta,
-      output_target_files_dir=output_target_files_temp_dir,
-      framework_partition_set=framework_partition_set,
-      vendor_partition_set=vendor_partition_set,
-      file_name='apexkeys.txt')
-
-  process_dexopt(
-      temp_dir=temp_dir,
-      framework_meta=framework_meta,
-      vendor_meta=vendor_meta,
-      output_target_files_temp_dir=output_target_files_temp_dir,
-      framework_dexpreopt_tools=framework_dexpreopt_tools,
-      framework_dexpreopt_config=framework_dexpreopt_config,
-      vendor_dexpreopt_config=vendor_dexpreopt_config)
-
-
-def process_dexopt(temp_dir, framework_meta, vendor_meta,
-                   output_target_files_temp_dir,
-                   framework_dexpreopt_tools, framework_dexpreopt_config,
-                   vendor_dexpreopt_config):
-  """If needed, generates dexopt files for vendor apps.
-
-  Args:
-    temp_dir: Location containing an 'output' directory where target files have
-      been extracted, e.g. <temp_dir>/output/SYSTEM, <temp_dir>/output/IMAGES, etc.
-    framework_meta: The name of a directory containing the special items
-      extracted from the framework target files package.
-    vendor_meta: The name of a directory containing the special items extracted
-      from the vendor target files package.
-    output_target_files_temp_dir: The name of a directory that will be used to
-      create the output target files package after all the special cases are
-      processed.
-    framework_dexpreopt_tools: Location of dexpreopt_tools.zip.
-    framework_dexpreopt_config: Location of framework's dexpreopt_config.zip.
-    vendor_dexpreopt_config: Location of vendor's dexpreopt_config.zip.
-  """
-  # Load vendor and framework META/misc_info.txt.
-  misc_info_path = ['META', 'misc_info.txt']
-  vendor_misc_info_dict = common.LoadDictionaryFromFile(
-      os.path.join(vendor_meta, *misc_info_path))
-
-  if (vendor_misc_info_dict.get('building_with_vsdk') != 'true' or
-      framework_dexpreopt_tools is None or
-      framework_dexpreopt_config is None or
-      vendor_dexpreopt_config is None):
-    return
-
-  logger.info('applying dexpreopt')
-
-  # The directory structure to apply dexpreopt is:
-  #
-  # <temp_dir>/
-  #     framework_meta/
-  #         META/
-  #     vendor_meta/
-  #         META/
-  #     output/
-  #         SYSTEM/
-  #         VENDOR/
-  #         IMAGES/
-  #         <other items extracted from system and vendor target files>
-  #     tools/
-  #         <contents of dexpreopt_tools.zip>
-  #     system_config/
-  #         <contents of system dexpreopt_config.zip>
-  #     vendor_config/
-  #         <contents of vendor dexpreopt_config.zip>
-  #     system -> output/SYSTEM
-  #     vendor -> output/VENDOR
-  #     apex -> output/SYSTEM/apex (only for flattened APEX builds)
-  #     apex/ (extracted updatable APEX)
-  #         <apex 1>/
-  #             ...
-  #         <apex 2>/
-  #             ...
-  #         ...
-  #     out/dex2oat_result/vendor/
-  #         <app>
-  #             oat/arm64/
-  #                 package.vdex
-  #                 package.odex
-  #         <priv-app>
-  #             oat/arm64/
-  #                 package.vdex
-  #                 package.odex
-  dexpreopt_tools_files_temp_dir = os.path.join(temp_dir, 'tools')
-  dexpreopt_framework_config_files_temp_dir = os.path.join(temp_dir, 'system_config')
-  dexpreopt_vendor_config_files_temp_dir = os.path.join(temp_dir, 'vendor_config')
-
-  extract_items(
-      target_files=OPTIONS.framework_dexpreopt_tools,
-      target_files_temp_dir=dexpreopt_tools_files_temp_dir,
-      extract_item_list=('*',))
-  extract_items(
-      target_files=OPTIONS.framework_dexpreopt_config,
-      target_files_temp_dir=dexpreopt_framework_config_files_temp_dir,
-      extract_item_list=('*',))
-  extract_items(
-      target_files=OPTIONS.vendor_dexpreopt_config,
-      target_files_temp_dir=dexpreopt_vendor_config_files_temp_dir,
-      extract_item_list=('*',))
-
-  os.symlink(os.path.join(output_target_files_temp_dir, "SYSTEM"),
-             os.path.join(temp_dir, "system"))
-  os.symlink(os.path.join(output_target_files_temp_dir, "VENDOR"),
-             os.path.join(temp_dir, "vendor"))
-
-  # The directory structure for flatteded APEXes is:
-  #
-  # SYSTEM
-  #     apex
-  #         <APEX name, e.g., com.android.wifi>
-  #             apex_manifest.pb
-  #             apex_pubkey
-  #             etc/
-  #             javalib/
-  #             lib/
-  #             lib64/
-  #             priv-app/
-  #
-  # The directory structure for updatable APEXes is:
-  #
-  # SYSTEM
-  #     apex
-  #         com.android.adbd.apex
-  #         com.android.appsearch.apex
-  #         com.android.art.apex
-  #         ...
-  apex_root = os.path.join(output_target_files_temp_dir, "SYSTEM", "apex")
-  framework_misc_info_dict = common.LoadDictionaryFromFile(
-      os.path.join(framework_meta, *misc_info_path))
-
-  # Check for flattended versus updatable APEX.
-  if framework_misc_info_dict.get('target_flatten_apex') == 'false':
-    # Extract APEX.
-    logging.info('extracting APEX')
-
-    apex_extract_root_dir = os.path.join(temp_dir, 'apex')
-    os.makedirs(apex_extract_root_dir)
-
-    for apex in (glob.glob(os.path.join(apex_root, '*.apex')) +
-                 glob.glob(os.path.join(apex_root, '*.capex'))):
-      logging.info('  apex: %s', apex)
-      # deapexer is in the same directory as the merge_target_files binary extracted
-      # from otatools.zip.
-      apex_json_info = subprocess.check_output(['deapexer', 'info', apex])
-      logging.info('    info: %s', apex_json_info)
-      apex_info = json.loads(apex_json_info)
-      apex_name = apex_info['name']
-      logging.info('    name: %s', apex_name)
-
-      apex_extract_dir = os.path.join(apex_extract_root_dir, apex_name)
-      os.makedirs(apex_extract_dir)
-
-      # deapexer uses debugfs_static, which is part of otatools.zip.
-      command = [
-          'deapexer',
-          '--debugfs_path',
-          'debugfs_static',
-          'extract',
-          apex,
-          apex_extract_dir,
-      ]
-      logging.info('    running %s', command)
-      subprocess.check_call(command)
-  else:
-    # Flattened APEXes don't need to be extracted since they have the necessary
-    # directory structure.
-    os.symlink(os.path.join(apex_root), os.path.join(temp_dir, 'apex'))
-
-  # Modify system config to point to the tools that have been extracted.
-  # Absolute or .. paths are not allowed  by the dexpreopt_gen tool in
-  # dexpreopt_soong.config.
-  dexpreopt_framework_soon_config = os.path.join(
-      dexpreopt_framework_config_files_temp_dir, 'dexpreopt_soong.config')
-  with open(dexpreopt_framework_soon_config, 'w') as f:
-    dexpreopt_soong_config = {
-        'Profman': 'tools/profman',
-        'Dex2oat': 'tools/dex2oatd',
-        'Aapt': 'tools/aapt2',
-        'SoongZip': 'tools/soong_zip',
-        'Zip2zip': 'tools/zip2zip',
-        'ManifestCheck': 'tools/manifest_check',
-        'ConstructContext': 'tools/construct_context',
-    }
-    json.dump(dexpreopt_soong_config, f)
-
-  # TODO(b/188179859): Make *dex location configurable to vendor or system_other.
-  use_system_other_odex = False
-
-  if use_system_other_odex:
-    dex_img = 'SYSTEM_OTHER'
-  else:
-    dex_img = 'VENDOR'
-    # Open vendor_filesystem_config to append the items generated by dexopt.
-    vendor_file_system_config = open(
-        os.path.join(temp_dir, 'output', 'META', 'vendor_filesystem_config.txt'),
-        'a')
-
-  # Dexpreopt vendor apps.
-  dexpreopt_config_suffix = '_dexpreopt.config'
-  for config in glob.glob(os.path.join(
-      dexpreopt_vendor_config_files_temp_dir, '*' + dexpreopt_config_suffix)):
-    app = os.path.basename(config)[:-len(dexpreopt_config_suffix)]
-    logging.info('dexpreopt config: %s %s', config, app)
-
-    apk_dir = 'app'
-    apk_path = os.path.join(temp_dir, 'vendor', apk_dir, app, app + '.apk')
-    if not os.path.exists(apk_path):
-      apk_dir = 'priv-app'
-      apk_path = os.path.join(temp_dir, 'vendor', apk_dir, app, app + '.apk')
-      if not os.path.exists(apk_path):
-        logging.warning('skipping dexpreopt for %s, no apk found in vendor/app '
-                        'or vendor/priv-app', app)
-        continue
-
-    # Generate dexpreopting script. Note 'out_dir' is not the output directory
-    # where the script is generated, but the OUT_DIR at build time referenced
-    # in the dexpreot config files, e.g., "out/.../core-oj.jar", so the tool knows
-    # how to adjust the path.
-    command = [
-        os.path.join(dexpreopt_tools_files_temp_dir, 'dexpreopt_gen'),
-        '-global',
-        os.path.join(dexpreopt_framework_config_files_temp_dir, 'dexpreopt.config'),
-        '-global_soong',
-        os.path.join(
-            dexpreopt_framework_config_files_temp_dir, 'dexpreopt_soong.config'),
-        '-module',
-        config,
-        '-dexpreopt_script',
-        'dexpreopt_app.sh',
-        '-out_dir',
-        'out',
-        '-base_path',
-        '.',
-        '--uses_target_files',
-    ]
-
-    # Run the command from temp_dir so all tool paths are its descendants.
-    logging.info("running %s", command)
-    subprocess.check_call(command, cwd = temp_dir)
-
-    # Call the generated script.
-    command = ['sh', 'dexpreopt_app.sh', apk_path]
-    logging.info("running %s", command)
-    subprocess.check_call(command, cwd = temp_dir)
-
-    # Output files are in:
-    #
-    # <temp_dir>/out/dex2oat_result/vendor/priv-app/<app>/oat/arm64/package.vdex
-    # <temp_dir>/out/dex2oat_result/vendor/priv-app/<app>/oat/arm64/package.odex
-    # <temp_dir>/out/dex2oat_result/vendor/app/<app>/oat/arm64/package.vdex
-    # <temp_dir>/out/dex2oat_result/vendor/app/<app>/oat/arm64/package.odex
-    #
-    # Copy the files to their destination. The structure of system_other is:
-    #
-    # system_other/
-    #     system-other-odex-marker
-    #     system/
-    #         app/
-    #             <app>/oat/arm64/
-    #                 <app>.odex
-    #                 <app>.vdex
-    #             ...
-    #         priv-app/
-    #             <app>/oat/arm64/
-    #                 <app>.odex
-    #                 <app>.vdex
-    #             ...
-
-    # TODO(b/188179859): Support for other architectures.
-    arch = 'arm64'
-
-    dex_destination = os.path.join(temp_dir, 'output', dex_img, apk_dir, app, 'oat', arch)
-    os.makedirs(dex_destination)
-    dex2oat_path = os.path.join(
-        temp_dir, 'out', 'dex2oat_result', 'vendor', apk_dir, app, 'oat', arch)
-    shutil.copy(os.path.join(dex2oat_path, 'package.vdex'),
-                os.path.join(dex_destination, app + '.vdex'))
-    shutil.copy(os.path.join(dex2oat_path, 'package.odex'),
-                os.path.join(dex_destination, app + '.odex'))
-
-    # Append entries to vendor_file_system_config.txt, such as:
-    #
-    # vendor/app/<app>/oat 0 2000 755 selabel=u:object_r:vendor_app_file:s0 capabilities=0x0
-    # vendor/app/<app>/oat/arm64 0 2000 755 selabel=u:object_r:vendor_app_file:s0 capabilities=0x0
-    # vendor/app/<app>/oat/arm64/<app>.odex 0 0 644 selabel=u:object_r:vendor_app_file:s0 capabilities=0x0
-    # vendor/app/<app>/oat/arm64/<app>.vdex 0 0 644 selabel=u:object_r:vendor_app_file:s0 capabilities=0x0
-    if not use_system_other_odex:
-      vendor_app_prefix = 'vendor/' + apk_dir + '/' + app + '/oat'
-      selabel = 'selabel=u:object_r:vendor_app_file:s0 capabilities=0x0'
-      vendor_file_system_config.writelines([
-          vendor_app_prefix + ' 0 2000 755 ' + selabel + '\n',
-          vendor_app_prefix + '/' + arch + ' 0 2000 755 ' + selabel + '\n',
-          vendor_app_prefix + '/' + arch + '/' + app + '.odex 0 0 644 ' + selabel + '\n',
-          vendor_app_prefix + '/' + arch + '/' + app + '.vdex 0 0 644 ' + selabel + '\n',
-      ])
-
-  if not use_system_other_odex:
-    vendor_file_system_config.close()
-    # Delete vendor.img so that it will be regenerated.
-    # TODO(b/188179859): Rebuilding a vendor image in GRF mode (e.g., T(framework)
-    #                    and S(vendor) may require logic similar to that in
-    #                    rebuild_image_with_sepolicy.
-    vendor_img = os.path.join(output_target_files_temp_dir, 'IMAGES', 'vendor.img')
-    if os.path.exists(vendor_img):
-      logging.info('Deleting %s', vendor_img)
-      os.remove(vendor_img)
-
-
-def create_merged_package(temp_dir, framework_target_files, framework_item_list,
-                          vendor_target_files, vendor_item_list,
-                          framework_misc_info_keys, rebuild_recovery,
-                          framework_dexpreopt_tools, framework_dexpreopt_config,
-                          vendor_dexpreopt_config):
-  """Merges two target files packages into one target files structure.
-
-  Args:
-    temp_dir: The name of a directory we use when we extract items from the
-      input target files packages, and also a scratch directory that we use for
-      temporary files.
-    framework_target_files: The name of the zip archive containing the framework
-      partial target files package.
-    framework_item_list: The list of items to extract from the partial framework
-      target files package as is, meaning these items will land in the output
-      target files package exactly as they appear in the input partial framework
-      target files package.
-    vendor_target_files: The name of the zip archive containing the vendor
-      partial target files package.
-    vendor_item_list: The list of items to extract from the partial vendor
-      target files package as is, meaning these items will land in the output
-      target files package exactly as they appear in the input partial vendor
-      target files package.
-    framework_misc_info_keys: A list of keys to obtain from the framework
-      instance of META/misc_info.txt. The remaining keys should come from the
-      vendor instance.
-    rebuild_recovery: If true, rebuild the recovery patch used by non-A/B
-      devices and write it to the system image.
-
-    The following are only used if dexpreopt is applied:
-
-    framework_dexpreopt_tools: Location of dexpreopt_tools.zip.
-    framework_dexpreopt_config: Location of framework's dexpreopt_config.zip.
-    vendor_dexpreopt_config: Location of vendor's dexpreopt_config.zip.
-
-  Returns:
-    Path to merged package under temp directory.
-  """
-  # Extract "as is" items from the input framework and vendor partial target
-  # files packages directly into the output temporary directory, since these items
-  # do not need special case processing.
-
-  output_target_files_temp_dir = os.path.join(temp_dir, 'output')
-  extract_items(
-      target_files=framework_target_files,
-      target_files_temp_dir=output_target_files_temp_dir,
-      extract_item_list=framework_item_list)
-  extract_items(
-      target_files=vendor_target_files,
-      target_files_temp_dir=output_target_files_temp_dir,
-      extract_item_list=vendor_item_list)
-
-  # Perform special case processing on META/* items.
-  # After this function completes successfully, all the files we need to create
-  # the output target files package are in place.
-  framework_meta = os.path.join(temp_dir, 'framework_meta')
-  vendor_meta = os.path.join(temp_dir, 'vendor_meta')
-  extract_items(
-      target_files=framework_target_files,
-      target_files_temp_dir=framework_meta,
-      extract_item_list=('META/*',))
-  extract_items(
-      target_files=vendor_target_files,
-      target_files_temp_dir=vendor_meta,
-      extract_item_list=('META/*',))
-  process_special_cases(
-      temp_dir=temp_dir,
-      framework_meta=framework_meta,
-      vendor_meta=vendor_meta,
-      output_target_files_temp_dir=output_target_files_temp_dir,
-      framework_misc_info_keys=framework_misc_info_keys,
-      framework_partition_set=item_list_to_partition_set(framework_item_list),
-      vendor_partition_set=item_list_to_partition_set(vendor_item_list),
-      framework_dexpreopt_tools=framework_dexpreopt_tools,
-      framework_dexpreopt_config=framework_dexpreopt_config,
-      vendor_dexpreopt_config=vendor_dexpreopt_config)
-
-  return output_target_files_temp_dir
-
-
-def generate_images(target_files_dir, rebuild_recovery):
-  """Generate images from target files.
-
-  This function takes merged output temporary directory and create images
-  from it.
-
-  Args:
-    target_files_dir: Path to merged temp directory.
-    rebuild_recovery: If true, rebuild the recovery patch used by non-A/B
-      devices and write it to the system image.
-  """
-
-  # Regenerate IMAGES in the target directory.
-
-  add_img_args = [
-      '--verbose',
-      '--add_missing',
-  ]
-  # TODO(b/132730255): Remove this if statement.
-  if rebuild_recovery:
-    add_img_args.append('--rebuild_recovery')
-  add_img_args.append(target_files_dir)
-
-  add_img_to_target_files.main(add_img_args)
-
-
-def rebuild_image_with_sepolicy(target_files_dir,
-                                vendor_otatools=None,
-                                vendor_target_files=None):
-  """Rebuilds odm.img or vendor.img to include merged sepolicy files.
-
-  If odm is present then odm is preferred -- otherwise vendor is used.
-
-  Args:
-    target_files_dir: Path to the extracted merged target-files package.
-    vendor_otatools: If not None, path to an otatools.zip from the vendor build
-      that is used when recompiling the image.
-    vendor_target_files: Expected if vendor_otatools is not None. Path to the
-      vendor target-files zip.
-  """
-  partition = 'vendor'
-  if os.path.exists(os.path.join(target_files_dir, 'ODM')) or os.path.exists(
-      os.path.join(target_files_dir, 'IMAGES/odm.img')):
-    partition = 'odm'
-  partition_img = '{}.img'.format(partition)
-
-  logger.info('Recompiling %s using the merged sepolicy files.', partition_img)
-
-  # Copy the combined SEPolicy file and framework hashes to the image that is
-  # being rebuilt.
-  def copy_selinux_file(input_path, output_filename):
-    input_filename = os.path.join(target_files_dir, input_path)
-    if not os.path.exists(input_filename):
-      input_filename = input_filename.replace('SYSTEM_EXT/', 'SYSTEM/system_ext/') \
-          .replace('PRODUCT/', 'SYSTEM/product/')
-      if not os.path.exists(input_filename):
-        logger.info('Skipping copy_selinux_file for %s', input_filename)
-        return
-    shutil.copy(
-        input_filename,
-        os.path.join(target_files_dir, partition.upper(), 'etc/selinux',
-                     output_filename))
-
-  copy_selinux_file('META/combined_sepolicy', 'precompiled_sepolicy')
-  copy_selinux_file('SYSTEM/etc/selinux/plat_sepolicy_and_mapping.sha256',
-                    'precompiled_sepolicy.plat_sepolicy_and_mapping.sha256')
-  copy_selinux_file(
-      'SYSTEM_EXT/etc/selinux/system_ext_sepolicy_and_mapping.sha256',
-      'precompiled_sepolicy.system_ext_sepolicy_and_mapping.sha256')
-  copy_selinux_file('PRODUCT/etc/selinux/product_sepolicy_and_mapping.sha256',
-                    'precompiled_sepolicy.product_sepolicy_and_mapping.sha256')
-
-  if not vendor_otatools:
-    # Remove the partition from the merged target-files archive. It will be
-    # rebuilt later automatically by generate_images().
-    os.remove(os.path.join(target_files_dir, 'IMAGES', partition_img))
-  else:
-    # TODO(b/192253131): Remove the need for vendor_otatools by fixing
-    # backwards-compatibility issues when compiling images on R from S+.
-    if not vendor_target_files:
-      raise ValueError(
-          'Expected vendor_target_files if vendor_otatools is not None.')
-    logger.info(
-        '%s recompilation will be performed using the vendor otatools.zip',
-        partition_img)
-
-    # Unzip the vendor build's otatools.zip and target-files archive.
-    vendor_otatools_dir = common.MakeTempDir(
-        prefix='merge_target_files_vendor_otatools_')
-    vendor_target_files_dir = common.MakeTempDir(
-        prefix='merge_target_files_vendor_target_files_')
-    common.UnzipToDir(vendor_otatools, vendor_otatools_dir)
-    common.UnzipToDir(vendor_target_files, vendor_target_files_dir)
-
-    # Copy the partition contents from the merged target-files archive to the
-    # vendor target-files archive.
-    shutil.rmtree(os.path.join(vendor_target_files_dir, partition.upper()))
-    shutil.copytree(
-        os.path.join(target_files_dir, partition.upper()),
-        os.path.join(vendor_target_files_dir, partition.upper()),
-        symlinks=True)
-
-    # Delete then rebuild the partition.
-    os.remove(os.path.join(vendor_target_files_dir, 'IMAGES', partition_img))
-    rebuild_partition_command = [
-        os.path.join(vendor_otatools_dir, 'bin', 'add_img_to_target_files'),
-        '--verbose',
-        '--add_missing',
-        vendor_target_files_dir,
-    ]
-    logger.info('Recompiling %s: %s', partition_img,
-                ' '.join(rebuild_partition_command))
-    common.RunAndCheckOutput(rebuild_partition_command, verbose=True)
-
-    # Move the newly-created image to the merged target files dir.
-    if not os.path.exists(os.path.join(target_files_dir, 'IMAGES')):
-      os.makedirs(os.path.join(target_files_dir, 'IMAGES'))
-    shutil.move(
-        os.path.join(vendor_target_files_dir, 'IMAGES', partition_img),
-        os.path.join(target_files_dir, 'IMAGES', partition_img))
-
-
-def generate_super_empty_image(target_dir, output_super_empty):
-  """Generates super_empty image from target package.
-
-  Args:
-    target_dir: Path to the target file package which contains misc_info.txt for
-      detailed information for super image.
-    output_super_empty: If provided, copies a super_empty.img file from the
-      target files package to this path.
-  """
-  # Create super_empty.img using the merged misc_info.txt.
-
-  misc_info_txt = os.path.join(target_dir, 'META', 'misc_info.txt')
-
-  use_dynamic_partitions = common.LoadDictionaryFromFile(misc_info_txt).get(
-      'use_dynamic_partitions')
-
-  if use_dynamic_partitions != 'true' and output_super_empty:
-    raise ValueError(
-        'Building super_empty.img requires use_dynamic_partitions=true.')
-  elif use_dynamic_partitions == 'true':
-    super_empty_img = os.path.join(target_dir, 'IMAGES', 'super_empty.img')
-    build_super_image_args = [
-        misc_info_txt,
-        super_empty_img,
-    ]
-    build_super_image.main(build_super_image_args)
-
-    # Copy super_empty.img to the user-provided output_super_empty location.
-    if output_super_empty:
-      shutil.copyfile(super_empty_img, output_super_empty)
-
-
-def create_target_files_archive(output_file, source_dir, temp_dir):
-  """Creates archive from target package.
-
-  Args:
-    output_file: The name of the zip archive target files package.
-    source_dir: The target directory contains package to be archived.
-    temp_dir: Path to temporary directory for any intermediate files.
-  """
-  output_target_files_list = os.path.join(temp_dir, 'output.list')
-  output_zip = os.path.abspath(output_file)
-  output_target_files_meta_dir = os.path.join(source_dir, 'META')
-
-  def files_from_path(target_path, extra_args=None):
-    """Gets files under the given path and return a sorted list."""
-    find_command = ['find', target_path] + (extra_args or [])
-    find_process = common.Run(
-        find_command, stdout=subprocess.PIPE, verbose=False)
-    return common.RunAndCheckOutput(['sort'],
-                                    stdin=find_process.stdout,
-                                    verbose=False)
-
-  meta_content = files_from_path(output_target_files_meta_dir)
-  other_content = files_from_path(
-      source_dir,
-      ['-path', output_target_files_meta_dir, '-prune', '-o', '-print'])
-
-  with open(output_target_files_list, 'w') as f:
-    f.write(meta_content)
-    f.write(other_content)
-
-  command = [
-      'soong_zip',
-      '-d',
-      '-o',
-      output_zip,
-      '-C',
-      source_dir,
-      '-r',
-      output_target_files_list,
-  ]
-
-  logger.info('creating %s', output_file)
-  common.RunAndCheckOutput(command, verbose=True)
-  logger.info('finished creating %s', output_file)
-
-  return output_zip
-
-
-def merge_target_files(temp_dir, framework_target_files, framework_item_list,
-                       framework_misc_info_keys, vendor_target_files,
-                       vendor_item_list, output_target_files, output_dir,
-                       output_item_list, output_ota, output_img,
-                       output_super_empty, rebuild_recovery, vendor_otatools,
-                       rebuild_sepolicy, framework_dexpreopt_tools,
-                       framework_dexpreopt_config, vendor_dexpreopt_config):
-  """Merges two target files packages together.
-
-  This function takes framework and vendor target files packages as input,
-  performs various file extractions, special case processing, and finally
-  creates a merged zip archive as output.
-
-  Args:
-    temp_dir: The name of a directory we use when we extract items from the
-      input target files packages, and also a scratch directory that we use for
-      temporary files.
-    framework_target_files: The name of the zip archive containing the framework
-      partial target files package.
-    framework_item_list: The list of items to extract from the partial framework
-      target files package as is, meaning these items will land in the output
-      target files package exactly as they appear in the input partial framework
-      target files package.
-    framework_misc_info_keys: A list of keys to obtain from the framework
-      instance of META/misc_info.txt. The remaining keys should come from the
-      vendor instance.
-    vendor_target_files: The name of the zip archive containing the vendor
-      partial target files package.
-    vendor_item_list: The list of items to extract from the partial vendor
-      target files package as is, meaning these items will land in the output
-      target files package exactly as they appear in the input partial vendor
-      target files package.
-    output_target_files: The name of the output zip archive target files package
-      created by merging framework and vendor.
-    output_dir: The destination directory for saving merged files.
-    output_item_list: The list of items to copy into the output_dir.
-    output_ota: The name of the output zip archive ota package.
-    output_img: The name of the output zip archive img package.
-    output_super_empty: If provided, creates a super_empty.img file from the
-      merged target files package and saves it at this path.
-    rebuild_recovery: If true, rebuild the recovery patch used by non-A/B
-      devices and write it to the system image.
-    vendor_otatools: Path to an otatools zip used for recompiling vendor images.
-    rebuild_sepolicy: If true, rebuild odm.img (if target uses ODM) or
-      vendor.img using a merged precompiled_sepolicy file.
-
-    The following are only used if dexpreopt is applied:
-
-    framework_dexpreopt_tools: Location of dexpreopt_tools.zip.
-    framework_dexpreopt_config: Location of framework's dexpreopt_config.zip.
-    vendor_dexpreopt_config: Location of vendor's dexpreopt_config.zip.
-  """
-
-  logger.info('starting: merge framework %s and vendor %s into output %s',
-              framework_target_files, vendor_target_files, output_target_files)
-
-  output_target_files_temp_dir = create_merged_package(
-      temp_dir, framework_target_files, framework_item_list,
-      vendor_target_files, vendor_item_list, framework_misc_info_keys,
-      rebuild_recovery, framework_dexpreopt_tools, framework_dexpreopt_config,
-      vendor_dexpreopt_config)
-
-  if not check_target_files_vintf.CheckVintf(output_target_files_temp_dir):
-    raise RuntimeError('Incompatible VINTF metadata')
-
-  partition_map = common.PartitionMapFromTargetFiles(
-      output_target_files_temp_dir)
-
-  # Generate and check for cross-partition violations of sharedUserId
-  # values in APKs. This requires the input target-files packages to contain
-  # *.apk files.
-  shareduid_violation_modules = os.path.join(
-      output_target_files_temp_dir, 'META', 'shareduid_violation_modules.json')
-  with open(shareduid_violation_modules, 'w') as f:
-    violation = find_shareduid_violation.FindShareduidViolation(
-        output_target_files_temp_dir, partition_map)
-
-    # Write the output to a file to enable debugging.
-    f.write(violation)
-
-    # Check for violations across the input builds' partition groups.
-    framework_partitions = item_list_to_partition_set(framework_item_list)
-    vendor_partitions = item_list_to_partition_set(vendor_item_list)
-    shareduid_errors = common.SharedUidPartitionViolations(
-        json.loads(violation), [framework_partitions, vendor_partitions])
-    if shareduid_errors:
-      for error in shareduid_errors:
-        logger.error(error)
-      raise ValueError('sharedUserId APK error. See %s' %
-                       shareduid_violation_modules)
-
-  # host_init_verifier and secilc check only the following partitions:
-  filtered_partitions = {
-      partition: path
-      for partition, path in partition_map.items()
-      if partition in ['system', 'system_ext', 'product', 'vendor', 'odm']
-  }
-
-  # Run host_init_verifier on the combined init rc files.
-  common.RunHostInitVerifier(
-      product_out=output_target_files_temp_dir,
-      partition_map=filtered_partitions)
-
-  # Check that the split sepolicy from the multiple builds can compile.
-  split_sepolicy_cmd = compile_split_sepolicy(output_target_files_temp_dir,
-                                              filtered_partitions)
-  logger.info('Compiling split sepolicy: %s', ' '.join(split_sepolicy_cmd))
-  common.RunAndCheckOutput(split_sepolicy_cmd)
-  # Include the compiled policy in an image if requested.
-  if rebuild_sepolicy:
-    rebuild_image_with_sepolicy(output_target_files_temp_dir, vendor_otatools,
-                                vendor_target_files)
-
-  # Run validation checks on the pre-installed APEX files.
-  validate_merged_apex_info(output_target_files_temp_dir, partition_map.keys())
-
-  generate_images(output_target_files_temp_dir, rebuild_recovery)
-
-  generate_super_empty_image(output_target_files_temp_dir, output_super_empty)
-
-  # Finally, create the output target files zip archive and/or copy the
-  # output items to the output target files directory.
-
-  if output_dir:
-    copy_items(output_target_files_temp_dir, output_dir, output_item_list)
-
-  if not output_target_files:
-    return
-
-  # Create the merged META/care_map.bp
-  generate_care_map(partition_map.keys(), output_target_files_temp_dir)
-
-  output_zip = create_target_files_archive(output_target_files,
-                                           output_target_files_temp_dir,
-                                           temp_dir)
-
-  # Create the IMG package from the merged target files package.
-  if output_img:
-    img_from_target_files.main([output_zip, output_img])
-
-  # Create the OTA package from the merged target files package.
-
-  if output_ota:
-    ota_from_target_files.main([output_zip, output_ota])
-
-
-def call_func_with_temp_dir(func, keep_tmp):
-  """Manages the creation and cleanup of the temporary directory.
-
-  This function calls the given function after first creating a temporary
-  directory. It also cleans up the temporary directory.
-
-  Args:
-    func: The function to call. Should accept one parameter, the path to the
-      temporary directory.
-    keep_tmp: Keep the temporary directory after processing is complete.
-  """
-
-  # Create a temporary directory. This will serve as the parent of directories
-  # we use when we extract items from the input target files packages, and also
-  # a scratch directory that we use for temporary files.
-
-  temp_dir = common.MakeTempDir(prefix='merge_target_files_')
-
-  try:
-    func(temp_dir)
-  finally:
-    if keep_tmp:
-      logger.info('keeping %s', temp_dir)
-    else:
-      common.Cleanup()
-
-
-def main():
-  """The main function.
-
-  Process command line arguments, then call merge_target_files to
-  perform the heavy lifting.
-  """
-
-  common.InitLogging()
-
-  def option_handler(o, a):
-    if o == '--system-target-files':
-      logger.warning(
-          '--system-target-files has been renamed to --framework-target-files')
-      OPTIONS.framework_target_files = a
-    elif o == '--framework-target-files':
-      OPTIONS.framework_target_files = a
-    elif o == '--system-item-list':
-      logger.warning(
-          '--system-item-list has been renamed to --framework-item-list')
-      OPTIONS.framework_item_list = a
-    elif o == '--framework-item-list':
-      OPTIONS.framework_item_list = a
-    elif o == '--system-misc-info-keys':
-      logger.warning('--system-misc-info-keys has been renamed to '
-                     '--framework-misc-info-keys')
-      OPTIONS.framework_misc_info_keys = a
-    elif o == '--framework-misc-info-keys':
-      OPTIONS.framework_misc_info_keys = a
-    elif o == '--other-target-files':
-      logger.warning(
-          '--other-target-files has been renamed to --vendor-target-files')
-      OPTIONS.vendor_target_files = a
-    elif o == '--vendor-target-files':
-      OPTIONS.vendor_target_files = a
-    elif o == '--other-item-list':
-      logger.warning('--other-item-list has been renamed to --vendor-item-list')
-      OPTIONS.vendor_item_list = a
-    elif o == '--vendor-item-list':
-      OPTIONS.vendor_item_list = a
-    elif o == '--output-target-files':
-      OPTIONS.output_target_files = a
-    elif o == '--output-dir':
-      OPTIONS.output_dir = a
-    elif o == '--output-item-list':
-      OPTIONS.output_item_list = a
-    elif o == '--output-ota':
-      OPTIONS.output_ota = a
-    elif o == '--output-img':
-      OPTIONS.output_img = a
-    elif o == '--output-super-empty':
-      OPTIONS.output_super_empty = a
-    elif o == '--rebuild_recovery':  # TODO(b/132730255): Warn
-      OPTIONS.rebuild_recovery = True
-    elif o == '--allow-duplicate-apkapex-keys':
-      OPTIONS.allow_duplicate_apkapex_keys = True
-    elif o == '--vendor-otatools':
-      OPTIONS.vendor_otatools = a
-    elif o == '--rebuild-sepolicy':
-      OPTIONS.rebuild_sepolicy = True
-    elif o == '--keep-tmp':
-      OPTIONS.keep_tmp = True
-    elif o == '--framework-dexpreopt-config':
-      OPTIONS.framework_dexpreopt_config = a
-    elif o == '--framework-dexpreopt-tools':
-      OPTIONS.framework_dexpreopt_tools = a
-    elif o == '--vendor-dexpreopt-config':
-      OPTIONS.vendor_dexpreopt_config = a
-    else:
-      return False
-    return True
-
-  args = common.ParseOptions(
-      sys.argv[1:],
-      __doc__,
-      extra_long_opts=[
-          'system-target-files=',
-          'framework-target-files=',
-          'system-item-list=',
-          'framework-item-list=',
-          'system-misc-info-keys=',
-          'framework-misc-info-keys=',
-          'other-target-files=',
-          'vendor-target-files=',
-          'other-item-list=',
-          'vendor-item-list=',
-          'output-target-files=',
-          'output-dir=',
-          'output-item-list=',
-          'output-ota=',
-          'output-img=',
-          'output-super-empty=',
-          'framework-dexpreopt-config=',
-          'framework-dexpreopt-tools=',
-          'vendor-dexpreopt-config=',
-          'rebuild_recovery',
-          'allow-duplicate-apkapex-keys',
-          'vendor-otatools=',
-          'rebuild-sepolicy',
-          'keep-tmp',
-      ],
-      extra_option_handler=option_handler)
-
-  # pylint: disable=too-many-boolean-expressions
-  if (args or OPTIONS.framework_target_files is None or
-      OPTIONS.vendor_target_files is None or
-      (OPTIONS.output_target_files is None and OPTIONS.output_dir is None) or
-      (OPTIONS.output_dir is not None and OPTIONS.output_item_list is None)):
-    common.Usage(__doc__)
-    sys.exit(1)
-
-  if OPTIONS.framework_item_list:
-    framework_item_list = common.LoadListFromFile(OPTIONS.framework_item_list)
-  else:
-    framework_item_list = DEFAULT_FRAMEWORK_ITEM_LIST
-
-  if OPTIONS.framework_misc_info_keys:
-    framework_misc_info_keys = common.LoadListFromFile(
-        OPTIONS.framework_misc_info_keys)
-  else:
-    framework_misc_info_keys = DEFAULT_FRAMEWORK_MISC_INFO_KEYS
-
-  if OPTIONS.vendor_item_list:
-    vendor_item_list = common.LoadListFromFile(OPTIONS.vendor_item_list)
-  else:
-    vendor_item_list = DEFAULT_VENDOR_ITEM_LIST
-
-  if OPTIONS.output_item_list:
-    output_item_list = common.LoadListFromFile(OPTIONS.output_item_list)
-  else:
-    output_item_list = None
-
-  if not validate_config_lists(
-      framework_item_list=framework_item_list,
-      framework_misc_info_keys=framework_misc_info_keys,
-      vendor_item_list=vendor_item_list):
-    sys.exit(1)
-
-  call_func_with_temp_dir(
-      lambda temp_dir: merge_target_files(
-          temp_dir=temp_dir,
-          framework_target_files=OPTIONS.framework_target_files,
-          framework_item_list=framework_item_list,
-          framework_misc_info_keys=framework_misc_info_keys,
-          vendor_target_files=OPTIONS.vendor_target_files,
-          vendor_item_list=vendor_item_list,
-          output_target_files=OPTIONS.output_target_files,
-          output_dir=OPTIONS.output_dir,
-          output_item_list=output_item_list,
-          output_ota=OPTIONS.output_ota,
-          output_img=OPTIONS.output_img,
-          output_super_empty=OPTIONS.output_super_empty,
-          rebuild_recovery=OPTIONS.rebuild_recovery,
-          vendor_otatools=OPTIONS.vendor_otatools,
-          rebuild_sepolicy=OPTIONS.rebuild_sepolicy,
-          framework_dexpreopt_tools=OPTIONS.framework_dexpreopt_tools,
-          framework_dexpreopt_config=OPTIONS.framework_dexpreopt_config,
-          vendor_dexpreopt_config=OPTIONS.vendor_dexpreopt_config), OPTIONS.keep_tmp)
-
-
-if __name__ == '__main__':
-  main()
diff --git a/tools/releasetools/non_ab_ota.py b/tools/releasetools/non_ab_ota.py
index 471ef25..9732cda 100644
--- a/tools/releasetools/non_ab_ota.py
+++ b/tools/releasetools/non_ab_ota.py
@@ -74,7 +74,7 @@
 
   block_diff_dict = collections.OrderedDict()
   partition_names = ["system", "vendor", "product", "odm", "system_ext",
-                     "vendor_dlkm", "odm_dlkm"]
+                     "vendor_dlkm", "odm_dlkm", "system_dlkm"]
   for partition in partition_names:
     if not HasPartition(target_zip, partition):
       continue
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 9b9422c..522d489 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -237,6 +237,13 @@
   --enable_lz4diff
       Whether to enable lz4diff feature. Will generate smaller OTA for EROFS but
       uses more memory.
+
+  --spl_downgrade
+      Force generate an SPL downgrade OTA. Only needed if target build has an
+      older SPL.
+
+  --vabc_compression_param
+      Compression algorithm to be used for VABC. Available options: gz, brotli, none
 """
 
 from __future__ import print_function
@@ -308,6 +315,7 @@
 OPTIONS.compressor_types = None
 OPTIONS.enable_zucchini = True
 OPTIONS.enable_lz4diff = False
+OPTIONS.vabc_compression_param = None
 
 POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
 DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt'
@@ -316,15 +324,15 @@
 # Files to be unzipped for target diffing purpose.
 TARGET_DIFFING_UNZIP_PATTERN = ['BOOT', 'RECOVERY', 'SYSTEM/*', 'VENDOR/*',
                                 'PRODUCT/*', 'SYSTEM_EXT/*', 'ODM/*',
-                                'VENDOR_DLKM/*', 'ODM_DLKM/*']
+                                'VENDOR_DLKM/*', 'ODM_DLKM/*', 'SYSTEM_DLKM/*']
 RETROFIT_DAP_UNZIP_PATTERN = ['OTA/super_*.img', AB_PARTITIONS]
 
 # Images to be excluded from secondary payload. We essentially only keep
 # 'system_other' and bootloader partitions.
 SECONDARY_PAYLOAD_SKIPPED_IMAGES = [
     'boot', 'dtbo', 'modem', 'odm', 'odm_dlkm', 'product', 'radio', 'recovery',
-    'system_ext', 'vbmeta', 'vbmeta_system', 'vbmeta_vendor', 'vendor',
-    'vendor_boot']
+    'system_dlkm', 'system_ext', 'vbmeta', 'vbmeta_system', 'vbmeta_vendor',
+    'vendor', 'vendor_boot']
 
 
 class PayloadSigner(object):
@@ -535,8 +543,7 @@
 
   oem_dicts = []
   for oem_file in oem_source:
-    with open(oem_file) as fp:
-      oem_dicts.append(common.LoadDictionaryFromLines(fp.readlines()))
+    oem_dicts.append(common.LoadDictionaryFromFile(oem_file))
   return oem_dicts
 
 
@@ -647,6 +654,24 @@
     return (payload_offset, metadata_total)
 
 
+def ModifyVABCCompressionParam(content, algo):
+  """ Update update VABC Compression Param in dynamic_partitions_info.txt
+  Args:
+    content: The string content of dynamic_partitions_info.txt
+    algo: The compression algorithm should be used for VABC. See
+          https://cs.android.com/android/platform/superproject/+/master:system/core/fs_mgr/libsnapshot/cow_writer.cpp;l=127;bpv=1;bpt=1?q=CowWriter::ParseOptions&sq=
+  Returns:
+    Updated content of dynamic_partitions_info.txt , with custom compression algo
+  """
+  output_list = []
+  for line in content.splitlines():
+    if line.startswith("virtual_ab_compression_method="):
+      continue
+    output_list.append(line)
+  output_list.append("virtual_ab_compression_method="+algo)
+  return "\n".join(output_list)
+
+
 def UpdatesInfoForSpecialUpdates(content, partitions_filter,
                                  delete_keys=None):
   """ Updates info file for secondary payload generation, partial update, etc.
@@ -801,6 +826,27 @@
     return common.LoadInfoDict(zfp)
 
 
+def GetTargetFilesZipForCustomVABCCompression(input_file, vabc_compression_param):
+  """Returns a target-files.zip with a custom VABC compression param.
+  Args:
+    input_file: The input target-files.zip path
+    vabc_compression_param: Custom Virtual AB Compression algorithm
+
+  Returns:
+    The path to modified target-files.zip
+  """
+  target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
+  shutil.copyfile(input_file, target_file)
+  common.ZipDelete(target_file, DYNAMIC_PARTITION_INFO)
+  with zipfile.ZipFile(input_file, 'r', allowZip64=True) as zfp:
+    dynamic_partition_info = zfp.read(DYNAMIC_PARTITION_INFO).decode()
+    dynamic_partition_info = ModifyVABCCompressionParam(
+        dynamic_partition_info, vabc_compression_param)
+    with zipfile.ZipFile(target_file, "a", allowZip64=True) as output_zip:
+      output_zip.writestr(DYNAMIC_PARTITION_INFO, dynamic_partition_info)
+  return target_file
+
+
 def GetTargetFilesZipForPartialUpdates(input_file, ab_partitions):
   """Returns a target-files.zip for partial ota update package generation.
 
@@ -875,6 +921,9 @@
       content = input_zip.read(info_file).decode()
       modified_info = UpdatesInfoForSpecialUpdates(
           content, lambda p: p in ab_partitions)
+      if OPTIONS.vabc_compression_param and info_file == DYNAMIC_PARTITION_INFO:
+        modified_info = ModifyVABCCompressionParam(
+            modified_info, OPTIONS.vabc_compression_param)
       common.ZipWriteStr(partial_target_zip, info_file, modified_info)
 
     # TODO(xunchang) handle META/postinstall_config.txt'
@@ -1105,7 +1154,12 @@
   if target_info.vendor_suppressed_vabc:
     logger.info("Vendor suppressed VABC. Disabling")
     OPTIONS.disable_vabc = True
-  if not target_info.is_vabc_xor or OPTIONS.disable_vabc:
+
+  # Both source and target build need to support VABC XOR for us to use it.
+  # Source build's update_engine must be able to write XOR ops, and target
+  # build's snapuserd must be able to interpret XOR ops.
+  if not target_info.is_vabc_xor or OPTIONS.disable_vabc or \
+          (source_info is not None and not source_info.is_vabc_xor):
     logger.info("VABC XOR Not supported, disabling")
     OPTIONS.enable_vabc_xor = False
   additional_args = []
@@ -1123,6 +1177,9 @@
     target_file = GetTargetFilesZipForPartialUpdates(target_file,
                                                      OPTIONS.partial)
     additional_args += ["--is_partial_update", "true"]
+  elif OPTIONS.vabc_compression_param:
+    target_file = GetTargetFilesZipForCustomVABCCompression(
+        target_file, OPTIONS.vabc_compression_param)
   elif OPTIONS.skip_postinstall:
     target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file)
   # Target_file may have been modified, reparse ab_partitions
@@ -1157,7 +1214,7 @@
                       str(OPTIONS.enable_zucchini).lower()]
 
   if not ota_utils.IsLz4diffCompatible(source_file, target_file):
-    logger.warn(
+    logger.warning(
         "Source build doesn't support lz4diff, or source/target don't have compatible lz4diff versions. Disabling lz4diff.")
     OPTIONS.enable_lz4diff = False
 
@@ -1368,6 +1425,8 @@
     elif o == "--enable_lz4diff":
       assert a.lower() in ["true", "false"]
       OPTIONS.enable_lz4diff = a.lower() != "false"
+    elif o == "--vabc_compression_param":
+      OPTIONS.vabc_compression_param = a.lower()
     else:
       return False
     return True
@@ -1415,8 +1474,9 @@
                                  "enable_vabc_xor=",
                                  "force_minor_version=",
                                  "compressor_types=",
-                                 "enable_zucchin=",
+                                 "enable_zucchini=",
                                  "enable_lz4diff=",
+                                 "vabc_compression_param=",
                              ], extra_option_handler=option_handler)
 
   if len(args) != 2:
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 6896f83..5d403dc 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -569,7 +569,8 @@
       tokens.append('metadata.pb:' + ' ' * 15)
     else:
       tokens.append(ComputeEntryOffsetSize(METADATA_NAME))
-      tokens.append(ComputeEntryOffsetSize(METADATA_PROTO_NAME))
+      if METADATA_PROTO_NAME in zip_file.namelist():
+        tokens.append(ComputeEntryOffsetSize(METADATA_PROTO_NAME))
 
     return ','.join(tokens)
 
diff --git a/tools/releasetools/sign_apex.py b/tools/releasetools/sign_apex.py
index 66f5e05..722359b 100755
--- a/tools/releasetools/sign_apex.py
+++ b/tools/releasetools/sign_apex.py
@@ -42,6 +42,15 @@
 
   --sign_tool <sign_tool>
       Optional flag that specifies a custom signing tool for the contents of the apex.
+
+  --sepolicy_key <key>
+      Optional flag that specifies the sepolicy signing key, defaults to payload_key.
+
+  --sepolicy_cert <cert>
+      Optional flag that specifies the sepolicy signing cert.
+
+  --fsverity_tool <path>
+      Optional flag that specifies the path to fsverity tool to sign SEPolicy, defaults to fsverity.
 """
 
 import logging
@@ -55,7 +64,8 @@
 
 
 def SignApexFile(avbtool, apex_file, payload_key, container_key, no_hashtree,
-                 apk_keys=None, signing_args=None, codename_to_api_level_map=None, sign_tool=None):
+                 apk_keys=None, signing_args=None, codename_to_api_level_map=None, sign_tool=None,
+                 sepolicy_key=None, sepolicy_cert=None, fsverity_tool=None):
   """Signs the given apex file."""
   with open(apex_file, 'rb') as input_fp:
     apex_data = input_fp.read()
@@ -70,7 +80,11 @@
       no_hashtree=no_hashtree,
       apk_keys=apk_keys,
       signing_args=signing_args,
-      sign_tool=sign_tool)
+      sign_tool=sign_tool,
+      is_sepolicy=apex_file.endswith("sepolicy.apex"),
+      sepolicy_key=sepolicy_key,
+      sepolicy_cert=sepolicy_cert,
+      fsverity_tool=fsverity_tool)
 
 
 def main(argv):
@@ -106,6 +120,12 @@
         options['extra_apks'].update({n: key})
     elif o == '--sign_tool':
       options['sign_tool'] = a
+    elif o == '--sepolicy_key':
+      options['sepolicy_key'] = a
+    elif o == '--sepolicy_cert':
+      options['sepolicy_cert'] = a
+    elif o == '--fsverity_tool':
+      options['fsverity_tool'] = a
     else:
       return False
     return True
@@ -121,6 +141,9 @@
           'payload_key=',
           'extra_apks=',
           'sign_tool=',
+          'sepolicy_key=',
+          'sepolicy_cert=',
+          'fsverity_tool=',
       ],
       extra_option_handler=option_handler)
 
@@ -141,7 +164,10 @@
       signing_args=options.get('payload_extra_args'),
       codename_to_api_level_map=options.get(
           'codename_to_api_level_map', {}),
-      sign_tool=options.get('sign_tool', None))
+      sign_tool=options.get('sign_tool', None),
+      sepolicy_key=options.get('sepolicy_key', None),
+      sepolicy_cert=options.get('sepolicy_cert', None),
+      fsverity_tool=options.get('fsverity_tool', None))
   shutil.copyfile(signed_apex, args[1])
   logger.info("done.")
 
@@ -149,8 +175,5 @@
 if __name__ == '__main__':
   try:
     main(sys.argv[1:])
-  except common.ExternalError:
-    logger.exception("\n   ERROR:\n")
-    sys.exit(1)
   finally:
     common.Cleanup()
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index c615b84..a24fbdd 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -206,6 +206,7 @@
     'product': 'avb_product_add_hashtree_footer_args',
     'recovery': 'avb_recovery_add_hash_footer_args',
     'system': 'avb_system_add_hashtree_footer_args',
+    'system_dlkm': "avb_system_dlkm_add_hashtree_footer_args",
     'system_ext': 'avb_system_ext_add_hashtree_footer_args',
     'system_other': 'avb_system_other_add_hashtree_footer_args',
     'odm': 'avb_odm_add_hashtree_footer_args',
@@ -687,6 +688,39 @@
         print("    Rewriting AVB public key of system_other in /product")
         common.ZipWrite(output_tf_zip, public_key, filename)
 
+    # Updates pvmfw embedded public key with the virt APEX payload key.
+    elif filename == "PREBUILT_IMAGES/pvmfw.img":
+      # Find the name of the virt APEX in the target files.
+      namelist = input_tf_zip.namelist()
+      apex_gen = (GetApexFilename(f) for f in namelist if IsApexFile(f))
+      virt_apex_re = re.compile(r"^com\.([^\.]+\.)?android\.virt\.apex$")
+      virt_apex = next((a for a in apex_gen if virt_apex_re.match(a)), None)
+      if not virt_apex:
+        print("Removing %s from ramdisk: virt APEX not found" % filename)
+      else:
+        print("Replacing %s embedded key with %s key" % (filename, virt_apex))
+        # Get the current and new embedded keys.
+        payload_key, container_key, sign_tool = apex_keys[virt_apex]
+        new_pubkey_path = common.ExtractAvbPublicKey(
+            misc_info['avb_avbtool'], payload_key)
+        with open(new_pubkey_path, 'rb') as f:
+          new_pubkey = f.read()
+        pubkey_info = copy.copy(
+            input_tf_zip.getinfo("PREBUILT_IMAGES/pvmfw_embedded.avbpubkey"))
+        old_pubkey = input_tf_zip.read(pubkey_info.filename)
+        # Validate the keys and image.
+        if len(old_pubkey) != len(new_pubkey):
+          raise common.ExternalError("pvmfw embedded public key size mismatch")
+        pos = data.find(old_pubkey)
+        if pos == -1:
+          raise common.ExternalError("pvmfw embedded public key not found")
+        # Replace the key and copy new files.
+        new_data = data[:pos] + new_pubkey + data[pos+len(old_pubkey):]
+        common.ZipWriteStr(output_tf_zip, out_info, new_data)
+        common.ZipWriteStr(output_tf_zip, pubkey_info, new_pubkey)
+    elif filename == "PREBUILT_IMAGES/pvmfw_embedded.avbpubkey":
+      pass
+
     # Should NOT sign boot-debug.img.
     elif filename in (
         "BOOT/RAMDISK/force_debuggable",
@@ -1243,6 +1277,7 @@
   logger.info("Building vendor partitions using vendor otatools.")
   vendor_tempdir = common.UnzipTemp(output_zip_path, [
       "META/*",
+      "SYSTEM/build.prop",
   ] + ["{}/*".format(p.upper()) for p in OPTIONS.vendor_partitions])
 
   # Disable various partitions that build based on misc_info fields.
@@ -1265,16 +1300,37 @@
     for key in sorted(vendor_misc_info):
       output.write("{}={}\n".format(key, vendor_misc_info[key]))
 
+  # Disable system partition by a placeholder of IMAGES/system.img,
+  # instead of removing SYSTEM folder.
+  # Because SYSTEM/build.prop is still needed for:
+  #   add_img_to_target_files.CreateImage ->
+  #   common.BuildInfo ->
+  #   common.BuildInfo.CalculateFingerprint
+  vendor_images_path = os.path.join(vendor_tempdir, "IMAGES")
+  if not os.path.exists(vendor_images_path):
+    os.makedirs(vendor_images_path)
+  with open(os.path.join(vendor_images_path, "system.img"), "w") as output:
+    pass
+
   # Disable care_map.pb as not all ab_partitions are available when
   # vendor otatools regenerates vendor images.
-  os.remove(os.path.join(vendor_tempdir, "META/ab_partitions.txt"))
+  if os.path.exists(os.path.join(vendor_tempdir, "META/ab_partitions.txt")):
+    os.remove(os.path.join(vendor_tempdir, "META/ab_partitions.txt"))
+  # Disable RADIO images
+  if os.path.exists(os.path.join(vendor_tempdir, "META/pack_radioimages.txt")):
+    os.remove(os.path.join(vendor_tempdir, "META/pack_radioimages.txt"))
 
   # Build vendor images using vendor otatools.
-  vendor_otatools_dir = common.MakeTempDir(prefix="vendor_otatools_")
-  common.UnzipToDir(OPTIONS.vendor_otatools, vendor_otatools_dir)
+  # Accept either a zip file or extracted directory.
+  if os.path.isfile(OPTIONS.vendor_otatools):
+    vendor_otatools_dir = common.MakeTempDir(prefix="vendor_otatools_")
+    common.UnzipToDir(OPTIONS.vendor_otatools, vendor_otatools_dir)
+  else:
+    vendor_otatools_dir = OPTIONS.vendor_otatools
   cmd = [
       os.path.join(vendor_otatools_dir, "bin", "add_img_to_target_files"),
       "--is_signing",
+      "--add_missing",
       "--verbose",
       vendor_tempdir,
   ]
@@ -1520,8 +1576,5 @@
 if __name__ == '__main__':
   try:
     main(sys.argv[1:])
-  except common.ExternalError as e:
-    print("\n   ERROR: %s\n" % (e,))
-    raise
   finally:
     common.Cleanup()
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 7dd365f..f973263 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -1642,7 +1642,7 @@
     }
     test_file = tempfile.NamedTemporaryFile()
     self.assertRaises(common.ExternalError, common._GenerateGkiCertificate,
-                      test_file.name, 'generic_kernel', 'boot')
+                      test_file.name, 'generic_kernel')
 
   def test_GenerateGkiCertificate_SearchKeyPathNotFound(self):
     pubkey = 'no_testkey_gki.pem'
@@ -1662,7 +1662,7 @@
     }
     test_file = tempfile.NamedTemporaryFile()
     self.assertRaises(common.ExternalError, common._GenerateGkiCertificate,
-                      test_file.name, 'generic_kernel', 'boot')
+                      test_file.name, 'generic_kernel')
 
 class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
   """Checks the format of install-recovery.sh.
diff --git a/tools/releasetools/test_merge_target_files.py b/tools/releasetools/test_merge_target_files.py
deleted file mode 100644
index 835edab..0000000
--- a/tools/releasetools/test_merge_target_files.py
+++ /dev/null
@@ -1,310 +0,0 @@
-#
-# Copyright (C) 2017 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import os.path
-import shutil
-
-import common
-import test_utils
-from merge_target_files import (
-    validate_config_lists, DEFAULT_FRAMEWORK_ITEM_LIST,
-    DEFAULT_VENDOR_ITEM_LIST, DEFAULT_FRAMEWORK_MISC_INFO_KEYS, copy_items,
-    item_list_to_partition_set, process_apex_keys_apk_certs_common,
-    compile_split_sepolicy, validate_merged_apex_info)
-
-
-class MergeTargetFilesTest(test_utils.ReleaseToolsTestCase):
-
-  def setUp(self):
-    self.testdata_dir = test_utils.get_testdata_dir()
-
-  def test_copy_items_CopiesItemsMatchingPatterns(self):
-
-    def createEmptyFile(path):
-      if not os.path.exists(os.path.dirname(path)):
-        os.makedirs(os.path.dirname(path))
-      open(path, 'a').close()
-      return path
-
-    def createSymLink(source, dest):
-      os.symlink(source, dest)
-      return dest
-
-    def getRelPaths(start, filepaths):
-      return set(
-          os.path.relpath(path=filepath, start=start) for filepath in filepaths)
-
-    input_dir = common.MakeTempDir()
-    output_dir = common.MakeTempDir()
-    expected_copied_items = []
-    actual_copied_items = []
-    patterns = ['*.cpp', 'subdir/*.txt']
-
-    # Create various files that we expect to get copied because they
-    # match one of the patterns.
-    expected_copied_items.extend([
-        createEmptyFile(os.path.join(input_dir, 'a.cpp')),
-        createEmptyFile(os.path.join(input_dir, 'b.cpp')),
-        createEmptyFile(os.path.join(input_dir, 'subdir', 'c.txt')),
-        createEmptyFile(os.path.join(input_dir, 'subdir', 'd.txt')),
-        createEmptyFile(
-            os.path.join(input_dir, 'subdir', 'subsubdir', 'e.txt')),
-        createSymLink('a.cpp', os.path.join(input_dir, 'a_link.cpp')),
-    ])
-    # Create some more files that we expect to not get copied.
-    createEmptyFile(os.path.join(input_dir, 'a.h'))
-    createEmptyFile(os.path.join(input_dir, 'b.h'))
-    createEmptyFile(os.path.join(input_dir, 'subdir', 'subsubdir', 'f.gif'))
-    createSymLink('a.h', os.path.join(input_dir, 'a_link.h'))
-
-    # Copy items.
-    copy_items(input_dir, output_dir, patterns)
-
-    # Assert the actual copied items match the ones we expected.
-    for dirpath, _, filenames in os.walk(output_dir):
-      actual_copied_items.extend(
-          os.path.join(dirpath, filename) for filename in filenames)
-    self.assertEqual(
-        getRelPaths(output_dir, actual_copied_items),
-        getRelPaths(input_dir, expected_copied_items))
-    self.assertEqual(
-        os.readlink(os.path.join(output_dir, 'a_link.cpp')), 'a.cpp')
-
-  def test_validate_config_lists_ReturnsFalseIfMissingDefaultItem(self):
-    framework_item_list = list(DEFAULT_FRAMEWORK_ITEM_LIST)
-    framework_item_list.remove('SYSTEM/*')
-    self.assertFalse(
-        validate_config_lists(framework_item_list,
-                              DEFAULT_FRAMEWORK_MISC_INFO_KEYS,
-                              DEFAULT_VENDOR_ITEM_LIST))
-
-  def test_validate_config_lists_ReturnsTrueIfDefaultItemInDifferentList(self):
-    framework_item_list = list(DEFAULT_FRAMEWORK_ITEM_LIST)
-    framework_item_list.remove('ROOT/*')
-    vendor_item_list = list(DEFAULT_VENDOR_ITEM_LIST)
-    vendor_item_list.append('ROOT/*')
-    self.assertTrue(
-        validate_config_lists(framework_item_list,
-                              DEFAULT_FRAMEWORK_MISC_INFO_KEYS,
-                              vendor_item_list))
-
-  def test_validate_config_lists_ReturnsTrueIfExtraItem(self):
-    framework_item_list = list(DEFAULT_FRAMEWORK_ITEM_LIST)
-    framework_item_list.append('MY_NEW_PARTITION/*')
-    self.assertTrue(
-        validate_config_lists(framework_item_list,
-                              DEFAULT_FRAMEWORK_MISC_INFO_KEYS,
-                              DEFAULT_VENDOR_ITEM_LIST))
-
-  def test_validate_config_lists_ReturnsFalseIfSharedExtractedPartition(self):
-    vendor_item_list = list(DEFAULT_VENDOR_ITEM_LIST)
-    vendor_item_list.append('SYSTEM/my_system_file')
-    self.assertFalse(
-        validate_config_lists(DEFAULT_FRAMEWORK_ITEM_LIST,
-                              DEFAULT_FRAMEWORK_MISC_INFO_KEYS,
-                              vendor_item_list))
-
-  def test_validate_config_lists_ReturnsFalseIfSharedExtractedPartitionImage(
-      self):
-    vendor_item_list = list(DEFAULT_VENDOR_ITEM_LIST)
-    vendor_item_list.append('IMAGES/system.img')
-    self.assertFalse(
-        validate_config_lists(DEFAULT_FRAMEWORK_ITEM_LIST,
-                              DEFAULT_FRAMEWORK_MISC_INFO_KEYS,
-                              vendor_item_list))
-
-  def test_validate_config_lists_ReturnsFalseIfBadSystemMiscInfoKeys(self):
-    for bad_key in ['dynamic_partition_list', 'super_partition_groups']:
-      framework_misc_info_keys = list(DEFAULT_FRAMEWORK_MISC_INFO_KEYS)
-      framework_misc_info_keys.append(bad_key)
-      self.assertFalse(
-          validate_config_lists(DEFAULT_FRAMEWORK_ITEM_LIST,
-                                framework_misc_info_keys,
-                                DEFAULT_VENDOR_ITEM_LIST))
-
-  def test_process_apex_keys_apk_certs_ReturnsTrueIfNoConflicts(self):
-    output_dir = common.MakeTempDir()
-    os.makedirs(os.path.join(output_dir, 'META'))
-
-    framework_dir = common.MakeTempDir()
-    os.makedirs(os.path.join(framework_dir, 'META'))
-    os.symlink(
-        os.path.join(self.testdata_dir, 'apexkeys_framework.txt'),
-        os.path.join(framework_dir, 'META', 'apexkeys.txt'))
-
-    vendor_dir = common.MakeTempDir()
-    os.makedirs(os.path.join(vendor_dir, 'META'))
-    os.symlink(
-        os.path.join(self.testdata_dir, 'apexkeys_vendor.txt'),
-        os.path.join(vendor_dir, 'META', 'apexkeys.txt'))
-
-    process_apex_keys_apk_certs_common(framework_dir, vendor_dir, output_dir,
-                                       set(['product', 'system', 'system_ext']),
-                                       set(['odm', 'vendor']), 'apexkeys.txt')
-
-    merged_entries = []
-    merged_path = os.path.join(self.testdata_dir, 'apexkeys_merge.txt')
-
-    with open(merged_path) as f:
-      merged_entries = f.read().split('\n')
-
-    output_entries = []
-    output_path = os.path.join(output_dir, 'META', 'apexkeys.txt')
-
-    with open(output_path) as f:
-      output_entries = f.read().split('\n')
-
-    return self.assertEqual(merged_entries, output_entries)
-
-  def test_process_apex_keys_apk_certs_ReturnsFalseIfConflictsPresent(self):
-    output_dir = common.MakeTempDir()
-    os.makedirs(os.path.join(output_dir, 'META'))
-
-    framework_dir = common.MakeTempDir()
-    os.makedirs(os.path.join(framework_dir, 'META'))
-    os.symlink(
-        os.path.join(self.testdata_dir, 'apexkeys_framework.txt'),
-        os.path.join(framework_dir, 'META', 'apexkeys.txt'))
-
-    conflict_dir = common.MakeTempDir()
-    os.makedirs(os.path.join(conflict_dir, 'META'))
-    os.symlink(
-        os.path.join(self.testdata_dir, 'apexkeys_framework_conflict.txt'),
-        os.path.join(conflict_dir, 'META', 'apexkeys.txt'))
-
-    self.assertRaises(ValueError, process_apex_keys_apk_certs_common,
-                      framework_dir, conflict_dir, output_dir,
-                      set(['product', 'system', 'system_ext']),
-                      set(['odm', 'vendor']), 'apexkeys.txt')
-
-  def test_process_apex_keys_apk_certs_HandlesApkCertsSyntax(self):
-    output_dir = common.MakeTempDir()
-    os.makedirs(os.path.join(output_dir, 'META'))
-
-    framework_dir = common.MakeTempDir()
-    os.makedirs(os.path.join(framework_dir, 'META'))
-    os.symlink(
-        os.path.join(self.testdata_dir, 'apkcerts_framework.txt'),
-        os.path.join(framework_dir, 'META', 'apkcerts.txt'))
-
-    vendor_dir = common.MakeTempDir()
-    os.makedirs(os.path.join(vendor_dir, 'META'))
-    os.symlink(
-        os.path.join(self.testdata_dir, 'apkcerts_vendor.txt'),
-        os.path.join(vendor_dir, 'META', 'apkcerts.txt'))
-
-    process_apex_keys_apk_certs_common(framework_dir, vendor_dir, output_dir,
-                                       set(['product', 'system', 'system_ext']),
-                                       set(['odm', 'vendor']), 'apkcerts.txt')
-
-    merged_entries = []
-    merged_path = os.path.join(self.testdata_dir, 'apkcerts_merge.txt')
-
-    with open(merged_path) as f:
-      merged_entries = f.read().split('\n')
-
-    output_entries = []
-    output_path = os.path.join(output_dir, 'META', 'apkcerts.txt')
-
-    with open(output_path) as f:
-      output_entries = f.read().split('\n')
-
-    return self.assertEqual(merged_entries, output_entries)
-
-  def test_item_list_to_partition_set(self):
-    item_list = [
-        'META/apexkeys.txt',
-        'META/apkcerts.txt',
-        'META/filesystem_config.txt',
-        'PRODUCT/*',
-        'SYSTEM/*',
-        'SYSTEM_EXT/*',
-    ]
-    partition_set = item_list_to_partition_set(item_list)
-    self.assertEqual(set(['product', 'system', 'system_ext']), partition_set)
-
-  def test_compile_split_sepolicy(self):
-    product_out_dir = common.MakeTempDir()
-
-    def write_temp_file(path, data=''):
-      full_path = os.path.join(product_out_dir, path)
-      if not os.path.exists(os.path.dirname(full_path)):
-        os.makedirs(os.path.dirname(full_path))
-      with open(full_path, 'w') as f:
-        f.write(data)
-
-    write_temp_file(
-        'system/etc/vintf/compatibility_matrix.device.xml', """
-      <compatibility-matrix>
-        <sepolicy>
-          <kernel-sepolicy-version>30</kernel-sepolicy-version>
-        </sepolicy>
-      </compatibility-matrix>""")
-    write_temp_file('vendor/etc/selinux/plat_sepolicy_vers.txt', '30.0')
-
-    write_temp_file('system/etc/selinux/plat_sepolicy.cil')
-    write_temp_file('system/etc/selinux/mapping/30.0.cil')
-    write_temp_file('product/etc/selinux/mapping/30.0.cil')
-    write_temp_file('vendor/etc/selinux/vendor_sepolicy.cil')
-    write_temp_file('vendor/etc/selinux/plat_pub_versioned.cil')
-
-    cmd = compile_split_sepolicy(product_out_dir, {
-        'system': 'system',
-        'product': 'product',
-        'vendor': 'vendor',
-    })
-    self.assertEqual(' '.join(cmd),
-                     ('secilc -m -M true -G -N -c 30 '
-                      '-o {OTP}/META/combined_sepolicy -f /dev/null '
-                      '{OTP}/system/etc/selinux/plat_sepolicy.cil '
-                      '{OTP}/system/etc/selinux/mapping/30.0.cil '
-                      '{OTP}/vendor/etc/selinux/vendor_sepolicy.cil '
-                      '{OTP}/vendor/etc/selinux/plat_pub_versioned.cil '
-                      '{OTP}/product/etc/selinux/mapping/30.0.cil').format(
-                          OTP=product_out_dir))
-
-  def _copy_apex(self, source, output_dir, partition):
-    shutil.copy(
-        source,
-        os.path.join(output_dir, partition, 'apex', os.path.basename(source)))
-
-  @test_utils.SkipIfExternalToolsUnavailable()
-  def test_validate_merged_apex_info(self):
-    output_dir = common.MakeTempDir()
-    os.makedirs(os.path.join(output_dir, 'SYSTEM/apex'))
-    os.makedirs(os.path.join(output_dir, 'VENDOR/apex'))
-
-    self._copy_apex(
-        os.path.join(self.testdata_dir, 'has_apk.apex'), output_dir, 'SYSTEM')
-    self._copy_apex(
-        os.path.join(test_utils.get_current_dir(),
-                     'com.android.apex.compressed.v1.capex'), output_dir,
-        'VENDOR')
-    validate_merged_apex_info(output_dir, ('system', 'vendor'))
-
-  @test_utils.SkipIfExternalToolsUnavailable()
-  def test_validate_merged_apex_info_RaisesOnPackageInMultiplePartitions(self):
-    output_dir = common.MakeTempDir()
-    os.makedirs(os.path.join(output_dir, 'SYSTEM/apex'))
-    os.makedirs(os.path.join(output_dir, 'VENDOR/apex'))
-
-    same_apex_package = os.path.join(self.testdata_dir, 'has_apk.apex')
-    self._copy_apex(same_apex_package, output_dir, 'SYSTEM')
-    self._copy_apex(same_apex_package, output_dir, 'VENDOR')
-    self.assertRaisesRegexp(
-        common.ExternalError,
-        'Duplicate APEX packages found in multiple partitions: com.android.wifi',
-        validate_merged_apex_info, output_dir, ('system', 'vendor'))
diff --git a/tools/releasetools/test_sign_apex.py b/tools/releasetools/test_sign_apex.py
index 8470f20..c344e22 100644
--- a/tools/releasetools/test_sign_apex.py
+++ b/tools/releasetools/test_sign_apex.py
@@ -71,3 +71,21 @@
         False,
         codename_to_api_level_map={'S': 31, 'Tiramisu' : 32})
     self.assertTrue(os.path.exists(signed_apex))
+
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_SignApexWithSepolicy(self):
+    test_apex = os.path.join(self.testdata_dir, 'sepolicy.apex')
+    payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
+    container_key = os.path.join(self.testdata_dir, 'testkey')
+    sepolicy_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
+    sepolicy_cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
+    signed_test_apex = sign_apex.SignApexFile(
+        'avbtool',
+        test_apex,
+        payload_key,
+        container_key,
+        False,
+        None,
+        sepolicy_key=sepolicy_key,
+        sepolicy_cert=sepolicy_cert)
+    self.assertTrue(os.path.exists(signed_test_apex))
diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py
index 808b392..e30d2b9 100755
--- a/tools/releasetools/test_utils.py
+++ b/tools/releasetools/test_utils.py
@@ -33,6 +33,8 @@
 # Some test runner doesn't like outputs from stderr.
 logging.basicConfig(stream=sys.stdout)
 
+ALLOWED_TEST_SUBDIRS = ('merge',)
+
 # Use ANDROID_BUILD_TOP as an indicator to tell if the needed tools (e.g.
 # avbtool, mke2fs) are available while running the tests, unless
 # FORCE_RUN_RELEASETOOLS is set to '1'. Not having the required vars means we
@@ -244,9 +246,12 @@
   # os walk and load them manually.
   test_modules = []
   base_path = os.path.dirname(os.path.realpath(__file__))
+  test_dirs = [base_path] + [
+      os.path.join(base_path, subdir) for subdir in ALLOWED_TEST_SUBDIRS
+  ]
   for dirpath, _, files in os.walk(base_path):
     for fn in files:
-      if dirpath == base_path and re.match('test_.*\\.py$', fn):
+      if dirpath in test_dirs and re.match('test_.*\\.py$', fn):
         test_modules.append(fn[:-3])
 
   test_suite = unittest.TestLoader().loadTestsFromNames(test_modules)
diff --git a/tools/releasetools/testdata/sepolicy.apex b/tools/releasetools/testdata/sepolicy.apex
new file mode 100644
index 0000000..f7d267d
--- /dev/null
+++ b/tools/releasetools/testdata/sepolicy.apex
Binary files differ
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index 282dc99..beb9e75 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -131,8 +131,10 @@
     logging.warning('Skipped due to target using non-sparse images')
     return
 
-  # Verify IMAGES/system.img.
-  CheckAllFiles('system')
+  # Verify IMAGES/system.img if applicable.
+  # Some targets, e.g., gki_arm64, gki_x86_64, etc., are system.img-less.
+  if 'IMAGES/system.img' in input_zip.namelist():
+    CheckAllFiles('system')
 
   # Verify IMAGES/vendor.img if applicable.
   if 'VENDOR/' in input_zip.namelist():
@@ -259,9 +261,6 @@
 
 def ValidatePartitionFingerprints(input_tmp, info_dict):
   build_info = common.BuildInfo(info_dict)
-  if not build_info.avb_enabled:
-    logging.info("AVB not enabled, skipping partition fingerprint checks")
-    return
   # Expected format:
   #  Prop: com.android.build.vendor.fingerprint -> 'generic/aosp_cf_x86_64_phone/vsoc_x86_64:S/AOSP.MASTER/7335886:userdebug/test-keys'
   #  Prop: com.android.build.vendor_boot.fingerprint -> 'generic/aosp_cf_x86_64_phone/vsoc_x86_64:S/AOSP.MASTER/7335886:userdebug/test-keys'
@@ -398,7 +397,7 @@
           verity_key_mincrypt, stdoutdata.rstrip())
 
   # Handle the case of Verified Boot 2.0 (AVB).
-  if info_dict.get("avb_enable") == "true":
+  if info_dict.get("avb_building_vbmeta_image") == "true":
     logging.info('Verifying Verified Boot 2.0 (AVB) images...')
 
     key = options['verity_key']
diff --git a/tools/releasetools/verity_utils.py b/tools/releasetools/verity_utils.py
index a08ddbe..d55ad88 100644
--- a/tools/releasetools/verity_utils.py
+++ b/tools/releasetools/verity_utils.py
@@ -379,6 +379,11 @@
     self.avbtool = avbtool
     self.algorithm = algorithm
     self.key_path = key_path
+    if key_path and not os.path.exists(key_path) and OPTIONS.search_path:
+      new_key_path = os.path.join(OPTIONS.search_path, key_path)
+      if os.path.exists(new_key_path):
+        self.key_path = new_key_path
+
     self.salt = salt
     self.signing_args = signing_args
     self.image_size = None
diff --git a/tools/warn/tidy_warn_patterns.py b/tools/warn/tidy_warn_patterns.py
index a5842ea..c138f1c 100644
--- a/tools/warn/tidy_warn_patterns.py
+++ b/tools/warn/tidy_warn_patterns.py
@@ -224,6 +224,9 @@
     analyzer_warn_check('clang-analyzer-valist.Unterminated'),
     analyzer_group_check('clang-analyzer-core.uninitialized'),
     analyzer_group_check('clang-analyzer-deadcode'),
+    analyzer_warn_check('clang-analyzer-security.insecureAPI.bcmp'),
+    analyzer_warn_check('clang-analyzer-security.insecureAPI.bcopy'),
+    analyzer_warn_check('clang-analyzer-security.insecureAPI.bzero'),
     analyzer_warn_check('clang-analyzer-security.insecureAPI.strcpy'),
     analyzer_group_high('clang-analyzer-security.insecureAPI'),
     analyzer_group_high('clang-analyzer-security'),