Merge "Add documents-archive to SDK, docs builds" into nyc-dev
am: 88b0d6a63d

* commit '88b0d6a63d8149d14032d298eccde79a6d3079d4':
  Add documents-archive to SDK, docs builds

Change-Id: I2c07f2f20da849174d23dfe6ab5c81008c37c6b1
diff --git a/buildspec.mk.default b/buildspec.mk.default
index d14208e..3224d1a 100644
--- a/buildspec.mk.default
+++ b/buildspec.mk.default
@@ -61,9 +61,6 @@
 # will be added to LOCAL_CFLAGS when building the module.
 #DEBUG_MODULE_ModuleName:=true
 
-# Specify an alternative tool chain prefix if needed.
-#TARGET_TOOLS_PREFIX:=
-
 # Specify the extra CFLAGS to use when building a module whose
 # DEBUG_MODULE_ variable is set.  Host and device flags are handled
 # separately.
diff --git a/core/Makefile b/core/Makefile
index f377051..6dd8a81 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -420,9 +420,9 @@
 pdk_fusion_log_tags_file := $(patsubst $(PRODUCT_OUT)/%,$(_pdk_fusion_intermediates)/%,$(filter $(event_log_tags_file),$(ALL_PDK_FUSION_FILES)))
 
 $(all_event_log_tags_file): PRIVATE_SRC_FILES := $(all_event_log_tags_src) $(pdk_fusion_log_tags_file)
-$(all_event_log_tags_file): $(all_event_log_tags_src) $(pdk_fusion_log_tags_file)
+$(all_event_log_tags_file): $(all_event_log_tags_src) $(pdk_fusion_log_tags_file) $(MERGETAGS) build/tools/event_log_tags.py
 	$(hide) mkdir -p $(dir $@)
-	$(hide) build/tools/merge-event-log-tags.py -o $@ $(PRIVATE_SRC_FILES)
+	$(hide) $(MERGETAGS) -o $@ $(PRIVATE_SRC_FILES)
 
 # Include tags from all packages included in this product, plus all
 # tags that are part of the system (ie, not in a vendor/ or device/
@@ -436,9 +436,9 @@
 
 $(event_log_tags_file): PRIVATE_SRC_FILES := $(event_log_tags_src) $(pdk_fusion_log_tags_file)
 $(event_log_tags_file): PRIVATE_MERGED_FILE := $(all_event_log_tags_file)
-$(event_log_tags_file): $(event_log_tags_src) $(all_event_log_tags_file) $(pdk_fusion_log_tags_file)
+$(event_log_tags_file): $(event_log_tags_src) $(all_event_log_tags_file) $(pdk_fusion_log_tags_file) $(MERGETAGS) build/tools/event_log_tags.py
 	$(hide) mkdir -p $(dir $@)
-	$(hide) build/tools/merge-event-log-tags.py -o $@ -m $(PRIVATE_MERGED_FILE) $(PRIVATE_SRC_FILES)
+	$(hide) $(MERGETAGS) -o $@ -m $(PRIVATE_MERGED_FILE) $(PRIVATE_SRC_FILES)
 
 event-log-tags: $(event_log_tags_file)
 
@@ -468,7 +468,6 @@
 # -----------------------------------------------------------------
 # the ramdisk
 INTERNAL_RAMDISK_FILES := $(filter $(TARGET_ROOT_OUT)/%, \
-	$(ALL_PREBUILT) \
 	$(ALL_GENERATED_SOURCES) \
 	$(ALL_DEFAULT_INSTALLED_MODULES))
 
@@ -493,8 +492,28 @@
 	$(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET)) \
 	--kernel $(INSTALLED_KERNEL_TARGET)
 
+INTERNAL_BVBTOOL_MAKE_BOOT_IMAGE_ARGS := \
+	--kernel $(INSTALLED_KERNEL_TARGET) \
+	--rootfs_with_hashes $(PRODUCT_OUT)/system.img
+
+ifdef BOARD_BVB_ROLLBACK_INDEX
+INTERNAL_BVBTOOL_MAKE_BOOT_IMAGE_ARGS += \
+	--rollback_index $(BOARD_BVB_ROLLBACK_INDEX)
+endif
+
+ifndef BOARD_BVB_KEY_PATH
+# If key path isn't specified, use the 4096-bit test key.
+INTERNAL_BVBTOOL_SIGN_BOOT_IMAGE_ARGS := --algorithm SHA256_RSA4096 \
+	--key system/bvb/test/testkey_rsa4096.pem
+else
+INTERNAL_BVBTOOL_SIGN_BOOT_IMAGE_ARGS := \
+	--algorithm $(BOARD_BVB_ALGORITHM) --key $(BOARD_BVB_KEY_PATH)
+endif
+
+
 ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
 INTERNAL_BOOTIMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
+INTERNAL_BVBTOOL_MAKE_BOOT_IMAGE_ARGS += --initrd $(INSTALLED_RAMDISK_TARGET)
 endif
 
 INTERNAL_BOOTIMAGE_FILES := $(filter-out --%,$(INTERNAL_BOOTIMAGE_ARGS))
@@ -502,6 +521,7 @@
 BOARD_KERNEL_CMDLINE := $(strip $(BOARD_KERNEL_CMDLINE))
 ifdef BOARD_KERNEL_CMDLINE
   INTERNAL_BOOTIMAGE_ARGS += --cmdline "$(BOARD_KERNEL_CMDLINE)"
+  INTERNAL_BVBTOOL_MAKE_BOOT_IMAGE_ARGS += --kernel_cmdline "$(BOARD_KERNEL_CMDLINE)"
 endif
 
 BOARD_KERNEL_BASE := $(strip $(BOARD_KERNEL_BASE))
@@ -527,6 +547,23 @@
 endif
 endif
 
+ifeq ($(BOARD_BVB_ENABLE),true)
+
+$(INSTALLED_BOOTIMAGE_TARGET): $(BVBTOOL) $(INTERNAL_BOOTIMAGE_FILES) $(PRODUCT_OUT)/system.img
+	$(call pretty,"Target boot image: $@")
+	$(hide) $(BVBTOOL) make_boot_image $(INTERNAL_BVBTOOL_MAKE_BOOT_IMAGE_ARGS) $(BOARD_BVB_MAKE_BOOT_IMAGE_ARGS) --output $@
+	$(hide) $(BVBTOOL) sign_boot_image $(INTERNAL_BVBTOOL_SIGN_BOOT_IMAGE_ARGS) $(BOARD_BVB_SIGN_BOOT_IMAGE_ARGS) --image $@
+	$(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+
+.PHONY: bootimage-nodeps
+bootimage-nodeps: $(BVBTOOL)
+	@echo "make $@: ignoring dependencies"
+	$(hide) $(BVBTOOL) make_boot_image $(INTERNAL_BVBTOOL_MAKE_BOOT_IMAGE_ARGS) $(BOARD_BVB_MAKE_BOOT_IMAGE_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
+	$(hide) $(BVBTOOL) sign_boot_image $(INTERNAL_BVBTOOL_SIGN_BOOT_IMAGE_ARGS) $(BOARD_BVB_SIGN_BOOT_IMAGE_ARGS) --image $(INSTALLED_BOOTIMAGE_TARGET)
+	$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+
+else # BOARD_BVB_ENABLE
+
 # We build recovery as boot image if BOARD_USES_RECOVERY_AS_BOOT is true.
 ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
 ifeq ($(TARGET_BOOTIMAGE_USE_EXT2),true)
@@ -576,6 +613,7 @@
 
 endif # TARGET_BOOTIMAGE_USE_EXT2
 endif # BOARD_USES_RECOVERY_AS_BOOT
+endif # BOARD_BVB_ENABLE
 
 else	# TARGET_NO_KERNEL
 # HACK: The top-level targets depend on the bootimage.  Not all targets
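As a point of reference for the hooks added above, a BoardConfig.mk that opts in to BVB boot-image signing might look roughly like the sketch below; the device path and values are hypothetical, and only the BOARD_BVB_* variable names come from this change.

BOARD_BVB_ENABLE := true
BOARD_BVB_ALGORITHM := SHA256_RSA4096
BOARD_BVB_KEY_PATH := device/acme/example/keys/boot_rsa4096.pem  # omit to fall back to the bundled test key
BOARD_BVB_ROLLBACK_INDEX := 3
# Extra arguments, if any, are passed straight through to bvbtool:
BOARD_BVB_MAKE_BOOT_IMAGE_ARGS :=
BOARD_BVB_SIGN_BOOT_IMAGE_ARGS :=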
@@ -664,7 +702,7 @@
 $(target_notice_file_html_gz): $(target_notice_file_html) | $(MINIGZIP)
 	$(hide) $(MINIGZIP) -9 < $< > $@
 installed_notice_html_gz := $(TARGET_OUT)/etc/NOTICE.html.gz
-$(installed_notice_html_gz): $(target_notice_file_html_gz) | $(ACP)
+$(installed_notice_html_gz): $(target_notice_file_html_gz)
 	$(copy-file-to-target)
 
 # if we've been run by mm, mmm, etc, don't reinstall this every time
@@ -1046,7 +1084,6 @@
         $(ALL_PDK_FUSION_FILES))
 
 INTERNAL_SYSTEMIMAGE_FILES := $(filter $(TARGET_OUT)/%, \
-    $(ALL_PREBUILT) \
     $(ALL_GENERATED_SOURCES) \
     $(ALL_DEFAULT_INSTALLED_MODULES) \
     $(PDK_FUSION_SYSIMG_FILES) \
@@ -1114,9 +1151,14 @@
            fi; \
            mkdir -p $(DIST_DIR); cp $(INSTALLED_FILES_FILE) $(DIST_DIR)/installed-files-rescued.txt; \
            exit 1 )
+  $(if $(BOARD_BVB_ENABLE), $(hide) $(BVBTOOL) add_image_hashes $(BOARD_BVB_ADD_IMAGE_HASHES_ARGS) --image $(1))
 endef
 
-$(BUILT_SYSTEMIMAGE): $(FULL_SYSTEMIMAGE_DEPS) $(INSTALLED_FILES_FILE)
+ifeq ($(BOARD_BVB_ENABLE),true)
+FULL_SYSTEMIMAGE_DEPS += $(BVBTOOL)
+endif
+
+$(BUILT_SYSTEMIMAGE): $(FULL_SYSTEMIMAGE_DEPS) $(INSTALLED_FILES_FILE) $(BUILD_IMAGE_SRCS)
 	$(call build-systemimage-target,$@)
 
 INSTALLED_SYSTEMIMAGE := $(PRODUCT_OUT)/system.img
@@ -1144,7 +1186,7 @@
 endif
 
 
-$(INSTALLED_SYSTEMIMAGE): $(BUILT_SYSTEMIMAGE) $(RECOVERY_FROM_BOOT_PATCH) | $(ACP)
+$(INSTALLED_SYSTEMIMAGE): $(BUILT_SYSTEMIMAGE) $(RECOVERY_FROM_BOOT_PATCH)
 	@echo "Install system fs image: $@"
 	$(copy-file-to-target)
 	$(hide) $(call assert-max-image-size,$@ $(RECOVERY_FROM_BOOT_PATCH),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE))
@@ -1330,7 +1372,8 @@
 # We just build this directly to the install location.
 INSTALLED_USERDATAIMAGE_TARGET := $(BUILT_USERDATAIMAGE_TARGET)
 $(INSTALLED_USERDATAIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) \
-                                   $(INTERNAL_USERDATAIMAGE_FILES)
+                                   $(INTERNAL_USERDATAIMAGE_FILES) \
+                                   $(BUILD_IMAGE_SRCS)
 	$(build-userdataimage-target)
 
 .PHONY: userdataimage-nodeps
@@ -1362,6 +1405,36 @@
 
 
 # -----------------------------------------------------------------
+# partition table image
+ifdef BOARD_BPT_INPUT_FILES
+
+BUILT_BPTIMAGE_TARGET := $(PRODUCT_OUT)/partition-table.img
+
+INTERNAL_BVBTOOL_MAKE_TABLE_ARGS := \
+	--output_gpt $(BUILT_BPTIMAGE_TARGET) \
+	--output_json $(PRODUCT_OUT)/partition-table.bpt \
+	$(foreach file, $(BOARD_BPT_INPUT_FILES), --input $(file))
+
+ifdef BOARD_BPT_DISK_SIZE
+INTERNAL_BVBTOOL_MAKE_TABLE_ARGS += --disk_size $(BOARD_BPT_DISK_SIZE)
+endif
+
+define build-bptimage-target
+  $(call pretty,"Target partition table image: $(INSTALLED_BPTIMAGE_TARGET)")
+  $(hide) $(BPTTOOL) make_table $(INTERNAL_BVBTOOL_MAKE_TABLE_ARGS) $(BOARD_BPT_MAKE_TABLE_ARGS)
+endef
+
+INSTALLED_BPTIMAGE_TARGET := $(BUILT_BPTIMAGE_TARGET)
+$(INSTALLED_BPTIMAGE_TARGET): $(BPTTOOL) $(BOARD_BPT_INPUT_FILES)
+	$(build-bptimage-target)
+
+.PHONY: bptimage-nodeps
+bptimage-nodeps:
+	$(build-bptimage-target)
+
+endif # BOARD_BPT_INPUT_FILES
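For illustration, a board that wants this generated partition table could set the following; the path and size are hypothetical, while the variable names and the bptimage-nodeps target come from the block above. Running `make bptimage-nodeps` then regenerates partition-table.img and partition-table.bpt under $(PRODUCT_OUT) without rebuilding prerequisites.

BOARD_BPT_INPUT_FILES := device/acme/example/partitions.bpt
BOARD_BPT_DISK_SIZE := 4294967296       # optional; forwarded as --disk_size
BOARD_BPT_MAKE_TABLE_ARGS :=            # extra flags passed through to bpttool make_table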
+
+# -----------------------------------------------------------------
 # cache partition image
 ifdef BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE
 INTERNAL_CACHEIMAGE_FILES := \
@@ -1384,7 +1457,7 @@
 
 # We just build this directly to the install location.
 INSTALLED_CACHEIMAGE_TARGET := $(BUILT_CACHEIMAGE_TARGET)
-$(INSTALLED_CACHEIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_CACHEIMAGE_FILES)
+$(INSTALLED_CACHEIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_CACHEIMAGE_FILES) $(BUILD_IMAGE_SRCS)
 	$(build-cacheimage-target)
 
 .PHONY: cacheimage-nodeps
@@ -1429,7 +1502,7 @@
 
 # We just build this directly to the install location.
 INSTALLED_VENDORIMAGE_TARGET := $(BUILT_VENDORIMAGE_TARGET)
-$(INSTALLED_VENDORIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_VENDORIMAGE_FILES) $(INSTALLED_FILES_FILE_VENDOR)
+$(INSTALLED_VENDORIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_VENDORIMAGE_FILES) $(INSTALLED_FILES_FILE_VENDOR) $(BUILD_IMAGE_SRCS)
 	$(build-vendorimage-target)
 
 .PHONY: vendorimage-nodeps
@@ -1511,8 +1584,6 @@
   $(HOST_LIBRARY_PATH)/libcutils$(HOST_SHLIB_SUFFIX) \
   $(HOST_LIBRARY_PATH)/libselinux$(HOST_SHLIB_SUFFIX) \
   $(HOST_LIBRARY_PATH)/libcrypto-host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libdivsufsort$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libdivsufsort64$(HOST_SHLIB_SUFFIX) \
   $(HOST_LIBRARY_PATH)/libext2fs-host$(HOST_SHLIB_SUFFIX) \
   $(HOST_LIBRARY_PATH)/libext2_blkid-host$(HOST_SHLIB_SUFFIX) \
   $(HOST_LIBRARY_PATH)/libext2_com_err-host$(HOST_SHLIB_SUFFIX) \
@@ -1603,10 +1674,14 @@
 $(BUILT_TARGET_FILES_PACKAGE): tool_extensions := $(TARGET_RELEASETOOLS_EXTENSIONS)
 endif
 
+ifeq ($(AB_OTA_UPDATER),true)
+# Build zlib fingerprint if using the AB Updater.
+updater_dep := $(TARGET_OUT_COMMON_GEN)/zlib_fingerprint
+else
 # Build OTA tools if not using the AB Updater.
-ifneq ($(AB_OTA_UPDATER),true)
-$(BUILT_TARGET_FILES_PACKAGE): $(built_ota_tools)
+updater_dep := $(built_ota_tools)
 endif
+$(BUILT_TARGET_FILES_PACKAGE): $(updater_dep)
 
 # If we are using recovery as boot, output recovery files to BOOT/.
 ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
@@ -1629,6 +1704,7 @@
 		$(SELINUX_FC) \
 		$(APKCERTS_FILE) \
 		$(HOST_OUT_EXECUTABLES)/fs_config \
+		build/tools/releasetools/add_img_to_target_files \
 		| $(ACP)
 	@echo "Package target files: $@"
 	$(hide) rm -rf $@ $(zip_root)
@@ -1770,6 +1846,23 @@
 ifeq ($(BOARD_USES_FULL_RECOVERY_IMAGE),true)
 	$(hide) echo "full_recovery_image=true" >> $(zip_root)/META/misc_info.txt
 endif
+ifeq ($(BOARD_BVB_ENABLE),true)
+	$(hide) echo "board_bvb_enable=true" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "board_bvb_make_boot_image_args=$(BOARD_BVB_MAKE_BOOT_IMAGE_ARGS)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "board_bvb_sign_boot_image_args=$(BOARD_BVB_SIGN_BOOT_IMAGE_ARGS)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "board_bvb_algorithm=$(BOARD_BVB_ALGORITHM)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "board_bvb_key_path=$(BOARD_BVB_KEY_PATH)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "board_bvb_rollback_index=$(BOARD_BVB_ROLLBACK_INDEX)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "board_bvb_add_image_hashes_args=$(BOARD_BVB_ADD_IMAGE_HASHES_ARGS)" >> $(zip_root)/META/misc_info.txt
+endif
+ifdef BOARD_BPT_INPUT_FILES
+	$(hide) echo "board_bpt_enable=true" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "board_bpt_make_table_args=$(BOARD_BPT_MAKE_TABLE_ARGS)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "board_bpt_input_files=$(BOARD_BPT_INPUT_FILES)" >> $(zip_root)/META/misc_info.txt
+endif
+ifdef BOARD_BPT_DISK_SIZE
+	$(hide) echo "board_bpt_disk_size=$(BOARD_BPT_DISK_SIZE)" >> $(zip_root)/META/misc_info.txt
+endif
 	$(call generate-userimage-prop-dictionary, $(zip_root)/META/misc_info.txt)
 ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
 	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
@@ -1778,6 +1871,7 @@
 ifeq ($(AB_OTA_UPDATER),true)
 	@# When using the A/B updater, include the updater config files in the zip.
 	$(hide) $(ACP) $(TOPDIR)system/update_engine/update_engine.conf $(zip_root)/META/update_engine_config.txt
+	$(hide) $(ACP) $(TARGET_OUT_COMMON_GEN)/zlib_fingerprint $(zip_root)/META/zlib_fingerprint.txt
 	$(hide) for part in $(AB_OTA_PARTITIONS); do \
 	  echo "$${part}" >> $(zip_root)/META/ab_partitions.txt; \
 	done
@@ -1787,6 +1881,17 @@
 	@# Include the build type in META/misc_info.txt so the server can easily differentiate production builds.
 	$(hide) echo "build_type=$(TARGET_BUILD_VARIANT)" >> $(zip_root)/META/misc_info.txt
 	$(hide) echo "ab_update=true" >> $(zip_root)/META/misc_info.txt
+ifdef BRILLO_VENDOR_PARTITIONS
+	$(hide) mkdir -p $(zip_root)/VENDOR_IMAGES
+	$(hide) for f in $(BRILLO_VENDOR_PARTITIONS); do \
+	  pair1="$$(echo $$f | awk -F':' '{print $$1}')"; \
+	  pair2="$$(echo $$f | awk -F':' '{print $$2}')"; \
+	  src=$${pair1}/$${pair2}; \
+	  dest=$(zip_root)/VENDOR_IMAGES/$${pair2}; \
+	  mkdir -p $$(dirname "$${dest}"); \
+	  $(ACP) $${src} $${dest}; \
+	done;
+endif
 ifdef OSRELEASED_DIRECTORY
 	$(hide) $(ACP) $(TARGET_OUT_ETC)/$(OSRELEASED_DIRECTORY)/product_id $(zip_root)/META/product_id.txt
 	$(hide) $(ACP) $(TARGET_OUT_ETC)/$(OSRELEASED_DIRECTORY)/product_version $(zip_root)/META/product_version.txt
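The loop above splits each BRILLO_VENDOR_PARTITIONS entry on ':' into a base directory and a relative path, then copies <base>/<relative> into $(zip_root)/VENDOR_IMAGES/<relative>, preserving the relative layout. A hypothetical configuration therefore looks like:

BRILLO_VENDOR_PARTITIONS := \
    vendor/acme/example/prebuilts:images/bootloader.img \
    vendor/acme/example/prebuilts:images/modem.img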
@@ -1837,7 +1942,8 @@
 
 $(INTERNAL_OTA_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
 
-$(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE)
+$(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) \
+		build/tools/releasetools/ota_from_target_files
 	@echo "Package OTA: $@"
 	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
 	   ./build/tools/releasetools/ota_from_target_files -v \
@@ -1863,7 +1969,8 @@
 
 INTERNAL_UPDATE_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
 
-$(INTERNAL_UPDATE_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE)
+$(INTERNAL_UPDATE_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) \
+		build/tools/releasetools/img_from_target_files
 	@echo "Package: $@"
 	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
 	   ./build/tools/releasetools/img_from_target_files -v \
@@ -1886,7 +1993,11 @@
 SYMBOLS_ZIP := $(PRODUCT_OUT)/$(name).zip
 # For apps_only build we'll establish the dependency later in build/core/main.mk.
 ifndef TARGET_BUILD_APPS
-$(SYMBOLS_ZIP): $(INSTALLED_SYSTEMIMAGE) $(INSTALLED_BOOTIMAGE_TARGET)
+$(SYMBOLS_ZIP): $(INSTALLED_SYSTEMIMAGE) \
+		$(INSTALLED_BOOTIMAGE_TARGET) \
+		$(INSTALLED_USERDATAIMAGE_TARGET) \
+		$(INSTALLED_VENDORIMAGE_TARGET) \
+		$(updater_dep)
 endif
 $(SYMBOLS_ZIP):
 	@echo "Package symbols: $@"
@@ -2017,7 +2128,6 @@
 # if we don't have a real list, then use "everything"
 ifeq ($(strip $(ATREE_FILES)),)
 ATREE_FILES := \
-	$(ALL_PREBUILT) \
 	$(ALL_DEFAULT_INSTALLED_MODULES) \
 	$(INSTALLED_RAMDISK_TARGET) \
 	$(ALL_DOCS) \
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 6722af4..f88fd89 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -55,8 +55,13 @@
 ifeq ($(my_host_cross),true)
   my_module_tags :=
 endif
+ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+ifdef LOCAL_2ND_ARCH_VAR_PREFIX
+# Don't pull in modules by tags if this is for translation TARGET_2ND_ARCH.
+  my_module_tags :=
+endif
+endif
 
-ifdef BUILDING_WITH_NINJA
 # Ninja has an implicit dependency on the command being run, and kati will
 # regenerate the ninja manifest if any read makefile changes, so there is no
 # need to have dependencies on makefiles.
@@ -64,7 +69,6 @@
 # a .mk file, because a few users of LOCAL_ADDITIONAL_DEPENDENCIES don't include
 # base_rules.mk, but it will fix the most common ones.
 LOCAL_ADDITIONAL_DEPENDENCIES := $(filter-out %.mk,$(LOCAL_ADDITIONAL_DEPENDENCIES))
-endif
 
 ###########################################################
 ## Validate and define fallbacks for input LOCAL_* variables.
@@ -125,7 +129,17 @@
 my_32_64_bit_suffix := $(if $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)IS_64_BIT),64,32)
 
 ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
+ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+# When in TARGET_TRANSLATE_2ND_ARCH both TARGET_ARCH and TARGET_2ND_ARCH are 32-bit,
+# to avoid path conflict we force using LOCAL_MODULE_PATH_64 for the first arch.
+ifdef LOCAL_2ND_ARCH_VAR_PREFIX
+my_multilib_module_path := $(LOCAL_MODULE_PATH_32)
+else  # ! LOCAL_2ND_ARCH_VAR_PREFIX
+my_multilib_module_path := $(LOCAL_MODULE_PATH_64)
+endif  # ! LOCAL_2ND_ARCH_VAR_PREFIX
+else  # ! TARGET_TRANSLATE_2ND_ARCH
 my_multilib_module_path := $(strip $(LOCAL_MODULE_PATH_$(my_32_64_bit_suffix)))
+endif # ! TARGET_TRANSLATE_2ND_ARCH
 ifdef my_multilib_module_path
 my_module_path := $(my_multilib_module_path)
 else
@@ -288,6 +302,12 @@
 .PHONY: $(my_register_name)
 $(my_register_name): $(LOCAL_BUILT_MODULE) $(LOCAL_INSTALLED_MODULE)
 
+ifneq ($(my_register_name),$(LOCAL_MODULE))
+# $(LOCAL_MODULE) covers all the multilib targets.
+.PHONY: $(LOCAL_MODULE)
+$(LOCAL_MODULE) : $(my_register_name)
+endif
+
 # Set up phony targets that covers all modules under the given paths.
 # This allows us to build everything in given paths by running mmma/mma.
 my_path_components := $(subst /,$(space),$(LOCAL_PATH))
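As a concrete illustration (module name hypothetical): on a 64-bit multilib product a library is registered once per arch, so the alias above makes the bare module name cover both registrations.

# my_register_name = libacme    (primary, 64-bit build)
# my_register_name = libacme_32 (TARGET_2ND_ARCH build) -- now also reachable via `make libacme`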
@@ -301,45 +321,30 @@
 ## Module installation rule
 ###########################################################
 
-# Some hosts do not have ACP; override the LOCAL version if that's the case.
-ifneq ($(strip $(HOST_ACP_UNAVAILABLE)),)
-  LOCAL_ACP_UNAVAILABLE := $(strip $(HOST_ACP_UNAVAILABLE))
-endif
-
 ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
-  # Define a copy rule to install the module.
-  # acp and libraries that it uses can't use acp for
-  # installation;  hence, LOCAL_ACP_UNAVAILABLE.
 $(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := $(LOCAL_POST_INSTALL_CMD)
-ifneq ($(LOCAL_ACP_UNAVAILABLE),true)
-$(LOCAL_INSTALLED_MODULE): $(LOCAL_BUILT_MODULE) | $(ACP)
+$(LOCAL_INSTALLED_MODULE): $(LOCAL_BUILT_MODULE)
 	@echo "Install: $@"
 	$(copy-file-to-new-target)
 	$(PRIVATE_POST_INSTALL_CMD)
-else
-$(LOCAL_INSTALLED_MODULE): $(LOCAL_BUILT_MODULE)
-	@echo "Install: $@"
-	$(copy-file-to-target-with-cp)
-endif
 
 # Rule to install the module's companion init.rc.
-my_init_rc := $(LOCAL_INIT_RC_$(my_32_64_bit_suffix))
-my_init_rc_src :=
 my_init_rc_installed :=
-ifndef my_init_rc
-my_init_rc := $(LOCAL_INIT_RC)
+my_init_rc_pairs :=
+my_init_rc := $(LOCAL_INIT_RC_$(my_32_64_bit_suffix))
+ifneq ($(my_init_rc),)
+my_init_rc_pairs += $(LOCAL_PATH)/$(my_init_rc):$(TARGET_OUT$(partition_tag)_ETC)/init/$(notdir $(my_init_rc))
+endif
+ifneq ($(LOCAL_INIT_RC),)
+my_init_rc_pairs += $(LOCAL_PATH)/$(LOCAL_INIT_RC):$(TARGET_OUT$(partition_tag)_ETC)/init/$(notdir $(LOCAL_INIT_RC))
 # Make sure we don't define the rule twice in multilib module.
 LOCAL_INIT_RC :=
 endif
-ifdef my_init_rc
-my_init_rc_src := $(LOCAL_PATH)/$(my_init_rc)
-my_init_rc_installed := $(TARGET_OUT$(partition_tag)_ETC)/init/$(notdir $(my_init_rc_src))
-$(my_init_rc_installed) : $(my_init_rc_src) | $(ACP)
-	@echo "Install: $@"
-	$(copy-file-to-new-target)
+ifneq ($(my_init_rc_pairs),)
+my_init_rc_installed := $(call copy-many-files,$(my_init_rc_pairs))
 
 $(my_register_name) : $(my_init_rc_installed)
-endif # my_init_rc
+endif # my_init_rc_pairs
 endif # !LOCAL_UNINSTALLABLE_MODULE
 
 ###########################################################
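A minimal sketch of how the new init.rc pair handling is consumed (file names hypothetical; only LOCAL_INIT_RC and LOCAL_INIT_RC_32/64 come from the rules above):

LOCAL_MODULE := acme_daemon
LOCAL_INIT_RC := acme_daemon.rc        # installed as $(TARGET_OUT_ETC)/init/acme_daemon.rc via copy-many-files
LOCAL_INIT_RC_64 := acme_daemon_64.rc  # per-bitness variant, added to my_init_rc_pairs the same way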
@@ -436,7 +441,7 @@
 ALL_MODULES.$(my_register_name).BUILT_INSTALLED := \
     $(strip $(ALL_MODULES.$(my_register_name).BUILT_INSTALLED) \
     $(LOCAL_BUILT_MODULE):$(LOCAL_INSTALLED_MODULE) \
-    $(addprefix $(my_init_rc_src):,$(my_init_rc_installed)))
+    $(my_init_rc_pairs))
 endif
 ifdef LOCAL_PICKUP_FILES
 # Files or directories ready to pick up by the build system
diff --git a/core/binary.mk b/core/binary.mk
index 918a28d..853f270 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -53,7 +53,7 @@
 my_c_includes := $(LOCAL_C_INCLUDES)
 my_generated_sources := $(LOCAL_GENERATED_SOURCES)
 my_native_coverage := $(LOCAL_NATIVE_COVERAGE)
-my_additional_dependencies := $(LOCAL_MODULE_MAKEFILE_DEP) $(LOCAL_ADDITIONAL_DEPENDENCIES)
+my_additional_dependencies := $(LOCAL_ADDITIONAL_DEPENDENCIES)
 my_export_c_include_dirs := $(LOCAL_EXPORT_C_INCLUDE_DIRS)
 
 ifdef LOCAL_IS_HOST_MODULE
@@ -66,14 +66,23 @@
 my_ndk_sysroot_include :=
 my_ndk_sysroot_lib :=
 ifdef LOCAL_SDK_VERSION
-  ifdef LOCAL_NDK_VERSION
-    $(error $(LOCAL_PATH): LOCAL_NDK_VERSION is now retired.)
-  endif
   ifdef LOCAL_IS_HOST_MODULE
     $(error $(LOCAL_PATH): LOCAL_SDK_VERSION cannot be used in host module)
   endif
-  my_ndk_source_root := $(HISTORICAL_NDK_VERSIONS_ROOT)/current/sources
-  my_ndk_sysroot := $(HISTORICAL_NDK_VERSIONS_ROOT)/current/platforms/android-$(LOCAL_SDK_VERSION)/arch-$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
+
+  # mips32r6 is not supported by the NDK. No released NDK contains these
+  # libraries, but the r10 in prebuilts/ndk had a local hack to add them :(
+  #
+  # We need to find a real solution to this problem, but until we do just drop
+  # mips32r6 things back to r10 to get the tree building again.
+  ifeq (mips32r6,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH_VARIANT))
+    ifeq ($(LOCAL_NDK_VERSION), current)
+      LOCAL_NDK_VERSION := r10
+    endif
+  endif
+
+  my_ndk_source_root := $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources
+  my_ndk_sysroot := $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/platforms/android-$(LOCAL_SDK_VERSION)/arch-$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
   my_ndk_sysroot_include := $(my_ndk_sysroot)/usr/include
 
   # x86_64 and mips64 are both multilib toolchains, so their libraries are
@@ -131,17 +140,30 @@
     my_system_shared_libraries += libstdc++
     ifeq (stlport_static,$(LOCAL_NDK_STL_VARIANT))
       my_ndk_stl_static_lib := $(my_ndk_source_root)/cxx-stl/stlport/libs/$(my_cpu_variant)/libstlport_static.a
+      my_ldlibs += -ldl
     else
       my_ndk_stl_shared_lib_fullpath := $(my_ndk_source_root)/cxx-stl/stlport/libs/$(my_cpu_variant)/libstlport_shared.so
       my_ndk_stl_shared_lib := -lstlport_shared
     endif
   else # LOCAL_NDK_STL_VARIANT is not stlport_* either
   ifneq (,$(filter c++_%, $(LOCAL_NDK_STL_VARIANT)))
-    my_ndk_stl_include_path := $(my_ndk_source_root)/cxx-stl/llvm-libc++/libcxx/include \
-                               $(my_ndk_source_root)/cxx-stl/llvm-libc++/gabi++/include \
-                               $(my_ndk_source_root)/android/support/include
+    my_ndk_stl_include_path := \
+      $(my_ndk_source_root)/cxx-stl/llvm-libc++/libcxx/include \
+      $(my_ndk_source_root)/android/support/include \
+
+    # Pre-r11 NDKs used libgabi++ for libc++'s C++ ABI, but r11 and later use
+    # libc++abi.
+    ifeq ($(LOCAL_NDK_VERSION),r10)
+      my_ndk_stl_include_path += \
+        $(my_ndk_source_root)/cxx-stl/llvm-libc++/gabi++/include
+    else
+      my_ndk_stl_include_path += \
+        $(my_ndk_source_root)/cxx-stl/llvm-libc++abi/libcxxabi/include
+    endif
+
     ifeq (c++_static,$(LOCAL_NDK_STL_VARIANT))
       my_ndk_stl_static_lib := $(my_ndk_source_root)/cxx-stl/llvm-libc++/libs/$(my_cpu_variant)/libc++_static.a
+      my_ldlibs += -ldl
     else
       my_ndk_stl_shared_lib_fullpath := $(my_ndk_source_root)/cxx-stl/llvm-libc++/libs/$(my_cpu_variant)/libc++_shared.so
       my_ndk_stl_shared_lib := -lc++_shared
@@ -212,10 +234,15 @@
 # clang is enabled by default for host builds
 # enable it unless we've specifically disabled clang above
 ifdef LOCAL_IS_HOST_MODULE
-    ifneq ($($(my_prefix)OS),windows)
-    ifeq ($(my_clang),)
-        my_clang := true
-    endif
+    ifeq ($($(my_prefix)OS),windows)
+        ifeq ($(my_clang),true)
+            $(error $(LOCAL_MODULE_MAKEFILE): $(LOCAL_MODULE): Clang is not yet supported for windows binaries)
+        endif
+        my_clang := false
+    else
+        ifeq ($(my_clang),)
+            my_clang := true
+        endif
     endif
 # Add option to make gcc the default for device build
 else ifeq ($(USE_CLANG_PLATFORM_BUILD),false)
@@ -385,7 +412,7 @@
 ifeq ($(my_clang),true)
     my_coverage_lib := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBPROFILE_RT)
 else
-    my_coverage_lib := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBGCOV)
+    my_coverage_lib := $(call intermediates-dir-for,STATIC_LIBRARIES,libgcov,,,$(LOCAL_2ND_ARCH_VAR_PREFIX))/libgcov.a
 endif
 
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TARGET_COVERAGE_LIB := $(my_coverage_lib)
@@ -468,7 +495,7 @@
 # up if --no-undefined is passed to the linker.
 ifeq ($(strip $(LOCAL_NO_DEFAULT_COMPILER_FLAGS)),)
 ifeq ($(my_allow_undefined_symbols),)
-  my_ldflags +=  $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)NO_UNDEFINED_LDFLAGS)
+  my_ldflags += -Wl,--no-undefined
 endif
 endif
 
@@ -530,7 +557,7 @@
 
 my_gen_sources_copy := $(patsubst $(generated_sources_dir)/%,$(intermediates)/%,$(filter $(generated_sources_dir)/%,$(my_generated_sources)))
 
-$(my_gen_sources_copy): $(intermediates)/% : $(generated_sources_dir)/% | $(ACP)
+$(my_gen_sources_copy): $(intermediates)/% : $(generated_sources_dir)/%
 	@echo "Copy: $@"
 	$(copy-file-to-target)
 
@@ -629,57 +656,57 @@
 ## Compile the .proto files to .cc (or .c) and then to .o
 ###########################################################
 proto_sources := $(filter %.proto,$(my_src_files))
-proto_generated_objects :=
-proto_generated_headers :=
 ifneq ($(proto_sources),)
-proto_generated_sources_dir := $(generated_sources_dir)/proto
-proto_generated_obj_dir := $(intermediates)/proto
+proto_gen_dir := $(generated_sources_dir)/proto
+proto_sources_fullpath := $(addprefix $(LOCAL_PATH)/, $(proto_sources))
 
+my_rename_cpp_ext :=
 ifneq (,$(filter nanopb-c nanopb-c-enable_malloc, $(LOCAL_PROTOC_OPTIMIZE_TYPE)))
 my_proto_source_suffix := .c
 my_proto_c_includes := external/nanopb-c
-my_protoc_flags := --nanopb_out=$(proto_generated_sources_dir) \
+my_protoc_flags := --nanopb_out=$(proto_gen_dir) \
     --plugin=external/nanopb-c/generator/protoc-gen-nanopb
+my_protoc_deps := $(NANOPB_SRCS) $(proto_sources_fullpath:%.proto=%.options)
 else
-my_proto_source_suffix := .cc
+my_proto_source_suffix := $(LOCAL_CPP_EXTENSION)
+ifneq ($(my_proto_source_suffix),.cc)
+# aprotoc is hardcoded to write out only .cc files.
+# We need to rename the extension to $(LOCAL_CPP_EXTENSION) if it's not .cc.
+my_rename_cpp_ext := true
+endif
 my_proto_c_includes := external/protobuf/src
 my_cflags += -DGOOGLE_PROTOBUF_NO_RTTI
-my_protoc_flags := --cpp_out=$(proto_generated_sources_dir)
+my_protoc_flags := --cpp_out=$(proto_gen_dir)
+my_protoc_deps :=
 endif
-my_proto_c_includes += $(proto_generated_sources_dir)
+my_proto_c_includes += $(proto_gen_dir)
 
-proto_sources_fullpath := $(addprefix $(LOCAL_PATH)/, $(proto_sources))
-proto_generated_sources := $(addprefix $(proto_generated_sources_dir)/, \
+proto_generated_cpps := $(addprefix $(proto_gen_dir)/, \
     $(patsubst %.proto,%.pb$(my_proto_source_suffix),$(proto_sources_fullpath)))
-proto_generated_headers := $(patsubst %.pb$(my_proto_source_suffix),%.pb.h, $(proto_generated_sources))
-proto_generated_objects := $(addprefix $(proto_generated_obj_dir)/, \
-    $(patsubst %.proto,%.pb.o,$(proto_sources_fullpath)))
-$(call track-src-file-obj,$(proto_sources),$(proto_generated_objects))
 
 # Ensure the transform-proto-to-cc rule is only defined once in multilib build.
-ifndef $(my_prefix)_$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_proto_defined
-$(proto_generated_sources): PRIVATE_PROTO_INCLUDES := $(TOP)
-$(proto_generated_sources): PRIVATE_PROTOC_FLAGS := $(LOCAL_PROTOC_FLAGS) $(my_protoc_flags)
-$(proto_generated_sources): $(proto_generated_sources_dir)/%.pb$(my_proto_source_suffix): %.proto $(PROTOC)
+ifndef $(my_host)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_proto_defined
+$(proto_generated_cpps): PRIVATE_PROTO_INCLUDES := $(TOP)
+$(proto_generated_cpps): PRIVATE_PROTOC_FLAGS := $(LOCAL_PROTOC_FLAGS) $(my_protoc_flags)
+$(proto_generated_cpps): PRIVATE_RENAME_CPP_EXT := $(my_rename_cpp_ext)
+$(proto_generated_cpps): $(proto_gen_dir)/%.pb$(my_proto_source_suffix): %.proto $(my_protoc_deps) $(PROTOC)
 	$(transform-proto-to-cc)
 
-# This is just a dummy rule to make sure gmake doesn't skip updating the dependents.
-$(proto_generated_headers): $(proto_generated_sources_dir)/%.pb.h: $(proto_generated_sources_dir)/%.pb$(my_proto_source_suffix)
-	@echo "Updated header file $@."
-	$(hide) touch $@
-
-$(my_prefix)_$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_proto_defined := true
-endif  # transform-proto-to-cc rule included only once
-
-$(proto_generated_objects): PRIVATE_ARM_MODE := $(normal_objects_mode)
-$(proto_generated_objects): PRIVATE_ARM_CFLAGS := $(normal_objects_cflags)
-$(proto_generated_objects): $(proto_generated_obj_dir)/%.o: $(proto_generated_sources_dir)/%$(my_proto_source_suffix) $(proto_generated_headers)
-ifeq ($(my_proto_source_suffix),.c)
-	$(transform-$(PRIVATE_HOST)c-to-o)
-else
-	$(transform-$(PRIVATE_HOST)cpp-to-o)
+$(my_host)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_proto_defined := true
 endif
-$(call include-depfiles-for-objs, $(proto_generated_objects))
+# Ideally we can generate the source directly into $(intermediates).
+# But many Android.mks assume the .pb.hs are in $(generated_sources_dir).
+# As a workaround, we make a copy in the $(intermediates).
+proto_intermediate_dir := $(intermediates)/proto
+proto_intermediate_cpps := $(patsubst $(proto_gen_dir)/%,$(proto_intermediate_dir)/%,\
+    $(proto_generated_cpps))
+$(proto_intermediate_cpps) : $(proto_intermediate_dir)/% : $(proto_gen_dir)/%
+	@echo "Copy: $@"
+	$(copy-file-to-target)
+	$(hide) cp $(basename $<).h $(basename $@).h
+$(call track-src-file-gen,$(proto_sources),$(proto_intermediate_cpps))
+
+my_generated_sources += $(proto_intermediate_cpps)
 
 my_c_includes += $(my_proto_c_includes)
 # Auto-export the generated proto source dir.
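A sketch of the module shape this path handles (file names hypothetical): with a non-default C++ extension, the generated proto sources take that extension, and the copy rule above mirrors the matching .pb.h into $(intermediates)/proto alongside them.

LOCAL_CPP_EXTENSION := .cpp
LOCAL_SRC_FILES := acme.proto main.cpp
# yields $(proto_gen_dir)/.../acme.pb.cpp (my_rename_cpp_ext is set because the
# extension is not .cc); acme.pb.cpp and acme.pb.h are then copied into
# $(intermediates)/proto and compiled via my_generated_sources.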
@@ -831,7 +858,7 @@
     $(intermediates)/,$(y_yacc_sources:.y=.c))
 ifneq ($(y_yacc_cs),)
 $(y_yacc_cs): $(intermediates)/%.c: \
-    $(TOPDIR)$(LOCAL_PATH)/%.y \
+    $(TOPDIR)$(LOCAL_PATH)/%.y $(BISON) $(BISON_DATA) \
     $(my_additional_dependencies)
 	$(call transform-y-to-c-or-cpp)
 $(call track-src-file-gen,$(y_yacc_sources),$(y_yacc_cs))
@@ -844,7 +871,7 @@
     $(intermediates)/,$(yy_yacc_sources:.yy=$(LOCAL_CPP_EXTENSION)))
 ifneq ($(yy_yacc_cpps),)
 $(yy_yacc_cpps): $(intermediates)/%$(LOCAL_CPP_EXTENSION): \
-    $(TOPDIR)$(LOCAL_PATH)/%.yy \
+    $(TOPDIR)$(LOCAL_PATH)/%.yy $(BISON) $(BISON_DATA) \
     $(my_additional_dependencies)
 	$(call transform-y-to-c-or-cpp)
 $(call track-src-file-gen,$(yy_yacc_sources),$(yy_yacc_cpps))
@@ -897,7 +924,7 @@
 dotdot_arm_objects :=
 $(foreach s,$(dotdot_arm_sources),\
   $(eval $(call compile-dotdot-cpp-file,$(s),\
-  $(yacc_cpps) $(proto_generated_headers) $(my_additional_dependencies),\
+  $(my_additional_dependencies),\
   dotdot_arm_objects)))
 $(call track-src-file-obj,$(patsubst %,%.arm,$(dotdot_arm_sources)),$(dotdot_arm_objects))
 
@@ -905,7 +932,7 @@
 dotdot_objects :=
 $(foreach s,$(dotdot_sources),\
   $(eval $(call compile-dotdot-cpp-file,$(s),\
-    $(yacc_cpps) $(proto_generated_headers) $(my_additional_dependencies),\
+    $(my_additional_dependencies),\
     dotdot_objects)))
 $(call track-src-file-obj,$(dotdot_sources),$(dotdot_objects))
 
@@ -923,7 +950,6 @@
 ifneq ($(strip $(cpp_objects)),)
 $(cpp_objects): $(intermediates)/%.o: \
     $(TOPDIR)$(LOCAL_PATH)/%$(LOCAL_CPP_EXTENSION) \
-    $(yacc_cpps) $(proto_generated_headers) \
     $(my_additional_dependencies)
 	$(transform-$(PRIVATE_HOST)cpp-to-o)
 $(call include-depfiles-for-objs, $(cpp_objects))
@@ -945,8 +971,7 @@
 $(gen_cpp_objects): PRIVATE_ARM_MODE := $(normal_objects_mode)
 $(gen_cpp_objects): PRIVATE_ARM_CFLAGS := $(normal_objects_cflags)
 $(gen_cpp_objects): $(intermediates)/%.o: \
-    $(intermediates)/%$(LOCAL_CPP_EXTENSION) $(yacc_cpps) \
-    $(proto_generated_headers) \
+    $(intermediates)/%$(LOCAL_CPP_EXTENSION) \
     $(my_additional_dependencies)
 	$(transform-$(PRIVATE_HOST)cpp-to-o)
 $(call include-depfiles-for-objs, $(gen_cpp_objects))
@@ -1001,7 +1026,7 @@
 dotdot_arm_objects :=
 $(foreach s,$(dotdot_arm_sources),\
   $(eval $(call compile-dotdot-c-file,$(s),\
-    $(yacc_cpps) $(proto_generated_headers) $(my_additional_dependencies),\
+    $(my_additional_dependencies),\
     dotdot_arm_objects)))
 $(call track-src-file-obj,$(patsubst %,%.arm,$(dotdot_arm_sources)),$(dotdot_arm_objects))
 
@@ -1009,7 +1034,7 @@
 dotdot_objects :=
 $(foreach s, $(dotdot_sources),\
   $(eval $(call compile-dotdot-c-file,$(s),\
-    $(yacc_cpps) $(proto_generated_headers) $(my_additional_dependencies),\
+    $(my_additional_dependencies),\
     dotdot_objects)))
 $(call track-src-file-obj,$(dotdot_sources),$(dotdot_objects))
 
@@ -1025,7 +1050,7 @@
 c_objects        := $(c_arm_objects) $(c_normal_objects)
 
 ifneq ($(strip $(c_objects)),)
-$(c_objects): $(intermediates)/%.o: $(TOPDIR)$(LOCAL_PATH)/%.c $(yacc_cpps) $(proto_generated_headers) \
+$(c_objects): $(intermediates)/%.o: $(TOPDIR)$(LOCAL_PATH)/%.c \
     $(my_additional_dependencies)
 	$(transform-$(PRIVATE_HOST)c-to-o)
 $(call include-depfiles-for-objs, $(c_objects))
@@ -1046,7 +1071,7 @@
 # TODO: support compiling certain generated files as arm.
 $(gen_c_objects): PRIVATE_ARM_MODE := $(normal_objects_mode)
 $(gen_c_objects): PRIVATE_ARM_CFLAGS := $(normal_objects_cflags)
-$(gen_c_objects): $(intermediates)/%.o: $(intermediates)/%.c $(yacc_cpps) $(proto_generated_headers) \
+$(gen_c_objects): $(intermediates)/%.o: $(intermediates)/%.c \
     $(my_additional_dependencies)
 	$(transform-$(PRIVATE_HOST)c-to-o)
 $(call include-depfiles-for-objs, $(gen_c_objects))
@@ -1061,7 +1086,7 @@
 $(call track-src-file-obj,$(objc_sources),$(objc_objects))
 
 ifneq ($(strip $(objc_objects)),)
-$(objc_objects): $(intermediates)/%.o: $(TOPDIR)$(LOCAL_PATH)/%.m $(yacc_cpps) $(proto_generated_headers) \
+$(objc_objects): $(intermediates)/%.o: $(TOPDIR)$(LOCAL_PATH)/%.m \
     $(my_additional_dependencies)
 	$(transform-$(PRIVATE_HOST)m-to-o)
 $(call include-depfiles-for-objs, $(objc_objects))
@@ -1076,7 +1101,7 @@
 $(call track-src-file-obj,$(objcpp_sources),$(objcpp_objects))
 
 ifneq ($(strip $(objcpp_objects)),)
-$(objcpp_objects): $(intermediates)/%.o: $(TOPDIR)$(LOCAL_PATH)/%.mm $(yacc_cpps) $(proto_generated_headers) \
+$(objcpp_objects): $(intermediates)/%.o: $(TOPDIR)$(LOCAL_PATH)/%.mm \
     $(my_additional_dependencies)
 	$(transform-$(PRIVATE_HOST)mm-to-o)
 $(call include-depfiles-for-objs, $(objcpp_objects))
@@ -1176,7 +1201,7 @@
     $(foreach l, $(my_static_libraries) $(my_whole_static_libraries), \
       $(call intermediates-dir-for,STATIC_LIBRARIES,$(l),$(LOCAL_IS_HOST_MODULE),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))/export_includes))
 $(import_includes): PRIVATE_IMPORT_EXPORT_INCLUDES := $(import_includes_deps)
-$(import_includes) : $(LOCAL_MODULE_MAKEFILE_DEP) $(import_includes_deps)
+$(import_includes) : $(import_includes_deps)
 	@echo Import includes file: $@
 	$(hide) mkdir -p $(dir $@) && rm -f $@
 ifdef import_includes_deps
@@ -1206,8 +1231,7 @@
     $(c_objects) \
     $(gen_c_objects) \
     $(objc_objects) \
-    $(objcpp_objects) \
-    $(proto_generated_objects)
+    $(objcpp_objects)
 
 new_order_normal_objects := $(foreach f,$(my_src_files),$(my_src_file_obj_$(f)))
 new_order_normal_objects += $(foreach f,$(my_gen_src_files),$(my_src_file_obj_$(f)))
@@ -1243,11 +1267,7 @@
 # that custom build rules which generate .o files don't consume other generated
 # sources as input (or if they do they take care of that dependency themselves).
 $(normal_objects) : | $(my_generated_sources)
-ifeq ($(BUILDING_WITH_NINJA),true)
 $(all_objects) : $(import_includes)
-else
-$(all_objects) : | $(import_includes)
-endif
 ALL_C_CPP_ETC_OBJECTS += $(all_objects)
 
 
@@ -1362,6 +1382,10 @@
 my_cppflags := $(call $(LOCAL_2ND_ARCH_VAR_PREFIX)convert-to-$(my_host)clang-flags,$(my_cppflags))
 my_asflags := $(call $(LOCAL_2ND_ARCH_VAR_PREFIX)convert-to-$(my_host)clang-flags,$(my_asflags))
 my_ldflags := $(call $(LOCAL_2ND_ARCH_VAR_PREFIX)convert-to-$(my_host)clang-flags,$(my_ldflags))
+else
+# gcc does not handle hidden functions in a manner compatible with LLVM libcxx
+# see b/27908145
+my_cflags += -Wno-attributes
 endif
 
 ifeq ($(my_fdo_build), true)
@@ -1379,6 +1403,59 @@
 my_cppflags := $(filter-out $(my_illegal_flags),$(my_cppflags))
 my_conlyflags := $(filter-out $(my_illegal_flags),$(my_conlyflags))
 
+# We can enforce some rules more strictly in the code we own. my_strict
+# indicates if this is code that we can be stricter with. If we have rules that
+# we want to apply to *our* code (but maybe can't for vendor/device specific
+# things), we could extend this to be a ternary value.
+my_strict := true
+ifneq ($(filter external/%,$(LOCAL_PATH)),)
+    my_strict := false
+endif
+
+# Can be used to make some annotations stricter for code we can fix (such as
+# when we mark functions as deprecated).
+ifeq ($(my_strict),true)
+    my_cflags += -DANDROID_STRICT
+endif
+
+# Disable clang-tidy if it is not found.
+ifeq ($(PATH_TO_CLANG_TIDY),)
+  my_tidy_enabled := false
+else
+  # If LOCAL_TIDY is not defined, use global WITH_TIDY
+  my_tidy_enabled := $(LOCAL_TIDY)
+  ifeq ($(my_tidy_enabled),)
+    my_tidy_enabled := $(WITH_TIDY)
+  endif
+endif
+
+# my_tidy_checks is empty if clang-tidy is disabled.
+my_tidy_checks :=
+my_tidy_flags :=
+ifneq (,$(filter 1 true,$(my_tidy_enabled)))
+  ifneq ($(my_clang),true)
+    # Disable clang-tidy if clang is disabled.
+    my_tidy_enabled := false
+  else
+    tidy_only: $(cpp_objects) $(c_objects)
+    # Set up global default checks
+    my_tidy_checks := $(WITH_TIDY_CHECKS)
+    ifeq ($(my_tidy_checks),)
+      my_tidy_checks := $(call default_global_tidy_checks,$(LOCAL_PATH))
+    endif
+    # Append local clang-tidy checks.
+    ifneq ($(LOCAL_TIDY_CHECKS),)
+      my_tidy_checks := $(my_tidy_checks),$(LOCAL_TIDY_CHECKS)
+    endif
+    # Set up global default clang-tidy flags, which is none.
+    my_tidy_flags := $(WITH_TIDY_FLAGS)
+    # Use local clang-tidy flags if specified.
+    ifneq ($(LOCAL_TIDY_FLAGS),)
+      my_tidy_flags := $(LOCAL_TIDY_FLAGS)
+    endif
+  endif
+endif
+
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_YACCFLAGS := $(LOCAL_YACCFLAGS)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ASFLAGS := $(my_asflags)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CONLYFLAGS := $(my_conlyflags)
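For reference, the tidy variables wired up above can be driven per module or tree-wide; a hedged sketch with example values only:

# In a module's Android.mk:
LOCAL_TIDY := true
LOCAL_TIDY_CHECKS := -google-runtime-int   # appended to the default check set
LOCAL_TIDY_FLAGS := -header-filter=.*      # replaces any global WITH_TIDY_FLAGS
# Or globally from the environment: WITH_TIDY=1, optionally with WITH_TIDY_CHECKS / WITH_TIDY_FLAGS.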
@@ -1392,13 +1469,15 @@
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_IMPORT_INCLUDES := $(import_includes)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_LDFLAGS := $(my_ldflags)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_LDLIBS := $(my_ldlibs)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TIDY_CHECKS := $(my_tidy_checks)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TIDY_FLAGS := $(my_tidy_flags)
 
 # this is really the way to get the files onto the command line instead
 # of using $^, because then LOCAL_ADDITIONAL_DEPENDENCIES doesn't work
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ALL_SHARED_LIBRARIES := $(built_shared_libraries)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ALL_STATIC_LIBRARIES := $(built_static_libraries)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ALL_WHOLE_STATIC_LIBRARIES := $(built_whole_libraries)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ALL_OBJECTS := $(all_objects)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ALL_OBJECTS := $(strip $(all_objects))
 
 ###########################################################
 # Define library dependencies.
@@ -1420,29 +1499,39 @@
 ###########################################################
 export_includes := $(intermediates)/export_includes
 $(export_includes): PRIVATE_EXPORT_C_INCLUDE_DIRS := $(my_export_c_include_dirs)
-# Make sure .pb.h are already generated before any dependent source files get compiled.
-# Similarly, the generated DBus headers need to exist before we export their location.
-# People are not going to consume the aidl generated cpp file, but the cpp file is
-# generated after the headers, so this is a convenient way to ensure the headers exist.
-$(export_includes) : $(LOCAL_MODULE_MAKEFILE_DEP) $(proto_generated_headers) $(dbus_generated_headers) $(aidl_gen_cpp) $(vts_gen_cpp)
+# Headers exported by whole static libraries are also exported by this library.
+export_include_deps := $(strip \
+   $(foreach l,$(my_whole_static_libraries), \
+     $(call intermediates-dir-for,STATIC_LIBRARIES,$(l),$(LOCAL_IS_HOST_MODULE),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))/export_includes))
+# Re-export requested headers from shared libraries.
+export_include_deps += $(strip \
+   $(foreach l,$(LOCAL_EXPORT_SHARED_LIBRARY_HEADERS), \
+     $(call intermediates-dir-for,SHARED_LIBRARIES,$(l),$(LOCAL_IS_HOST_MODULE),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))/export_includes))
+# Re-export requested headers from static libraries.
+export_include_deps += $(strip \
+   $(foreach l,$(LOCAL_EXPORT_STATIC_LIBRARY_HEADERS), \
+     $(call intermediates-dir-for,STATIC_LIBRARIES,$(l),$(LOCAL_IS_HOST_MODULE),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))/export_includes))
+$(export_includes): PRIVATE_REEXPORTED_INCLUDES := $(export_include_deps)
+# By adding $(my_generated_sources) it makes sure the headers get generated
+# before any dependent source files get compiled.
+$(export_includes) : $(my_generated_sources) $(export_include_deps)
 	@echo Export includes file: $< -- $@
-	$(hide) mkdir -p $(dir $@) && rm -f $@.tmp
+	$(hide) mkdir -p $(dir $@) && rm -f $@.tmp && touch $@.tmp
 ifdef my_export_c_include_dirs
 	$(hide) for d in $(PRIVATE_EXPORT_C_INCLUDE_DIRS); do \
 	        echo "-I $$d" >> $@.tmp; \
 	        done
-else
-	$(hide) touch $@.tmp
 endif
-ifeq ($(BUILDING_WITH_NINJA),true)
+ifdef export_include_deps
+	$(hide) for f in $(PRIVATE_REEXPORTED_INCLUDES); do \
+		cat $$f >> $@.tmp; \
+		done
+endif
 	$(hide) if cmp -s $@.tmp $@ ; then \
 	  rm $@.tmp ; \
 	else \
 	  mv $@.tmp $@ ; \
 	fi
-else
-	mv $@.tmp $@ ;
-endif
 
 # Kati adds restat=1 to ninja. GNU make does nothing for this.
 .KATI_RESTAT: $(export_includes)
diff --git a/core/build-system.html b/core/build-system.html
index bddde6a..95f35ce 100644
--- a/core/build-system.html
+++ b/core/build-system.html
@@ -438,7 +438,7 @@
 GEN := $(intermediates)/<font color=red>file.c</font>
 $(GEN): PRIVATE_INPUT_FILE := $(LOCAL_PATH)/<font color=red>input.file</font>
 $(GEN): PRIVATE_CUSTOM_TOOL = <font color=red>cat $(PRIVATE_INPUT_FILE) &gt; $@</font>
-$(GEN): <font color=red>$(LOCAL_PATH)/file.c</font>
+$(GEN): <font color=red>$(LOCAL_PATH)/input.file</font>
 	$(transform-generated-source)
 LOCAL_GENERATED_SOURCES += $(GEN)
 </pre>
diff --git a/core/ccache.mk b/core/ccache.mk
index 5c2ae23..d67bce6 100644
--- a/core/ccache.mk
+++ b/core/ccache.mk
@@ -17,7 +17,7 @@
 ifneq ($(filter-out false,$(USE_CCACHE)),)
   # The default check uses size and modification time, causing false misses
   # since the mtime depends when the repo was checked out
-  export CCACHE_COMPILERCHECK := content
+  export CCACHE_COMPILERCHECK ?= content
 
   # See man page, optimizations to get more cache hits
   # implies that __DATE__ and __TIME__ are not critical for functionality.
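Since the assignment is now ?=, a value exported in the user's environment before the build takes precedence; an illustrative override:

export CCACHE_COMPILERCHECK=mtime   # respected now; 'content' remains the default otherwise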
diff --git a/core/clang/HOST_CROSS_x86.mk b/core/clang/HOST_CROSS_x86.mk
index b78a074..c67825b 100644
--- a/core/clang/HOST_CROSS_x86.mk
+++ b/core/clang/HOST_CROSS_x86.mk
@@ -27,8 +27,7 @@
   $(CLANG_CONFIG_EXTRA_CPPFLAGS) \
   $(CLANG_CONFIG_HOST_CROSS_EXTRA_CPPFLAGS) \
   $(CLANG_CONFIG_x86_EXTRA_CPPFLAGS) \
-  $(CLANG_CONFIG_x86_HOST_CROSS_COMBO_EXTRA_CPPFLAGS) \
-  -target $(CLANG_CONFIG_x86_HOST_CROSS_TRIPLE)
+  $(CLANG_CONFIG_x86_HOST_CROSS_COMBO_EXTRA_CPPFLAGS)
 
 CLANG_CONFIG_x86_HOST_CROSS_EXTRA_LDFLAGS := \
   $(CLANG_CONFIG_EXTRA_LDFLAGS) \
diff --git a/core/clang/HOST_CROSS_x86_64.mk b/core/clang/HOST_CROSS_x86_64.mk
index b6f2de9..cdf49a9 100644
--- a/core/clang/HOST_CROSS_x86_64.mk
+++ b/core/clang/HOST_CROSS_x86_64.mk
@@ -27,8 +27,7 @@
   $(CLANG_CONFIG_EXTRA_CPPFLAGS) \
   $(CLANG_CONFIG_HOST_CROSS_EXTRA_CPPFLAGS) \
   $(CLANG_CONFIG_x86_64_EXTRA_CPPFLAGS) \
-  $(CLANG_CONFIG_x86_64_HOST_CROSS_COMBO_EXTRA_CPPFLAGS) \
-  -target $(CLANG_CONFIG_x86_64_HOST_CROSS_TRIPLE)
+  $(CLANG_CONFIG_x86_64_HOST_CROSS_COMBO_EXTRA_CPPFLAGS)
 
 CLANG_CONFIG_x86_64_HOST_CROSS_EXTRA_LDFLAGS := \
   $(CLANG_CONFIG_EXTRA_LDFLAGS) \
diff --git a/core/clang/HOST_x86.mk b/core/clang/HOST_x86.mk
index 0ec64ad..a2015f1 100644
--- a/core/clang/HOST_x86.mk
+++ b/core/clang/HOST_x86.mk
@@ -41,8 +41,7 @@
   $(CLANG_CONFIG_EXTRA_CPPFLAGS) \
   $(CLANG_CONFIG_HOST_EXTRA_CPPFLAGS) \
   $(CLANG_CONFIG_x86_EXTRA_CPPFLAGS) \
-  $(CLANG_CONFIG_x86_HOST_COMBO_EXTRA_CPPFLAGS) \
-  -target $(CLANG_CONFIG_x86_HOST_TRIPLE)
+  $(CLANG_CONFIG_x86_HOST_COMBO_EXTRA_CPPFLAGS)
 
 CLANG_CONFIG_x86_HOST_EXTRA_LDFLAGS := \
   $(CLANG_CONFIG_EXTRA_LDFLAGS) \
diff --git a/core/clang/HOST_x86_64.mk b/core/clang/HOST_x86_64.mk
index d46cb67..b2f83f7 100644
--- a/core/clang/HOST_x86_64.mk
+++ b/core/clang/HOST_x86_64.mk
@@ -41,8 +41,7 @@
   $(CLANG_CONFIG_EXTRA_CPPFLAGS) \
   $(CLANG_CONFIG_HOST_EXTRA_CPPFLAGS) \
   $(CLANG_CONFIG_x86_64_EXTRA_CPPFLAGS) \
-  $(CLANG_CONFIG_x86_64_HOST_COMBO_EXTRA_CPPFLAGS) \
-  -target $(CLANG_CONFIG_x86_64_HOST_TRIPLE)
+  $(CLANG_CONFIG_x86_64_HOST_COMBO_EXTRA_CPPFLAGS)
 
 CLANG_CONFIG_x86_64_HOST_EXTRA_LDFLAGS := \
   $(CLANG_CONFIG_EXTRA_LDFLAGS) \
diff --git a/core/clang/HOST_x86_common.mk b/core/clang/HOST_x86_common.mk
index 9e71750..7333297 100644
--- a/core/clang/HOST_x86_common.mk
+++ b/core/clang/HOST_x86_common.mk
@@ -13,7 +13,8 @@
 ifeq ($(HOST_OS),linux)
 CLANG_CONFIG_x86_LINUX_HOST_EXTRA_ASFLAGS := \
   --gcc-toolchain=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG) \
-  --sysroot $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot
+  --sysroot $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot \
+  -B$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/bin
 
 CLANG_CONFIG_x86_LINUX_HOST_EXTRA_CFLAGS := \
   --gcc-toolchain=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)
@@ -22,8 +23,6 @@
 
 ifneq ($(strip $($(clang_2nd_arch_prefix)HOST_IS_64_BIT)),)
 CLANG_CONFIG_x86_LINUX_HOST_EXTRA_CPPFLAGS := \
-  --gcc-toolchain=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG) \
-  --sysroot $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot \
   -isystem $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/include/c++/4.8 \
   -isystem $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/include/c++/4.8/x86_64-linux \
   -isystem $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/include/c++/4.8/backward
@@ -34,11 +33,9 @@
   -B$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/bin \
   -B$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/lib/gcc/x86_64-linux/4.8 \
   -L$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/lib/gcc/x86_64-linux/4.8 \
-  -L$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/lib64/
+  -L$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/lib64
 else
 CLANG_CONFIG_x86_LINUX_HOST_EXTRA_CPPFLAGS := \
-  --gcc-toolchain=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG) \
-  --sysroot $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot \
   -isystem $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/include/c++/4.8 \
   -isystem $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/include/c++/4.8/x86_64-linux/32 \
   -isystem $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/include/c++/4.8/backward
@@ -49,6 +46,6 @@
   -B$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/bin \
   -B$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/lib/gcc/x86_64-linux/4.8/32 \
   -L$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/lib/gcc/x86_64-linux/4.8/32 \
-  -L$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/lib32/
+  -L$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/lib32
 endif
 endif  # Linux
diff --git a/core/clang/TARGET_arm.mk b/core/clang/TARGET_arm.mk
index 5c1bf6f..906e677 100644
--- a/core/clang/TARGET_arm.mk
+++ b/core/clang/TARGET_arm.mk
@@ -27,8 +27,7 @@
 CLANG_CONFIG_arm_TARGET_EXTRA_CPPFLAGS := \
   $(CLANG_CONFIG_EXTRA_CPPFLAGS) \
   $(CLANG_CONFIG_TARGET_EXTRA_CPPFLAGS) \
-  $(CLANG_CONFIG_arm_EXTRA_CPPFLAGS) \
-  -target $(CLANG_CONFIG_arm_TARGET_TRIPLE)
+  $(CLANG_CONFIG_arm_EXTRA_CPPFLAGS)
 
 CLANG_CONFIG_arm_TARGET_EXTRA_LDFLAGS := \
   $(CLANG_CONFIG_EXTRA_LDFLAGS) \
diff --git a/core/clang/TARGET_mips.mk b/core/clang/TARGET_mips.mk
index 1a0176a..c3c808b 100644
--- a/core/clang/TARGET_mips.mk
+++ b/core/clang/TARGET_mips.mk
@@ -65,3 +65,7 @@
 RS_COMPAT_TRIPLE := mipsel-linux-android
 
 $(clang_2nd_arch_prefix)TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-mipsel-android.a
+
+# Address sanitizer clang config
+$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_RUNTIME_LIBRARY := libclang_rt.asan-mips-android
+$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan
diff --git a/core/clang/TARGET_mips64.mk b/core/clang/TARGET_mips64.mk
index 104fb70..5ea56ff 100644
--- a/core/clang/TARGET_mips64.mk
+++ b/core/clang/TARGET_mips64.mk
@@ -64,3 +64,7 @@
 RS_COMPAT_TRIPLE := mips64el-linux-android
 
 TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-mips64el-android.a
+
+# Address sanitizer clang config
+$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_RUNTIME_LIBRARY := libclang_rt.asan-mips64-android
+$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64
diff --git a/core/clang/arm.mk b/core/clang/arm.mk
index 4053bb2..a5472f4 100644
--- a/core/clang/arm.mk
+++ b/core/clang/arm.mk
@@ -4,12 +4,6 @@
 
 CLANG_CONFIG_arm_EXTRA_CFLAGS :=
 
-ifneq (,$(filter krait,$(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT)))
-  # Android's clang support's krait as a CPU whereas GCC doesn't. Specify
-  # -mcpu here rather than the more normal core/combo/arch/arm/armv7-a-neon.mk.
-  CLANG_CONFIG_arm_EXTRA_CFLAGS += -mcpu=krait -mfpu=neon-vfpv4
-endif
-
 CLANG_CONFIG_arm_EXTRA_CPPFLAGS :=
 
 CLANG_CONFIG_arm_EXTRA_LDFLAGS :=
@@ -31,6 +25,15 @@
   -fno-tree-copy-prop \
   -fno-tree-loop-optimize
 
+ifneq (,$(filter krait,$(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT)))
+  # Android's clang support's krait as a CPU whereas GCC doesn't. Specify
+  # -mcpu here rather than the more normal core/combo/arch/arm/armv7-a-neon.mk.
+  CLANG_CONFIG_arm_EXTRA_CFLAGS += -mcpu=krait -mfpu=neon-vfpv4
+
+  # This isn't really unknown, but allows us to only set -mcpu=krait
+  CLANG_CONFIG_arm_UNKNOWN_CFLAGS += -mcpu=cortex-a15
+endif
+
 define subst-clang-incompatible-arm-flags
   $(subst -march=armv5te,-march=armv5t,\
   $(subst -march=armv5e,-march=armv5,\
diff --git a/core/clang/config.mk b/core/clang/config.mk
index 6cc3446..c7499ab 100644
--- a/core/clang/config.mk
+++ b/core/clang/config.mk
@@ -3,18 +3,13 @@
 LLVM_PREBUILTS_PATH := $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/bin
 LLVM_RTLIB_PATH := $(LLVM_PREBUILTS_PATH)/../lib64/clang/$(LLVM_RELEASE_VERSION)/lib/linux/
 
-CLANG := $(LLVM_PREBUILTS_PATH)/clang$(BUILD_EXECUTABLE_SUFFIX)
-CLANG_CXX := $(LLVM_PREBUILTS_PATH)/clang++$(BUILD_EXECUTABLE_SUFFIX)
-LLVM_AS := $(LLVM_PREBUILTS_PATH)/llvm-as$(BUILD_EXECUTABLE_SUFFIX)
-LLVM_LINK := $(LLVM_PREBUILTS_PATH)/llvm-link$(BUILD_EXECUTABLE_SUFFIX)
-
 CLANG_TBLGEN := $(BUILD_OUT_EXECUTABLES)/clang-tblgen$(BUILD_EXECUTABLE_SUFFIX)
 LLVM_TBLGEN := $(BUILD_OUT_EXECUTABLES)/llvm-tblgen$(BUILD_EXECUTABLE_SUFFIX)
 
 # RenderScript-specific tools
 # These are tied to the version of LLVM directly in external/, so they might
 # trail the host prebuilts being used for the rest of the build process.
-RS_LLVM_PREBUILTS_VERSION := clang-2690385
+RS_LLVM_PREBUILTS_VERSION := clang-2812033
 RS_LLVM_PREBUILTS_BASE := prebuilts/clang/host
 RS_LLVM_PREBUILTS_PATH := $(RS_LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(RS_LLVM_PREBUILTS_VERSION)/bin
 RS_CLANG := $(RS_LLVM_PREBUILTS_PATH)/clang$(BUILD_EXECUTABLE_SUFFIX)
@@ -90,7 +85,6 @@
   -Wunused-but-set-parameter \
   -Wunused-but-set-variable \
   -fdiagnostics-color \
-  -fdebug-prefix-map=/proc/self/cwd=
 
 # Clang flags for all host rules
 CLANG_CONFIG_HOST_EXTRA_ASFLAGS :=
@@ -107,7 +101,7 @@
 # Clang flags for all target rules
 CLANG_CONFIG_TARGET_EXTRA_ASFLAGS :=
 CLANG_CONFIG_TARGET_EXTRA_CFLAGS := -nostdlibinc
-CLANG_CONFIG_TARGET_EXTRA_CPPFLAGS := -nostdlibinc
+CLANG_CONFIG_TARGET_EXTRA_CPPFLAGS :=
 CLANG_CONFIG_TARGET_EXTRA_LDFLAGS :=
 
 CLANG_DEFAULT_UB_CHECKS := \
@@ -168,7 +162,6 @@
 ADDRESS_SANITIZER_CONFIG_EXTRA_CFLAGS := -fno-omit-frame-pointer
 ADDRESS_SANITIZER_CONFIG_EXTRA_LDFLAGS := -Wl,-u,__asan_preinit
 
-ADDRESS_SANITIZER_CONFIG_EXTRA_SHARED_LIBRARIES :=
 ADDRESS_SANITIZER_CONFIG_EXTRA_STATIC_LIBRARIES := libasan
 
 # This allows us to use the superset of functionality that compiler-rt
@@ -179,3 +172,5 @@
 # We don't have 32-bit prebuilt libLLVM/libclang, so force to build them from source.
 FORCE_BUILD_LLVM_COMPONENTS := true
 endif
+
+include $(BUILD_SYSTEM)/clang/tidy.mk
diff --git a/core/clang/mips.mk b/core/clang/mips.mk
index 4a8f812..aeb2f6a 100644
--- a/core/clang/mips.mk
+++ b/core/clang/mips.mk
@@ -15,11 +15,6 @@
   -mno-synci \
   -mno-fused-madd
 
-# Temporary workaround for Mips clang++ problem,  creates
-#   relocated ptrs in read-only pic .gcc_exception_table;
-#   permanent fix pending at http://reviews.llvm.org/D9669
-CLANG_CONFIG_mips_UNKNOWN_CFLAGS += -Wl,--warn-shared-textrel
-
 # We don't have any mips flags to substitute yet.
 define subst-clang-incompatible-mips-flags
   $(1)
diff --git a/core/clang/mips64.mk b/core/clang/mips64.mk
index 1b72e05..20e87bd 100644
--- a/core/clang/mips64.mk
+++ b/core/clang/mips64.mk
@@ -15,11 +15,6 @@
   -mno-synci \
   -mno-fused-madd
 
-# Temporary workaround for Mips clang++ problem creating
-#   relocated ptrs in read-only pic .gcc_exception_table;
-#   permanent fix pending at http://reviews.llvm.org/D9669
-CLANG_CONFIG_mips64_UNKNOWN_CFLAGS += -Wl,--warn-shared-textrel
-
 # We don't have any mips64 flags to substitute yet.
 define subst-clang-incompatible-mips64-flags
   $(1)
diff --git a/core/clang/tidy.mk b/core/clang/tidy.mk
new file mode 100644
index 0000000..019e6f0
--- /dev/null
+++ b/core/clang/tidy.mk
@@ -0,0 +1,65 @@
+#
+# Copyright (C) 2016 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Most Android source files are not clang-tidy clean yet.
+# Global tidy checks include only google* and misc-macro-parentheses,
+# but not google-readability*.
+DEFAULT_GLOBAL_TIDY_CHECKS := \
+  -*,google*,-google-readability*,misc-macro-parentheses
+
+# Disable style rules usually not followed by external projects.
+# Every word in DEFAULT_LOCAL_TIDY_CHECKS list has the following format:
+#   <local_path_prefix>:,<tidy-check-pattern>
+# The tidy-check-patterns of all matching local_path_prefixes will be used.
+# For example, external/google* projects will have:
+#   ,-google-build-using-namespace,-google-explicit-constructor
+#   ,-google-runtime-int,-misc-macro-parentheses,
+#   ,google-runtime-int,misc-macro-parentheses
+# where google-runtime-int and misc-macro-parentheses are enabled at the end.
+DEFAULT_LOCAL_TIDY_CHECKS := \
+  external/:,-google-build-using-namespace \
+  external/:,-google-explicit-constructor,-google-runtime-int \
+  external/:,-misc-macro-parentheses \
+  external/google:,google-runtime-int,misc-macro-parentheses \
+  external/webrtc/:,google-runtime-int \
+  hardware/qcom:,-google-build-using-namespace \
+  hardware/qcom:,-google-explicit-constructor,-google-runtime-int \
+  vendor/lge:,-google-build-using-namespace \
+  vendor/lge:,-google-explicit-constructor,-google-runtime-int \
+  vendor/widevine:,-google-build-using-namespace \
+  vendor/widevine:,-google-explicit-constructor,-google-runtime-int \
+
+# Returns 2nd word of $(1) if $(2) has prefix of the 1st word of $(1).
+define find_default_local_tidy_check2
+$(if $(filter $(word 1,$(1))%,$(2)/),$(word 2,$(1)))
+endef
+
+# Returns 2nd part of $(1) if $(2) has prefix of the 1st part of $(1).
+define find_default_local_tidy_check
+$(call find_default_local_tidy_check2,$(subst :,$(space),$(1)),$(2))
+endef
+
+# Returns concatenated tidy check patterns from the
+# DEFAULT_GLOBAL_TIDY_CHECKS and all matched patterns
+# in DEFAULT_LOCAL_TIDY_CHECKS based on given directory path $(1).
+define default_global_tidy_checks
+$(subst $(space),, \
+  $(DEFAULT_GLOBAL_TIDY_CHECKS) \
+  $(foreach pattern,$(DEFAULT_LOCAL_TIDY_CHECKS), \
+    $(call find_default_local_tidy_check,$(pattern),$(1)) \
+  ) \
+)
+endef
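
The prefix matching above can be exercised on its own. Below is a standalone GNU Make sketch, separate from the patch, with a trimmed-down check list, a locally defined $(space) (normally provided by the core build system), and made-up paths such as external/google-breakpad; it prints the checks that would be selected for two directories.

empty :=
space := $(empty) $(empty)

DEFAULT_GLOBAL_TIDY_CHECKS := -*,google*,-google-readability*,misc-macro-parentheses
DEFAULT_LOCAL_TIDY_CHECKS := \
  external/:,-google-build-using-namespace \
  external/google:,google-runtime-int

# Returns the 2nd word of $(1) if $(2) starts with the 1st word of $(1).
define find_default_local_tidy_check2
$(if $(filter $(word 1,$(1))%,$(2)/),$(word 2,$(1)))
endef

define find_default_local_tidy_check
$(call find_default_local_tidy_check2,$(subst :,$(space),$(1)),$(2))
endef

# Concatenates the global checks with every matching local pattern.
define default_global_tidy_checks
$(subst $(space),, \
  $(DEFAULT_GLOBAL_TIDY_CHECKS) \
  $(foreach pattern,$(DEFAULT_LOCAL_TIDY_CHECKS), \
    $(call find_default_local_tidy_check,$(pattern),$(1))))
endef

$(info external/google-breakpad -> $(call default_global_tidy_checks,external/google-breakpad))
$(info frameworks/base -> $(call default_global_tidy_checks,frameworks/base))

demo: ;
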
diff --git a/core/clang/versions.mk b/core/clang/versions.mk
index 81bd3b8..ef28880 100644
--- a/core/clang/versions.mk
+++ b/core/clang/versions.mk
@@ -1,5 +1,5 @@
 ## Clang/LLVM release versions.
 
 LLVM_RELEASE_VERSION := 3.8
-LLVM_PREBUILTS_VERSION ?= clang-2690385
+LLVM_PREBUILTS_VERSION ?= clang-2812033
 LLVM_PREBUILTS_BASE ?= prebuilts/clang/host
diff --git a/core/cleanbuild.mk b/core/cleanbuild.mk
index 0d6a406..f61e3f7 100644
--- a/core/cleanbuild.mk
+++ b/core/cleanbuild.mk
@@ -246,15 +246,7 @@
 	$(PRODUCT_OUT)/oem \
 	$(PRODUCT_OUT)/dex_bootjars \
 	$(PRODUCT_OUT)/obj/JAVA_LIBRARIES \
-	$(PRODUCT_OUT)/obj/FAKE \
-	$(PRODUCT_OUT)/obj/EXECUTABLES/adbd_intermediates \
-	$(PRODUCT_OUT)/obj/EXECUTABLES/logd_intermediates \
-	$(PRODUCT_OUT)/obj/STATIC_LIBRARIES/libfs_mgr_intermediates \
-	$(PRODUCT_OUT)/obj/EXECUTABLES/init_intermediates \
-	$(PRODUCT_OUT)/obj/ETC/mac_permissions.xml_intermediates \
-	$(PRODUCT_OUT)/obj/ETC/sepolicy_intermediates \
-	$(PRODUCT_OUT)/obj/ETC/sepolicy.recovery_intermediates \
-	$(PRODUCT_OUT)/obj/ETC/init.environ.rc_intermediates
+	$(PRODUCT_OUT)/obj/FAKE
 
 # The files/dirs to delete during a dataclean, which removes any files
 # in the staging and emulator data partitions.
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 59e907b..da882f3 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -25,7 +25,6 @@
 LOCAL_MANIFEST_PACKAGE_NAME:=
 LOCAL_PACKAGE_SPLITS:=
 LOCAL_REQUIRED_MODULES:=
-LOCAL_ACP_UNAVAILABLE:=
 LOCAL_MODULE_TAGS:=
 LOCAL_SRC_FILES:=
 LOCAL_SRC_FILES_EXCLUDE:=
@@ -36,6 +35,8 @@
 # Group static libraries with "-Wl,--start-group" and "-Wl,--end-group" when linking.
 LOCAL_GROUP_STATIC_LIBRARIES:=
 LOCAL_WHOLE_STATIC_LIBRARIES:=
+LOCAL_EXPORT_SHARED_LIBRARY_HEADERS:=
+LOCAL_EXPORT_STATIC_LIBRARY_HEADERS:=
 LOCAL_SHARED_LIBRARIES:=
 LOCAL_IS_HOST_MODULE:=
 LOCAL_CC:=
@@ -109,12 +110,11 @@
 LOCAL_JARJAR_RULES:=
 LOCAL_ADDITIONAL_JAVA_DIR:=
 LOCAL_ALLOW_UNDEFINED_SYMBOLS:=
-LOCAL_DX_FLAGS:=
-LOCAL_JACK_ENABLED:=$(DEFAULT_JACK_ENABLED) # '' (ie disabled), disabled, full, incremental
+# full or incremental
+LOCAL_JACK_ENABLED:=full
 LOCAL_JACK_FLAGS:=
 LOCAL_JACK_COVERAGE_INCLUDE_FILTER:=
 LOCAL_JACK_COVERAGE_EXCLUDE_FILTER:=
-LOCAL_JILL_FLAGS:=
 LOCAL_CERTIFICATE:=
 LOCAL_SDK_VERSION:=
 LOCAL_MIN_SDK_VERSION:=
@@ -162,7 +162,6 @@
 LOCAL_SOURCE_FILES_ALL_GENERATED:= # '',true
 # Don't delete the META_INF dir when merging static Java libraries.
 LOCAL_DONT_DELETE_JAR_META_INF:=
-LOCAL_DONT_DELETE_JAR_DIRS:=
 LOCAL_ADDITIONAL_CERTIFICATES:=
 LOCAL_PREBUILT_MODULE_FILE:=
 LOCAL_POST_LINK_CMD:=
@@ -171,6 +170,9 @@
 LOCAL_RMTYPEDEFS:=
 LOCAL_NO_SYNTAX_CHECK:=
 LOCAL_NO_STATIC_ANALYZER:=
+LOCAL_TIDY:=
+LOCAL_TIDY_CHECKS:=
+LOCAL_TIDY_FLAGS:=
 LOCAL_32_BIT_ONLY:= # '',true
 LOCAL_MULTILIB:=
 LOCAL_MODULE_TARGET_ARCH:=
@@ -204,6 +206,7 @@
 # Used to replace the installed file of a presigned prebuilt apk in PDK fusion build,
 # to avoid installing the presigned apks with classes.dex unstripped.
 LOCAL_REPLACE_PREBUILT_APK_INSTALLED:=
+LOCAL_NDK_VERSION:=current
 
 # arch specific variables
 LOCAL_SRC_FILES_$(TARGET_ARCH):=
diff --git a/core/combo/HOST_CROSS_windows-x86.mk b/core/combo/HOST_CROSS_windows-x86.mk
index 6180a26..19ff7a3 100644
--- a/core/combo/HOST_CROSS_windows-x86.mk
+++ b/core/combo/HOST_CROSS_windows-x86.mk
@@ -22,10 +22,9 @@
 $(combo_var_prefix)GLOBAL_CFLAGS += --sysroot prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32
 $(combo_var_prefix)GLOBAL_CFLAGS += -m32
 $(combo_var_prefix)GLOBAL_LDFLAGS += -m32
-TOOLS_PREFIX := prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/bin/x86_64-w64-mingw32-
+$(combo_var_prefix)GLOBAL_LDFLAGS += -Lprebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32/lib32
 $(combo_var_prefix)C_INCLUDES += prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32/include
 $(combo_var_prefix)C_INCLUDES += prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/lib/gcc/x86_64-w64-mingw32/4.8.3/include
-$(combo_var_prefix)GLOBAL_LD_DIRS += -Lprebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32/lib32
 
 # Workaround differences in inttypes.h between host and target.
 # See bug 12708004.
@@ -37,12 +36,6 @@
 # Get 64-bit off_t and related functions.
 $(combo_var_prefix)GLOBAL_CFLAGS += -D_FILE_OFFSET_BITS=64
 
-$(combo_var_prefix)CC := $(TOOLS_PREFIX)gcc
-$(combo_var_prefix)CXX := $(TOOLS_PREFIX)g++
-$(combo_var_prefix)AR := $(TOOLS_PREFIX)ar
-$(combo_var_prefix)NM := $(TOOLS_PREFIX)nm
-$(combo_var_prefix)OBJDUMP := $(TOOLS_PREFIX)objdump
-
 define $(combo_var_prefix)transform-shared-lib-to-toc
 $(hide) $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)OBJDUMP) -x $(1) | grep "^Name" | cut -f3 -d" " > $(2)
 $(hide) $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)NM) -g -f p $(1) | cut -f1-2 -d" " >> $(2)
diff --git a/core/combo/HOST_CROSS_windows-x86_64.mk b/core/combo/HOST_CROSS_windows-x86_64.mk
index e9b19cf..5302c83 100644
--- a/core/combo/HOST_CROSS_windows-x86_64.mk
+++ b/core/combo/HOST_CROSS_windows-x86_64.mk
@@ -22,10 +22,9 @@
 $(combo_var_prefix)GLOBAL_CFLAGS += --sysroot prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32
 $(combo_var_prefix)GLOBAL_CFLAGS += -m64
 $(combo_var_prefix)GLOBAL_LDFLAGS += -m64
-TOOLS_PREFIX := prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/bin/x86_64-w64-mingw32-
+$(combo_var_prefix)GLOBAL_LDFLAGS += -Lprebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32/lib64
 $(combo_var_prefix)C_INCLUDES += prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32/include
 $(combo_var_prefix)C_INCLUDES += prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/lib/gcc/x86_64-w64-mingw32/4.8.3/include
-$(combo_var_prefix)GLOBAL_LD_DIRS += -Lprebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32/lib64
 
 # Workaround differences in inttypes.h between host and target.
 # See bug 12708004.
@@ -37,12 +36,6 @@
 # Get 64-bit off_t and related functions.
 $(combo_var_prefix)GLOBAL_CFLAGS += -D_FILE_OFFSET_BITS=64
 
-$(combo_var_prefix)CC := $(TOOLS_PREFIX)gcc
-$(combo_var_prefix)CXX := $(TOOLS_PREFIX)g++
-$(combo_var_prefix)AR := $(TOOLS_PREFIX)ar
-$(combo_var_prefix)NM := $(TOOLS_PREFIX)nm
-$(combo_var_prefix)OBJDUMP := $(TOOLS_PREFIX)objdump
-
 define $(combo_var_prefix)transform-shared-lib-to-toc
 $(hide) $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)OBJDUMP) -x $(1) | grep "^Name" | cut -f3 -d" " > $(2)
 $(hide) $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)NM) -g -f p $(1) | cut -f1-2 -d" " >> $(2)
diff --git a/core/combo/HOST_darwin-x86.mk b/core/combo/HOST_darwin-x86.mk
index fc56e52..f620043 100644
--- a/core/combo/HOST_darwin-x86.mk
+++ b/core/combo/HOST_darwin-x86.mk
@@ -31,33 +31,24 @@
 
 include $(BUILD_COMBOS)/mac_version.mk
 
-$(combo_2nd_arch_prefix)HOST_TOOLCHAIN_ROOT := prebuilts/gcc/darwin-x86/host/i686-apple-darwin-4.2.1
-$(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_ROOT)/bin/i686-apple-darwin$(gcc_darwin_version)
-$(combo_2nd_arch_prefix)HOST_CC  := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX)-gcc
-$(combo_2nd_arch_prefix)HOST_CXX := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX)-g++
-
 define $(combo_var_prefix)transform-shared-lib-to-toc
 $(call _gen_toc_command_for_macho,$(1),$(2))
 endef
 
-# gcc location for clang; to be updated when clang is updated
-# HOST_TOOLCHAIN_ROOT is a Darwin-specific define
-$(combo_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_ROOT)
-
-$(combo_2nd_arch_prefix)HOST_AR := $(AR)
-
 $(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -isysroot $(mac_sdk_root) -mmacosx-version-min=$(mac_sdk_version) -DMACOSX_DEPLOYMENT_TARGET=$(mac_sdk_version)
-$(combo_2nd_arch_prefix)HOST_GLOBAL_CPPFLAGS += -isystem $(mac_sdk_path)/Toolchains/XcodeDefault.xctoolchain/usr/include/c++/v1
 $(combo_2nd_arch_prefix)HOST_GLOBAL_LDFLAGS += -isysroot $(mac_sdk_root) -Wl,-syslibroot,$(mac_sdk_root) -mmacosx-version-min=$(mac_sdk_version)
 
 $(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -fPIC -funwind-tables
-$(combo_2nd_arch_prefix)HOST_NO_UNDEFINED_LDFLAGS := -Wl,-undefined,error
 
 $(combo_2nd_arch_prefix)HOST_SHLIB_SUFFIX := .dylib
 $(combo_2nd_arch_prefix)HOST_JNILIB_SUFFIX := .jnilib
 
 $(combo_2nd_arch_prefix)HOST_GLOBAL_ARFLAGS := cqs
 
+# Use Darwin's libc++, as Darwin's libstdc++ is old and does not support C++11
+$(combo_2nd_arch_prefix)HOST_SYSTEMCPP_CPPFLAGS := -isystem $(mac_sdk_path)/Toolchains/XcodeDefault.xctoolchain/usr/include/c++/v1
+$(combo_2nd_arch_prefix)HOST_SYSTEMCPP_LDFLAGS := -stdlib=libc++
+
 ############################################################
 ## Macros after this line are shared by the 64-bit config.
 
diff --git a/core/combo/HOST_darwin-x86_64.mk b/core/combo/HOST_darwin-x86_64.mk
index 251455f..f2d47be 100644
--- a/core/combo/HOST_darwin-x86_64.mk
+++ b/core/combo/HOST_darwin-x86_64.mk
@@ -31,33 +31,24 @@
 
 include $(BUILD_COMBOS)/mac_version.mk
 
-HOST_TOOLCHAIN_ROOT := prebuilts/gcc/darwin-x86/host/i686-apple-darwin-4.2.1
-HOST_TOOLCHAIN_PREFIX := $(HOST_TOOLCHAIN_ROOT)/bin/i686-apple-darwin$(gcc_darwin_version)
-HOST_CC  := $(HOST_TOOLCHAIN_PREFIX)-gcc
-HOST_CXX := $(HOST_TOOLCHAIN_PREFIX)-g++
-
 define $(combo_var_prefix)transform-shared-lib-to-toc
 $(call _gen_toc_command_for_macho,$(1),$(2))
 endef
 
-# gcc location for clang; to be updated when clang is updated
-# HOST_TOOLCHAIN_ROOT is a Darwin-specific define
-HOST_TOOLCHAIN_FOR_CLANG := $(HOST_TOOLCHAIN_ROOT)
-
-HOST_AR := $(AR)
-
 HOST_GLOBAL_CFLAGS += -isysroot $(mac_sdk_root) -mmacosx-version-min=$(mac_sdk_version) -DMACOSX_DEPLOYMENT_TARGET=$(mac_sdk_version)
-HOST_GLOBAL_CPPFLAGS += -isystem $(mac_sdk_path)/Toolchains/XcodeDefault.xctoolchain/usr/include/c++/v1
 HOST_GLOBAL_LDFLAGS += -isysroot $(mac_sdk_root) -Wl,-syslibroot,$(mac_sdk_root) -mmacosx-version-min=$(mac_sdk_version)
 
 HOST_GLOBAL_CFLAGS += -fPIC -funwind-tables
-HOST_NO_UNDEFINED_LDFLAGS := -Wl,-undefined,error
 
 HOST_SHLIB_SUFFIX := .dylib
 HOST_JNILIB_SUFFIX := .jnilib
 
 HOST_GLOBAL_ARFLAGS := cqs
 
+# Use Darwin's libc++, as Darwin's libstdc++ is old and does not support C++11
+HOST_SYSTEMCPP_CPPFLAGS := -isystem $(mac_sdk_path)/Toolchains/XcodeDefault.xctoolchain/usr/include/c++/v1
+HOST_SYSTEMCPP_LDFLAGS := -stdlib=libc++
+
 # We Reuse the following functions with the same name from HOST_darwin-x86.mk:
 # transform-host-o-to-shared-lib-inner
 # transform-host-o-to-executable-inner
diff --git a/core/combo/HOST_linux-x86.mk b/core/combo/HOST_linux-x86.mk
index 169e2d2..2213cd3 100644
--- a/core/combo/HOST_linux-x86.mk
+++ b/core/combo/HOST_linux-x86.mk
@@ -17,15 +17,6 @@
 # Configuration for builds hosted on linux-x86.
 # Included by combo/select.mk
 
-ifeq ($(strip $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX)),)
-$(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX := prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/bin/x86_64-linux-
-endif
-$(combo_2nd_arch_prefix)HOST_CC  := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX)gcc
-$(combo_2nd_arch_prefix)HOST_CXX := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX)g++
-$(combo_2nd_arch_prefix)HOST_AR  := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX)ar
-$(combo_2nd_arch_prefix)HOST_READELF  := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX)readelf
-$(combo_2nd_arch_prefix)HOST_NM  := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX)nm
-
 define $(combo_var_prefix)transform-shared-lib-to-toc
 $(call _gen_toc_command_for_elf,$(1),$(2))
 endef
@@ -55,8 +46,6 @@
 # both 32- and 64-bit (whether art uses them or not --- 9d59f417767991246848c3e101cb27d2dfea5988).
 $(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1
 
-$(combo_2nd_arch_prefix)HOST_NO_UNDEFINED_LDFLAGS := -Wl,--no-undefined
-
 ############################################################
 ## Macros after this line are shared by the 64-bit config.
 
diff --git a/core/combo/HOST_linux-x86_64.mk b/core/combo/HOST_linux-x86_64.mk
index 9766f2b..ccd2239 100644
--- a/core/combo/HOST_linux-x86_64.mk
+++ b/core/combo/HOST_linux-x86_64.mk
@@ -17,15 +17,6 @@
 # Configuration for builds hosted on linux-x86_64.
 # Included by combo/select.mk
 
-ifeq ($(strip $(HOST_TOOLCHAIN_PREFIX)),)
-HOST_TOOLCHAIN_PREFIX := prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/bin/x86_64-linux-
-endif
-HOST_CC  := $(HOST_TOOLCHAIN_PREFIX)gcc
-HOST_CXX := $(HOST_TOOLCHAIN_PREFIX)g++
-HOST_AR  := $(HOST_TOOLCHAIN_PREFIX)ar
-HOST_READELF  := $(HOST_TOOLCHAIN_PREFIX)readelf
-HOST_NM  := $(HOST_TOOLCHAIN_PREFIX)nm
-
 define $(combo_var_prefix)transform-shared-lib-to-toc
 $(call _gen_toc_command_for_elf,$(1),$(2))
 endef
@@ -49,5 +40,3 @@
 # Workaround differences in inttypes.h between host and target.
 # See bug 12708004.
 HOST_GLOBAL_CFLAGS += -D__STDC_FORMAT_MACROS -D__STDC_CONSTANT_MACROS
-
-HOST_NO_UNDEFINED_LDFLAGS := -Wl,--no-undefined
diff --git a/core/combo/TARGET_linux-arm.mk b/core/combo/TARGET_linux-arm.mk
index 510aae5..acd43f8 100644
--- a/core/combo/TARGET_linux-arm.mk
+++ b/core/combo/TARGET_linux-arm.mk
@@ -36,11 +36,7 @@
 # Decouple NDK library selection with platform compiler version
 $(combo_2nd_arch_prefix)TARGET_NDK_GCC_VERSION := 4.9
 
-ifeq ($(strip $(TARGET_GCC_VERSION_EXP)),)
 $(combo_2nd_arch_prefix)TARGET_GCC_VERSION := 4.9
-else
-$(combo_2nd_arch_prefix)TARGET_GCC_VERSION := $(TARGET_GCC_VERSION_EXP)
-endif
 
 TARGET_ARCH_SPECIFIC_MAKEFILE := $(BUILD_COMBOS)/arch/$(TARGET_$(combo_2nd_arch_prefix)ARCH)/$(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT).mk
 ifeq ($(strip $(wildcard $(TARGET_ARCH_SPECIFIC_MAKEFILE))),)
@@ -50,27 +46,12 @@
 include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
 include $(BUILD_SYSTEM)/combo/fdo.mk
 
-# You can set TARGET_TOOLS_PREFIX to get gcc from somewhere else
-ifeq ($(strip $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)),)
 $(combo_2nd_arch_prefix)TARGET_TOOLCHAIN_ROOT := prebuilts/gcc/$(HOST_PREBUILT_TAG)/arm/arm-linux-androideabi-$($(combo_2nd_arch_prefix)TARGET_GCC_VERSION)
-$(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX := $($(combo_2nd_arch_prefix)TARGET_TOOLCHAIN_ROOT)/bin/arm-linux-androideabi-
-endif
-
-$(combo_2nd_arch_prefix)TARGET_CC := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)gcc
-$(combo_2nd_arch_prefix)TARGET_CXX := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)g++
-$(combo_2nd_arch_prefix)TARGET_AR := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)ar
-$(combo_2nd_arch_prefix)TARGET_OBJCOPY := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)objcopy
-$(combo_2nd_arch_prefix)TARGET_LD := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)ld
-$(combo_2nd_arch_prefix)TARGET_READELF := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)readelf
-$(combo_2nd_arch_prefix)TARGET_STRIP := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)strip
-$(combo_2nd_arch_prefix)TARGET_NM := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)nm
 
 define $(combo_var_prefix)transform-shared-lib-to-toc
 $(call _gen_toc_command_for_elf,$(1),$(2))
 endef
 
-$(combo_2nd_arch_prefix)TARGET_NO_UNDEFINED_LDFLAGS := -Wl,--no-undefined
-
 $(combo_2nd_arch_prefix)TARGET_arm_CFLAGS :=    -O2 \
                         -fomit-frame-pointer \
                         -fstrict-aliasing    \
@@ -119,16 +100,6 @@
 			-fno-strict-volatile-bitfields
 endif
 
-# This is to avoid the dreaded warning compiler message:
-#   note: the mangling of 'va_list' has changed in GCC 4.4
-#
-# The fact that the mangling changed does not affect the NDK ABI
-# very fortunately (since none of the exposed APIs used va_list
-# in their exported C++ functions). Also, GCC 4.5 has already
-# removed the warning from the compiler.
-#
-$(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS += -Wno-psabi
-
 $(combo_2nd_arch_prefix)TARGET_GLOBAL_LDFLAGS += \
 			-Wl,-z,noexecstack \
 			-Wl,-z,relro \
@@ -155,21 +126,6 @@
 			-frename-registers
 
 libc_root := bionic/libc
-libm_root := bionic/libm
-
-
-## on some hosts, the target cross-compiler is not available so do not run this command
-ifneq ($(wildcard $($(combo_2nd_arch_prefix)TARGET_CC)),)
-# We compile with the global cflags to ensure that
-# any flags which affect libgcc are correctly taken
-# into account.
-$(combo_2nd_arch_prefix)TARGET_LIBGCC := $(shell $($(combo_2nd_arch_prefix)TARGET_CC) \
-        $($(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS) -print-libgcc-file-name)
-$(combo_2nd_arch_prefix)TARGET_LIBATOMIC := $(shell $($(combo_2nd_arch_prefix)TARGET_CC) \
-        $($(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS) -print-file-name=libatomic.a)
-$(combo_2nd_arch_prefix)TARGET_LIBGCOV := $(shell $($(combo_2nd_arch_prefix)TARGET_CC) \
-        $($(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS) -print-file-name=libgcov.a)
-endif
 
 KERNEL_HEADERS_COMMON := $(libc_root)/kernel/uapi
 KERNEL_HEADERS_COMMON += $(libc_root)/kernel/common
@@ -179,9 +135,7 @@
 $(combo_2nd_arch_prefix)TARGET_C_INCLUDES := \
 	$(libc_root)/arch-arm/include \
 	$(libc_root)/include \
-	$(KERNEL_HEADERS) \
-	$(libm_root)/include \
-	$(libm_root)/include/arm \
+	$(KERNEL_HEADERS)
 
 $(combo_2nd_arch_prefix)TARGET_CRTBEGIN_STATIC_O := $($(combo_2nd_arch_prefix)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_static.o
 $(combo_2nd_arch_prefix)TARGET_CRTBEGIN_DYNAMIC_O := $($(combo_2nd_arch_prefix)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_dynamic.o
diff --git a/core/combo/TARGET_linux-arm64.mk b/core/combo/TARGET_linux-arm64.mk
index 6a1d861..fdb1da7 100644
--- a/core/combo/TARGET_linux-arm64.mk
+++ b/core/combo/TARGET_linux-arm64.mk
@@ -36,11 +36,7 @@
 # Decouple NDK library selection with platform compiler version
 TARGET_NDK_GCC_VERSION := 4.9
 
-ifeq ($(strip $(TARGET_GCC_VERSION_EXP)),)
 TARGET_GCC_VERSION := 4.9
-else
-TARGET_GCC_VERSION := $(TARGET_GCC_VERSION_EXP)
-endif
 
 TARGET_ARCH_SPECIFIC_MAKEFILE := $(BUILD_COMBOS)/arch/$(TARGET_ARCH)/$(TARGET_ARCH_VARIANT).mk
 ifeq ($(strip $(wildcard $(TARGET_ARCH_SPECIFIC_MAKEFILE))),)
@@ -50,27 +46,12 @@
 include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
 include $(BUILD_SYSTEM)/combo/fdo.mk
 
-# You can set TARGET_TOOLS_PREFIX to get gcc from somewhere else
-ifeq ($(strip $(TARGET_TOOLS_PREFIX)),)
 TARGET_TOOLCHAIN_ROOT := prebuilts/gcc/$(HOST_PREBUILT_TAG)/aarch64/aarch64-linux-android-$(TARGET_GCC_VERSION)
-TARGET_TOOLS_PREFIX := $(TARGET_TOOLCHAIN_ROOT)/bin/aarch64-linux-android-
-endif
-
-TARGET_CC := $(TARGET_TOOLS_PREFIX)gcc
-TARGET_CXX := $(TARGET_TOOLS_PREFIX)g++
-TARGET_AR := $(TARGET_TOOLS_PREFIX)ar
-TARGET_OBJCOPY := $(TARGET_TOOLS_PREFIX)objcopy
-TARGET_LD := $(TARGET_TOOLS_PREFIX)ld
-TARGET_READELF := $(TARGET_TOOLS_PREFIX)readelf
-TARGET_STRIP := $(TARGET_TOOLS_PREFIX)strip
-TARGET_NM := $(TARGET_TOOLS_PREFIX)nm
 
 define $(combo_var_prefix)transform-shared-lib-to-toc
 $(call _gen_toc_command_for_elf,$(1),$(2))
 endef
 
-TARGET_NO_UNDEFINED_LDFLAGS := -Wl,--no-undefined
-
 TARGET_GLOBAL_CFLAGS += \
     -fno-strict-aliasing \
 
@@ -95,16 +76,6 @@
 
 TARGET_GLOBAL_CFLAGS += -fno-strict-volatile-bitfields
 
-# This is to avoid the dreaded warning compiler message:
-#   note: the mangling of 'va_list' has changed in GCC 4.4
-#
-# The fact that the mangling changed does not affect the NDK ABI
-# very fortunately (since none of the exposed APIs used va_list
-# in their exported C++ functions). Also, GCC 4.5 has already
-# removed the warning from the compiler.
-#
-TARGET_GLOBAL_CFLAGS += -Wno-psabi
-
 TARGET_GLOBAL_LDFLAGS += \
 			-Wl,-z,noexecstack \
 			-Wl,-z,relro \
@@ -135,14 +106,6 @@
 			-frename-registers
 
 libc_root := bionic/libc
-libm_root := bionic/libm
-
-TARGET_LIBGCC := $(shell $(TARGET_CC) $(TARGET_GLOBAL_CFLAGS) \
-	-print-libgcc-file-name)
-TARGET_LIBATOMIC := $(shell $(TARGET_CC) $(TARGET_GLOBAL_CFLAGS) \
-	-print-file-name=libatomic.a)
-TARGET_LIBGCOV := $(shell $(TARGET_CC) $(TARGET_GLOBAL_CFLAGS) \
-	-print-file-name=libgcov.a)
 
 KERNEL_HEADERS_COMMON := $(libc_root)/kernel/uapi
 KERNEL_HEADERS_COMMON += $(libc_root)/kernel/common
@@ -152,9 +115,7 @@
 TARGET_C_INCLUDES := \
 	$(libc_root)/arch-arm64/include \
 	$(libc_root)/include \
-	$(KERNEL_HEADERS) \
-	$(libm_root)/include \
-	$(libm_root)/include/arm64 \
+	$(KERNEL_HEADERS)
 
 TARGET_CRTBEGIN_STATIC_O := $(TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_static.o
 TARGET_CRTBEGIN_DYNAMIC_O := $(TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_dynamic.o
diff --git a/core/combo/TARGET_linux-mips.mk b/core/combo/TARGET_linux-mips.mk
index 186d88f..0a9e42c 100644
--- a/core/combo/TARGET_linux-mips.mk
+++ b/core/combo/TARGET_linux-mips.mk
@@ -36,11 +36,7 @@
 # Decouple NDK library selection with platform compiler version
 $(combo_2nd_arch_prefix)TARGET_NDK_GCC_VERSION := 4.9
 
-ifeq ($(strip $(TARGET_GCC_VERSION_EXP)),)
 $(combo_2nd_arch_prefix)TARGET_GCC_VERSION := 4.9
-else
-$(combo_2nd_arch_prefix)TARGET_GCC_VERSION := $(TARGET_GCC_VERSION_EXP)
-endif
 
 TARGET_ARCH_SPECIFIC_MAKEFILE := $(BUILD_COMBOS)/arch/$(TARGET_$(combo_2nd_arch_prefix)ARCH)/$(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT).mk
 ifeq ($(strip $(wildcard $(TARGET_ARCH_SPECIFIC_MAKEFILE))),)
@@ -50,27 +46,12 @@
 include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
 include $(BUILD_SYSTEM)/combo/fdo.mk
 
-# You can set TARGET_TOOLS_PREFIX to get gcc from somewhere else
-ifeq ($(strip $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)),)
 $(combo_2nd_arch_prefix)TARGET_TOOLCHAIN_ROOT := prebuilts/gcc/$(HOST_PREBUILT_TAG)/mips/mips64el-linux-android-$($(combo_2nd_arch_prefix)TARGET_GCC_VERSION)
-$(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX := $($(combo_2nd_arch_prefix)TARGET_TOOLCHAIN_ROOT)/bin/mips64el-linux-android-
-endif
-
-$(combo_2nd_arch_prefix)TARGET_CC := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)gcc
-$(combo_2nd_arch_prefix)TARGET_CXX := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)g++
-$(combo_2nd_arch_prefix)TARGET_AR := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)ar
-$(combo_2nd_arch_prefix)TARGET_OBJCOPY := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)objcopy
-$(combo_2nd_arch_prefix)TARGET_LD := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)ld
-$(combo_2nd_arch_prefix)TARGET_READELF := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)readelf
-$(combo_2nd_arch_prefix)TARGET_STRIP := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)strip
-$(combo_2nd_arch_prefix)TARGET_NM := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)nm
 
 define $(combo_var_prefix)transform-shared-lib-to-toc
 $(call _gen_toc_command_for_elf,$(1),$(2))
 endef
 
-$(combo_2nd_arch_prefix)TARGET_NO_UNDEFINED_LDFLAGS := -Wl,--no-undefined
-
 TARGET_mips_CFLAGS :=	-O2 \
 			-fomit-frame-pointer \
 			-fno-strict-aliasing    \
@@ -126,25 +107,6 @@
 			-frename-registers
 
 libc_root := bionic/libc
-libm_root := bionic/libm
-
-
-## on some hosts, the target cross-compiler is not available so do not run this command
-ifneq ($(wildcard $($(combo_2nd_arch_prefix)TARGET_CC)),)
-# We compile with the global cflags to ensure that
-# any flags which affect libgcc are correctly taken
-# into account.
-$(combo_2nd_arch_prefix)TARGET_LIBGCC := \
-  $(shell $($(combo_2nd_arch_prefix)TARGET_CC) $($(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS) -print-file-name=libgcc.a)
-$(combo_2nd_arch_prefix)TARGET_LIBATOMIC := \
-  $(shell $($(combo_2nd_arch_prefix)TARGET_CC) $($(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS) -print-file-name=libatomic.a)
-LIBGCC_EH := $(shell $($(combo_2nd_arch_prefix)TARGET_CC) $($(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS) -print-file-name=libgcc_eh.a)
-ifneq ($(LIBGCC_EH),libgcc_eh.a)
-  $(combo_2nd_arch_prefix)TARGET_LIBGCC += $(LIBGCC_EH)
-endif
-$(combo_2nd_arch_prefix)TARGET_LIBGCOV := $(shell $($(combo_2nd_arch_prefix)TARGET_CC) $($(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS) \
-        --print-file-name=libgcov.a)
-endif
 
 KERNEL_HEADERS_COMMON := $(libc_root)/kernel/uapi
 KERNEL_HEADERS_COMMON += $(libc_root)/kernel/common
@@ -154,9 +116,7 @@
 $(combo_2nd_arch_prefix)TARGET_C_INCLUDES := \
 	$(libc_root)/arch-mips/include \
 	$(libc_root)/include \
-	$(KERNEL_HEADERS) \
-	$(libm_root)/include \
-	$(libm_root)/include/mips \
+	$(KERNEL_HEADERS)
 
 $(combo_2nd_arch_prefix)TARGET_CRTBEGIN_STATIC_O := $($(combo_2nd_arch_prefix)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_static.o
 $(combo_2nd_arch_prefix)TARGET_CRTBEGIN_DYNAMIC_O := $($(combo_2nd_arch_prefix)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_dynamic.o
diff --git a/core/combo/TARGET_linux-mips64.mk b/core/combo/TARGET_linux-mips64.mk
index 3e1f61a..91b0057 100644
--- a/core/combo/TARGET_linux-mips64.mk
+++ b/core/combo/TARGET_linux-mips64.mk
@@ -36,11 +36,7 @@
 # Decouple NDK library selection with platform compiler version
 TARGET_NDK_GCC_VERSION := 4.9
 
-ifeq ($(strip $(TARGET_GCC_VERSION_EXP)),)
 TARGET_GCC_VERSION := 4.9
-else
-TARGET_GCC_VERSION := $(TARGET_GCC_VERSION_EXP)
-endif
 
 TARGET_ARCH_SPECIFIC_MAKEFILE := $(BUILD_COMBOS)/arch/$(TARGET_ARCH)/$(TARGET_ARCH_VARIANT).mk
 ifeq ($(strip $(wildcard $(TARGET_ARCH_SPECIFIC_MAKEFILE))),)
@@ -50,27 +46,12 @@
 include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
 include $(BUILD_SYSTEM)/combo/fdo.mk
 
-# You can set TARGET_TOOLS_PREFIX to get gcc from somewhere else
-ifeq ($(strip $(TARGET_TOOLS_PREFIX)),)
 TARGET_TOOLCHAIN_ROOT := prebuilts/gcc/$(HOST_PREBUILT_TAG)/mips/mips64el-linux-android-$(TARGET_GCC_VERSION)
-TARGET_TOOLS_PREFIX := $(TARGET_TOOLCHAIN_ROOT)/bin/mips64el-linux-android-
-endif
-
-TARGET_CC := $(TARGET_TOOLS_PREFIX)gcc
-TARGET_CXX := $(TARGET_TOOLS_PREFIX)g++
-TARGET_AR := $(TARGET_TOOLS_PREFIX)ar
-TARGET_OBJCOPY := $(TARGET_TOOLS_PREFIX)objcopy
-TARGET_LD := $(TARGET_TOOLS_PREFIX)ld
-TARGET_READELF := $(TARGET_TOOLS_PREFIX)readelf
-TARGET_STRIP := $(TARGET_TOOLS_PREFIX)strip
-TARGET_NM := $(TARGET_TOOLS_PREFIX)nm
 
 define $(combo_var_prefix)transform-shared-lib-to-toc
 $(call _gen_toc_command_for_elf,$(1),$(2))
 endef
 
-TARGET_NO_UNDEFINED_LDFLAGS := -Wl,--no-undefined
-
 TARGET_mips_CFLAGS :=	-O2 \
 			-fomit-frame-pointer \
 			-fno-strict-aliasing    \
@@ -132,25 +113,6 @@
 			-frename-registers
 
 libc_root := bionic/libc
-libm_root := bionic/libm
-
-
-## on some hosts, the target cross-compiler is not available so do not run this command
-ifneq ($(wildcard $(TARGET_CC)),)
-# We compile with the global cflags to ensure that
-# any flags which affect libgcc are correctly taken
-# into account.
-TARGET_LIBGCC := \
-  $(shell $(TARGET_CC) $(TARGET_GLOBAL_CFLAGS) -print-file-name=libgcc.a)
-TARGET_LIBATOMIC := \
-  $(shell $(TARGET_CC) $(TARGET_GLOBAL_CFLAGS) -print-file-name=libatomic.a)
-LIBGCC_EH := $(shell $(TARGET_CC) $(TARGET_GLOBAL_CFLAGS) -print-file-name=libgcc_eh.a)
-ifneq ($(LIBGCC_EH),libgcc_eh.a)
-  TARGET_LIBGCC += $(LIBGCC_EH)
-endif
-TARGET_LIBGCOV := $(shell $(TARGET_CC) $(TARGET_GLOBAL_CFLAGS) \
-        --print-file-name=libgcov.a)
-endif
 
 KERNEL_HEADERS_COMMON := $(libc_root)/kernel/uapi
 KERNEL_HEADERS_COMMON += $(libc_root)/kernel/common
@@ -161,11 +123,7 @@
 TARGET_C_INCLUDES := \
 	$(libc_root)/arch-mips64/include \
 	$(libc_root)/include \
-	$(KERNEL_HEADERS) \
-	$(libm_root)/include \
-	$(libm_root)/include/mips \
-
-# TODO: perhaps use $(libm_root)/include/mips64 instead of mips ?
+	$(KERNEL_HEADERS)
 
 TARGET_CRTBEGIN_STATIC_O := $(TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_static.o
 TARGET_CRTBEGIN_DYNAMIC_O := $(TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_dynamic.o
diff --git a/core/combo/TARGET_linux-x86.mk b/core/combo/TARGET_linux-x86.mk
index 558ec3b..e8cd92d 100644
--- a/core/combo/TARGET_linux-x86.mk
+++ b/core/combo/TARGET_linux-x86.mk
@@ -25,11 +25,7 @@
 # Decouple NDK library selection with platform compiler version
 $(combo_2nd_arch_prefix)TARGET_NDK_GCC_VERSION := 4.9
 
-ifeq ($(strip $(TARGET_GCC_VERSION_EXP)),)
 $(combo_2nd_arch_prefix)TARGET_GCC_VERSION := 4.9
-else
-$(combo_2nd_arch_prefix)TARGET_GCC_VERSION := $(TARGET_GCC_VERSION_EXP)
-endif
 
 # Include the arch-variant-specific configuration file.
 # Its role is to define various ARCH_X86_HAVE_XXX feature macros,
@@ -43,38 +39,13 @@
 include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
 include $(BUILD_SYSTEM)/combo/fdo.mk
 
-# You can set TARGET_TOOLS_PREFIX to get gcc from somewhere else
-ifeq ($(strip $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)),)
 $(combo_2nd_arch_prefix)TARGET_TOOLCHAIN_ROOT := prebuilts/gcc/$(HOST_PREBUILT_TAG)/x86/x86_64-linux-android-$($(combo_2nd_arch_prefix)TARGET_GCC_VERSION)
-$(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX := $($(combo_2nd_arch_prefix)TARGET_TOOLCHAIN_ROOT)/bin/x86_64-linux-android-
-endif
-
-$(combo_2nd_arch_prefix)TARGET_CC := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)gcc
-$(combo_2nd_arch_prefix)TARGET_CXX := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)g++
-$(combo_2nd_arch_prefix)TARGET_AR := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)ar
-$(combo_2nd_arch_prefix)TARGET_OBJCOPY := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)objcopy
-$(combo_2nd_arch_prefix)TARGET_LD := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)ld
-$(combo_2nd_arch_prefix)TARGET_READELF := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)readelf
-$(combo_2nd_arch_prefix)TARGET_STRIP := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)strip
-$(combo_2nd_arch_prefix)TARGET_NM := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)nm
 
 define $(combo_var_prefix)transform-shared-lib-to-toc
 $(call _gen_toc_command_for_elf,$(1),$(2))
 endef
 
-ifneq ($(wildcard $($(combo_2nd_arch_prefix)TARGET_CC)),)
-$(combo_2nd_arch_prefix)TARGET_LIBGCC := \
-	$(shell $($(combo_2nd_arch_prefix)TARGET_CC) -m32 -print-file-name=libgcc.a)
-$(combo_2nd_arch_prefix)TARGET_LIBATOMIC := \
-	$(shell $($(combo_2nd_arch_prefix)TARGET_CC) -m32 -print-file-name=libatomic.a)
-$(combo_2nd_arch_prefix)TARGET_LIBGCOV := \
-	$(shell $($(combo_2nd_arch_prefix)TARGET_CC) -m32 -print-file-name=libgcov.a)
-endif
-
-$(combo_2nd_arch_prefix)TARGET_NO_UNDEFINED_LDFLAGS := -Wl,--no-undefined
-
 libc_root := bionic/libc
-libm_root := bionic/libm
 
 KERNEL_HEADERS_COMMON := $(libc_root)/kernel/uapi
 KERNEL_HEADERS_COMMON += $(libc_root)/kernel/common
@@ -134,9 +105,7 @@
 $(combo_2nd_arch_prefix)TARGET_C_INCLUDES := \
 	$(libc_root)/arch-x86/include \
 	$(libc_root)/include \
-	$(KERNEL_HEADERS) \
-	$(libm_root)/include \
-	$(libm_root)/include/i387 \
+	$(KERNEL_HEADERS)
 
 $(combo_2nd_arch_prefix)TARGET_CRTBEGIN_STATIC_O := $($(combo_2nd_arch_prefix)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_static.o
 $(combo_2nd_arch_prefix)TARGET_CRTBEGIN_DYNAMIC_O := $($(combo_2nd_arch_prefix)TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_dynamic.o
diff --git a/core/combo/TARGET_linux-x86_64.mk b/core/combo/TARGET_linux-x86_64.mk
index 12166ec..8b2a37f 100644
--- a/core/combo/TARGET_linux-x86_64.mk
+++ b/core/combo/TARGET_linux-x86_64.mk
@@ -25,11 +25,7 @@
 # Decouple NDK library selection with platform compiler version
 TARGET_NDK_GCC_VERSION := 4.9
 
-ifeq ($(strip $(TARGET_GCC_VERSION_EXP)),)
 TARGET_GCC_VERSION := 4.9
-else
-TARGET_GCC_VERSION := $(TARGET_GCC_VERSION_EXP)
-endif
 
 # Include the arch-variant-specific configuration file.
 # Its role is to define various ARCH_X86_HAVE_XXX feature macros,
@@ -43,38 +39,13 @@
 include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
 include $(BUILD_SYSTEM)/combo/fdo.mk
 
-# You can set TARGET_TOOLS_PREFIX to get gcc from somewhere else
-ifeq ($(strip $(TARGET_TOOLS_PREFIX)),)
 TARGET_TOOLCHAIN_ROOT := prebuilts/gcc/$(HOST_PREBUILT_TAG)/x86/x86_64-linux-android-$(TARGET_GCC_VERSION)
-TARGET_TOOLS_PREFIX := $(TARGET_TOOLCHAIN_ROOT)/bin/x86_64-linux-android-
-endif
-
-TARGET_CC := $(TARGET_TOOLS_PREFIX)gcc
-TARGET_CXX := $(TARGET_TOOLS_PREFIX)g++
-TARGET_AR := $(TARGET_TOOLS_PREFIX)ar
-TARGET_OBJCOPY := $(TARGET_TOOLS_PREFIX)objcopy
-TARGET_LD := $(TARGET_TOOLS_PREFIX)ld
-TARGET_READELF := $(TARGET_TOOLS_PREFIX)readelf
-TARGET_STRIP := $(TARGET_TOOLS_PREFIX)strip
-TARGET_NM := $(TARGET_TOOLS_PREFIX)nm
 
 define $(combo_var_prefix)transform-shared-lib-to-toc
 $(call _gen_toc_command_for_elf,$(1),$(2))
 endef
 
-ifneq ($(wildcard $(TARGET_CC)),)
-TARGET_LIBGCC := \
-	$(shell $(TARGET_CC) -m64 -print-file-name=libgcc.a)
-TARGET_LIBATOMIC := \
-	$(shell $(TARGET_CC) -m64 -print-file-name=libatomic.a)
-TARGET_LIBGCOV := \
-	$(shell $(TARGET_CC) -m64 -print-file-name=libgcov.a)
-endif
-
-TARGET_NO_UNDEFINED_LDFLAGS := -Wl,--no-undefined
-
 libc_root := bionic/libc
-libm_root := bionic/libm
 
 KERNEL_HEADERS_COMMON := $(libc_root)/kernel/uapi
 KERNEL_HEADERS_COMMON += $(libc_root)/kernel/common
@@ -143,9 +114,7 @@
 TARGET_C_INCLUDES := \
 	$(libc_root)/arch-x86_64/include \
 	$(libc_root)/include \
-	$(KERNEL_HEADERS) \
-	$(libm_root)/include \
-	$(libm_root)/include/amd64 \
+	$(KERNEL_HEADERS)
 
 TARGET_CRTBEGIN_STATIC_O := $(TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_static.o
 TARGET_CRTBEGIN_DYNAMIC_O := $(TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_dynamic.o
diff --git a/core/combo/arch/arm/armv7-a-neon.mk b/core/combo/arch/arm/armv7-a-neon.mk
index 5d5b050..5517a79 100644
--- a/core/combo/arch/arm/armv7-a-neon.mk
+++ b/core/combo/arch/arm/armv7-a-neon.mk
@@ -31,6 +31,11 @@
 	arch_variant_ldflags := \
 		-Wl,--no-fix-cortex-a8
 else
+ifeq ($(strip $(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT)),cortex-a9)
+	arch_variant_cflags := -march=armv7-a
+	arch_variant_ldflags := \
+		-Wl,--no-fix-cortex-a8
+else
 	arch_variant_cflags := -march=armv7-a
 	# Generic ARM might be a Cortex A8 -- better safe than sorry
 	arch_variant_ldflags := \
@@ -38,6 +43,7 @@
 endif
 endif
 endif
+endif
 
 ifeq (true,$(local_arch_has_lpae))
 	# Fake an ARM compiler flag as these processors support LPAE which GCC/clang
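
The new branch is selected through the usual CPU-variant board variable. A hypothetical BoardConfig.mk fragment (values are illustrative, not from this patch) that would take the cortex-a9 path, and thus keep --no-fix-cortex-a8, might read:

TARGET_ARCH := arm
TARGET_ARCH_VARIANT := armv7-a-neon
TARGET_CPU_VARIANT := cortex-a9
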
diff --git a/core/combo/arch/x86/sandybridge.mk b/core/combo/arch/x86/sandybridge.mk
index bca5953..830e1db 100644
--- a/core/combo/arch/x86/sandybridge.mk
+++ b/core/combo/arch/x86/sandybridge.mk
@@ -5,13 +5,13 @@
 ARCH_X86_HAVE_SSE4   := true
 ARCH_X86_HAVE_SSE4_1 := true
 ARCH_X86_HAVE_SSE4_2 := true
-ARCH_X86_HAVE_AES_NI := true
-ARCH_X86_HAVE_AVX    := true
+ARCH_X86_HAVE_AES_NI := false
+ARCH_X86_HAVE_AVX    := false
 ARCH_X86_HAVE_POPCNT := true
 ARCH_X86_HAVE_MOVBE  := false
 
 # CFLAGS for this arch
 arch_variant_cflags := \
-	-march=corei7-avx \
+	-march=corei7 \
 	-mfpmath=sse \
 
diff --git a/core/combo/arch/x86/x86_64.mk b/core/combo/arch/x86/x86_64.mk
new file mode 100644
index 0000000..620fbd8
--- /dev/null
+++ b/core/combo/arch/x86/x86_64.mk
@@ -0,0 +1,18 @@
+# This file is used as the second (32-bit) architecture when building a generic
+# x86_64 64-bit platform image. (full_x86_64-eng / sdk_x86_64-eng)
+#
+# The generic 'x86' variant cannot be used, since it resets some flags used
+# by the 'x86_64' variant.
+
+ARCH_X86_HAVE_SSSE3 := true
+ARCH_X86_HAVE_MOVBE := false # Only supported on Atom.
+ARCH_X86_HAVE_POPCNT := true
+ARCH_X86_HAVE_SSE4 := true
+ARCH_X86_HAVE_SSE4_1 := true
+ARCH_X86_HAVE_SSE4_2 := true
+
+
+# Some intrinsic functions used by libcxx only exist for prescott or newer CPUs.
+arch_variant_cflags := \
+    -march=prescott \
+
diff --git a/core/combo/arch/x86_64/sandybridge.mk b/core/combo/arch/x86_64/sandybridge.mk
index 865548c..574ec8a 100644
--- a/core/combo/arch/x86_64/sandybridge.mk
+++ b/core/combo/arch/x86_64/sandybridge.mk
@@ -5,11 +5,11 @@
 ARCH_X86_HAVE_SSE4   := true
 ARCH_X86_HAVE_SSE4_1 := true
 ARCH_X86_HAVE_SSE4_2 := true
-ARCH_X86_HAVE_AES_NI := true
-ARCH_X86_HAVE_AVX    := true
+ARCH_X86_HAVE_AES_NI := false
+ARCH_X86_HAVE_AVX    := false
 ARCH_X86_HAVE_POPCNT := true
 ARCH_X86_HAVE_MOVBE  := false
 
 # CFLAGS for this arch
 arch_variant_cflags := \
-	-march=corei7-avx
+	-march=corei7
diff --git a/core/combo/javac.mk b/core/combo/javac.mk
index 7f66ea8..9042d83 100644
--- a/core/combo/javac.mk
+++ b/core/combo/javac.mk
@@ -9,11 +9,6 @@
 #   COMMON_JAVAC -- Java compiler command with common arguments
 #
 
-ifndef ANDROID_COMPILE_WITH_JACK
-# Defines if compilation with jack is enabled by default.
-ANDROID_COMPILE_WITH_JACK := true
-endif
-
 common_jdk_flags := -Xmaxerrs 9999999
 
 # Use the indexer wrapper to index the codebase instead of the javac compiler
diff --git a/core/combo/mac_version.mk b/core/combo/mac_version.mk
index 51394c6..fb05e59 100644
--- a/core/combo/mac_version.mk
+++ b/core/combo/mac_version.mk
@@ -5,6 +5,13 @@
 #   mac_sdk_root
 #   gcc_darwin_version
 
+# You can no longer install older SDKs in newer xcode versions, so it appears
+# to be expected to use the newer SDKs, but set command line flags in order to
+# target older Mac OS X versions.
+#
+# We'll use the oldest SDK we can find, and then use the -mmacosx-version-min
+# and MACOSX_DEPLOYMENT_TARGET flags to set our minimum version.
+
 ifndef build_mac_version
 
 build_mac_version := $(shell sw_vers -productVersion)
@@ -32,21 +39,13 @@
 #  or /Volume/Xcode/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.?.sdk
 mac_sdk_root := $(mac_sdk_path)/Platforms/MacOSX.platform/Developer/SDKs/MacOSX$(mac_sdk_version).sdk
 ifeq ($(wildcard $(mac_sdk_root)),)
-# try legacy /Developer/SDKs/MacOSX10.?.sdk
-$(warning no SDK $(mac_sdk_version) at $(mac_sdk_root), trying legacy dir)
-mac_sdk_root := /Developer/SDKs/MacOSX$(mac_sdk_version).sdk
-endif
-ifeq ($(wildcard $(mac_sdk_root)),)
 $(warning *****************************************************)
 $(warning * Can not find SDK $(mac_sdk_version) at $(mac_sdk_root))
 $(warning *****************************************************)
 $(error Stop.)
 endif
 
-ifeq ($(mac_sdk_version),10.6)
-  gcc_darwin_version := 10
-else
-  gcc_darwin_version := 11
-endif
+# Set to the minimum version of OS X that we want to run on.
+mac_sdk_version := $(firstword $(mac_sdk_versions_supported))
 
 endif  # ifndef build_mac_version
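
A minimal sketch of the resulting version handling, with made-up version numbers (the real list depends on the installed Xcode) and an illustrative variable name: the first entry of the supported list becomes the deployment target that the Darwin host configs pass via -mmacosx-version-min and MACOSX_DEPLOYMENT_TARGET.

mac_sdk_versions_supported := 10.8 10.9 10.10 10.11
mac_sdk_version := $(firstword $(mac_sdk_versions_supported))
demo_min_version_flags := -mmacosx-version-min=$(mac_sdk_version) -DMACOSX_DEPLOYMENT_TARGET=$(mac_sdk_version)
$(info $(demo_min_version_flags))

demo: ;
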
diff --git a/core/combo/select.mk b/core/combo/select.mk
index df12e7e..54ea2da 100644
--- a/core/combo/select.mk
+++ b/core/combo/select.mk
@@ -28,11 +28,6 @@
 
 # Set reasonable defaults for the various variables
 
-$(combo_var_prefix)CC := $(CC)
-$(combo_var_prefix)CXX := $(CXX)
-$(combo_var_prefix)AR := $(AR)
-$(combo_var_prefix)STRIP := $(STRIP)
-
 $(combo_var_prefix)GLOBAL_CFLAGS := -fno-exceptions -Wno-multichar
 $(combo_var_prefix)RELEASE_CFLAGS := -O2 -g -fno-strict-aliasing
 $(combo_var_prefix)GLOBAL_CPPFLAGS :=
diff --git a/core/config.mk b/core/config.mk
index 94c880f..894af20 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -41,7 +41,6 @@
 endif
 
 # Standard source directories.
-SRC_DOCS:= $(TOPDIR)docs
 # TODO: Enforce some kind of layering; only add include paths
 #       when a module links against a particular library.
 # TODO: See if we can remove most of these from the global list.
@@ -56,9 +55,6 @@
 	$(TOPDIR)frameworks/native/opengl/include \
 	$(TOPDIR)frameworks/av/include \
 	$(TOPDIR)frameworks/base/include
-SRC_HOST_HEADERS:=$(TOPDIR)tools/include
-SRC_LIBRARIES:= $(TOPDIR)libs
-SRC_SERVERS:= $(TOPDIR)servers
 SRC_TARGET_DIR := $(TOPDIR)build/target
 SRC_API_DIR := $(TOPDIR)prebuilts/sdk/api
 SRC_SYSTEM_API_DIR := $(TOPDIR)prebuilts/sdk/system-api
@@ -197,8 +193,8 @@
 #     etc.
 #
 # NOTE: These directories MUST contain post-processed headers using the
-# bionic/libc/kernel/clean_header.py tool. Additionally, the original kernel
-# headers must also be checked in, but in a different subdirectory. By
+# bionic/libc/kernel/tools/clean_header.py tool. Additionally, the original
+# kernel headers must also be checked in, but in a different subdirectory. By
 # convention, the originals should be checked into original-kernel-headers
 # directory of the same parent dir. For example,
 #     device/samsung/tuna/kernel-headers            <----- post-processed
@@ -339,14 +335,6 @@
 TARGET_CPU_ABI_LIST_32_BIT := $(subst $(space),$(comma),$(strip $(TARGET_CPU_ABI_LIST_32_BIT)))
 TARGET_CPU_ABI_LIST_64_BIT := $(subst $(space),$(comma),$(strip $(TARGET_CPU_ABI_LIST_64_BIT)))
 
-# Compute TARGET_TOOLCHAIN_ROOT from TARGET_TOOLS_PREFIX
-# if only TARGET_TOOLS_PREFIX is passed to the make command.
-ifndef TARGET_TOOLCHAIN_ROOT
-TARGET_TOOLCHAIN_ROOT := $(patsubst %/, %, $(dir $(TARGET_TOOLS_PREFIX)))
-TARGET_TOOLCHAIN_ROOT := $(patsubst %/, %, $(dir $(TARGET_TOOLCHAIN_ROOT)))
-TARGET_TOOLCHAIN_ROOT := $(wildcard $(TARGET_TOOLCHAIN_ROOT))
-endif
-
 # Normalize WITH_STATIC_ANALYZER and WITH_SYNTAX_CHECK
 ifeq ($(strip $(WITH_STATIC_ANALYZER)),0)
   WITH_STATIC_ANALYZER :=
@@ -358,6 +346,20 @@
 # define clang/llvm versions and base directory.
 include $(BUILD_SYSTEM)/clang/versions.mk
 
+# Unset WITH_TIDY_ONLY if global WITH_TIDY_ONLY is not true nor 1.
+ifeq (,$(filter 1 true,$(WITH_TIDY_ONLY)))
+  WITH_TIDY_ONLY :=
+endif
+
+PATH_TO_CLANG_TIDY := \
+    $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/bin/clang-tidy
+ifeq ($(wildcard $(PATH_TO_CLANG_TIDY)),)
+  ifneq (,$(filter 1 true,$(WITH_TIDY)))
+    $(warning *** Disable WITH_TIDY because $(PATH_TO_CLANG_TIDY) does not exist)
+  endif
+  PATH_TO_CLANG_TIDY :=
+endif
+
 # Disable WITH_STATIC_ANALYZER and WITH_SYNTAX_CHECK if tool can't be found
 SYNTAX_TOOLS_PREFIX := \
     $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/tools/scan-build/libexec
@@ -454,6 +456,8 @@
 #
 # Tools that are prebuilts for TARGET_BUILD_APPS
 #
+prebuilt_sdk_tools := prebuilts/sdk/tools
+prebuilt_sdk_tools_bin := $(prebuilt_sdk_tools)/$(HOST_OS)/bin
 
 ACP := $(HOST_OUT_EXECUTABLES)/acp
 AIDL := $(HOST_OUT_EXECUTABLES)/aidl
@@ -468,13 +472,16 @@
 DX := $(HOST_OUT_EXECUTABLES)/dx
 MAINDEXCLASSES := $(HOST_OUT_EXECUTABLES)/mainDexClasses
 
+# Always use prebuilts for ckati and makeparallel
+prebuilt_build_tools := prebuilts/build-tools
+prebuilt_build_tools_bin := $(prebuilt_build_tools)/$(HOST_PREBUILT_TAG)/bin
+CKATI := $(prebuilt_build_tools_bin)/ckati
+MAKEPARALLEL := $(prebuilt_build_tools_bin)/makeparallel
+
 USE_PREBUILT_SDK_TOOLS_IN_PLACE := true
 
 # Override the definitions above for unbundled and PDK builds
 ifneq (,$(TARGET_BUILD_APPS)$(filter true,$(TARGET_BUILD_PDK)))
-prebuilt_sdk_tools := prebuilts/sdk/tools
-prebuilt_sdk_tools_bin := $(prebuilt_sdk_tools)/$(HOST_OS)/bin
-
 ACP := $(prebuilt_sdk_tools_bin)/acp
 AIDL := $(prebuilt_sdk_tools_bin)/aidl
 AAPT := $(prebuilt_sdk_tools_bin)/aapt
@@ -493,6 +500,8 @@
 BCC_COMPAT := $(prebuilt_sdk_tools_bin)/bcc_compat
 endif # TARGET_BUILD_PDK
 endif # TARGET_BUILD_APPS || TARGET_BUILD_PDK
+prebuilt_sdk_tools :=
+prebuilt_sdk_tools_bin :=
 
 
 # ---------------------------------------------------------------
@@ -507,6 +516,7 @@
 BISON_PKGDATADIR := $(PWD)/external/bison/data
 BISON := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/bison/bison
 YACC := $(BISON) -d
+BISON_DATA := $(wildcard external/bison/data/* external/bison/data/*/*)
 
 YASM := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/yasm/yasm
 
@@ -519,6 +529,10 @@
 BREAKPAD_GENERATE_SYMBOLS := false
 endif
 PROTOC := $(HOST_OUT_EXECUTABLES)/aprotoc$(HOST_EXECUTABLE_SUFFIX)
+NANOPB_SRCS := external/nanopb-c/generator/protoc-gen-nanopb \
+    $(wildcard external/nanopb-c/generator/*.py \
+               external/nanopb-c/generator/google/*.py \
+               external/nanopb-c/generator/proto/*.py)
 VTSC := $(HOST_OUT_EXECUTABLES)/vtsc$(HOST_EXECUTABLE_SUFFIX)
 DBUS_GENERATOR := $(HOST_OUT_EXECUTABLES)/dbus-binding-generator
 MKBOOTFS := $(HOST_OUT_EXECUTABLES)/mkbootfs$(HOST_EXECUTABLE_SUFFIX)
@@ -528,6 +542,16 @@
 else
 MKBOOTIMG := $(BOARD_CUSTOM_MKBOOTIMG)
 endif
+ifeq (,$(strip $(BOARD_CUSTOM_BPTTOOL)))
+BPTTOOL := $(HOST_OUT_EXECUTABLES)/bpttool$(HOST_EXECUTABLE_SUFFIX)
+else
+BPTTOOL := $(BOARD_CUSTOM_BPTTOOL)
+endif
+ifeq (,$(strip $(BOARD_CUSTOM_BVBTOOL)))
+BVBTOOL := $(HOST_OUT_EXECUTABLES)/bvbtool$(HOST_EXECUTABLE_SUFFIX)
+else
+BVBTOOL := $(BOARD_CUSTOM_BVBTOOL)
+endif
 APICHECK := $(HOST_OUT_EXECUTABLES)/apicheck$(HOST_EXECUTABLE_SUFFIX)
 FS_GET_STATS := $(HOST_OUT_EXECUTABLES)/fs_get_stats$(HOST_EXECUTABLE_SUFFIX)
 MAKE_EXT4FS := $(HOST_OUT_EXECUTABLES)/make_ext4fs$(HOST_EXECUTABLE_SUFFIX)
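
BPTTOOL and BVBTOOL follow the same override pattern as BOARD_CUSTOM_MKBOOTIMG: the host-built tool is used unless the board names its own. A board wanting that could set, in its BoardConfig.mk (paths below are hypothetical):

BOARD_CUSTOM_MKBOOTIMG := vendor/example/tools/mkbootimg
BOARD_CUSTOM_BPTTOOL := vendor/example/tools/bpttool
BOARD_CUSTOM_BVBTOOL := vendor/example/tools/bvbtool
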
@@ -542,26 +566,23 @@
 E2FSCK := $(HOST_OUT_EXECUTABLES)/e2fsck$(HOST_EXECUTABLE_SUFFIX)
 MKTARBALL := build/tools/mktarball.sh
 TUNE2FS := $(HOST_OUT_EXECUTABLES)/tune2fs$(HOST_EXECUTABLE_SUFFIX)
-E2FSCK := $(HOST_OUT_EXECUTABLES)/e2fsck$(HOST_EXECUTABLE_SUFFIX)
 JARJAR := $(HOST_OUT_JAVA_LIBRARIES)/jarjar.jar
 DATA_BINDING_COMPILER := $(HOST_OUT_JAVA_LIBRARIES)/databinding-compiler.jar
 
-ifeq ($(ANDROID_COMPILE_WITH_JACK),true)
-DEFAULT_JACK_ENABLED:=full
-else
-DEFAULT_JACK_ENABLED:=
-endif
 ifneq ($(ANDROID_JACK_EXTRA_ARGS),)
+JACK_DEFAULT_ARGS :=
 DEFAULT_JACK_EXTRA_ARGS := $(ANDROID_JACK_EXTRA_ARGS)
 else
-DEFAULT_JACK_EXTRA_ARGS := @$(BUILD_SYSTEM)/jack-default.args
+JACK_DEFAULT_ARGS := $(BUILD_SYSTEM)/jack-default.args
+DEFAULT_JACK_EXTRA_ARGS := @$(JACK_DEFAULT_ARGS)
 endif
 # Turn off jack warnings by default.
 DEFAULT_JACK_EXTRA_ARGS += --verbose error
 
 PROGUARD := external/proguard/bin/proguard.sh
 JAVATAGS := build/tools/java-event-log-tags.py
-RMTYPEDEFS := $(HOST_OUT_EXECUTABLES)/rmtypedefs
+MERGETAGS := build/tools/merge-event-log-tags.py
+BUILD_IMAGE_SRCS := $(wildcard build/tools/releasetools/*.py)
 APPEND2SIMG := $(HOST_OUT_EXECUTABLES)/append2simg
 VERITY_SIGNER := $(HOST_OUT_EXECUTABLES)/verity_signer
 BUILD_VERITY_TREE := $(HOST_OUT_EXECUTABLES)/build_verity_tree
@@ -707,7 +728,7 @@
 HOST_GLOBAL_LD_DIRS += -L$(HOST_OUT_INTERMEDIATE_LIBRARIES)
 TARGET_GLOBAL_LD_DIRS += -L$(TARGET_OUT_INTERMEDIATE_LIBRARIES)
 
-HOST_PROJECT_INCLUDES:= $(SRC_HEADERS) $(SRC_HOST_HEADERS) $(HOST_OUT_HEADERS)
+HOST_PROJECT_INCLUDES:= $(SRC_HEADERS) $(HOST_OUT_HEADERS)
 TARGET_PROJECT_INCLUDES:= $(SRC_HEADERS) $(TARGET_OUT_HEADERS) \
 		$(TARGET_DEVICE_KERNEL_HEADERS) $(TARGET_BOARD_KERNEL_HEADERS) \
 		$(TARGET_PRODUCT_KERNEL_HEADERS)
@@ -752,7 +773,7 @@
 HOST_CROSS_GLOBAL_CPPFLAGS += $(COMMON_GLOBAL_CPPFLAGS)
 HOST_CROSS_RELEASE_CPPFLAGS += $(COMMON_RELEASE_CPPFLAGS)
 HOST_CROSS_GLOBAL_LD_DIRS += -L$(HOST_CROSS_OUT_INTERMEDIATE_LIBRARIES)
-HOST_CROSS_PROJECT_INCLUDES:= $(SRC_HEADERS) $(SRC_HOST_HEADERS) $(HOST_CROSS_OUT_HEADERS)
+HOST_CROSS_PROJECT_INCLUDES:= $(SRC_HEADERS) $(HOST_CROSS_OUT_HEADERS)
 HOST_CROSS_GLOBAL_CFLAGS += $(HOST_CROSS_RELEASE_CFLAGS)
 HOST_CROSS_GLOBAL_CPPFLAGS += $(HOST_CROSS_RELEASE_CPPFLAGS)
 
@@ -762,7 +783,7 @@
 $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_GLOBAL_CPPFLAGS += $(COMMON_GLOBAL_CPPFLAGS)
 $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_RELEASE_CPPFLAGS += $(COMMON_RELEASE_CPPFLAGS)
 $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_GLOBAL_LD_DIRS += -L$($(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_INTERMEDIATE_LIBRARIES)
-$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_PROJECT_INCLUDES:= $(SRC_HEADERS) $(SRC_HOST_HEADERS) $($(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_HEADERS)
+$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_PROJECT_INCLUDES:= $(SRC_HEADERS) $($(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_HEADERS)
 $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_GLOBAL_CFLAGS += $($(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_RELEASE_CFLAGS)
 $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_GLOBAL_CPPFLAGS += $($(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_RELEASE_CPPFLAGS)
 endif
@@ -867,4 +888,12 @@
 RSCOMPAT_32BIT_ONLY_API_LEVELS := 8 9 10 11 12 13 14 15 16 17 18 19 20
 RSCOMPAT_NO_USAGEIO_API_LEVELS := 8 9 10 11 12 13
 
+ifeq ($(JAVA_NOT_REQUIRED),true)
+# Remove java and tools from our path so that we make sure nobody uses them.
+unexport ANDROID_JAVA_HOME
+unexport JAVA_HOME
+export ANDROID_BUILD_PATHS:=$(abspath $(BUILD_SYSTEM)/no_java_path):$(ANDROID_BUILD_PATHS)
+export PATH:=$(abspath $(BUILD_SYSTEM)/no_java_path):$(PATH)
+endif
+
 include $(BUILD_SYSTEM)/dumpvar.mk
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index 6e96880..3573f0a 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -100,10 +100,8 @@
   endif
 endif
 
-ifneq ($(filter address,$(my_sanitize)),)
-  # Frame pointer based unwinder in ASan requires ARM frame setup.
-  LOCAL_ARM_MODE := arm
-  my_cflags += $(ADDRESS_SANITIZER_CONFIG_EXTRA_CFLAGS)
+# If local or global modules need ASAN, add linker flags.
+ifneq ($(filter address,$(my_global_sanitize) $(my_sanitize)),)
   my_ldflags += $(ADDRESS_SANITIZER_CONFIG_EXTRA_LDFLAGS)
   ifdef LOCAL_IS_HOST_MODULE
     # -nodefaultlibs (provided with libc++) prevents the driver from linking
@@ -111,12 +109,20 @@
     my_ldlibs += -lm -lpthread
     my_ldflags += -Wl,--no-as-needed
   else
-    my_cflags += -mllvm -asan-globals=0
+    # Add asan libraries unless LOCAL_MODULE is the asan library.
     # ASan runtime library must be the first in the link order.
-    my_shared_libraries := $($(LOCAL_2ND_ARCH_VAR_PREFIX)ADDRESS_SANITIZER_RUNTIME_LIBRARY) \
-                           $(my_shared_libraries) \
-                           $(ADDRESS_SANITIZER_CONFIG_EXTRA_SHARED_LIBRARIES)
-    my_static_libraries += $(ADDRESS_SANITIZER_CONFIG_EXTRA_STATIC_LIBRARIES)
+    ifeq (,$(filter $(LOCAL_MODULE),$($(LOCAL_2ND_ARCH_VAR_PREFIX)ADDRESS_SANITIZER_RUNTIME_LIBRARY)))
+      my_shared_libraries := $($(LOCAL_2ND_ARCH_VAR_PREFIX)ADDRESS_SANITIZER_RUNTIME_LIBRARY) \
+                             $(my_shared_libraries)
+    endif
+    ifeq (,$(filter $(LOCAL_MODULE),$(ADDRESS_SANITIZER_CONFIG_EXTRA_STATIC_LIBRARIES)))
+      my_static_libraries += $(ADDRESS_SANITIZER_CONFIG_EXTRA_STATIC_LIBRARIES)
+    endif
+
+    # Do not add unnecessary dependency in shared libraries.
+    ifeq ($(LOCAL_MODULE_CLASS),SHARED_LIBRARIES)
+      my_ldflags += -Wl,--as-needed
+    endif
 
     my_linker := $($(LOCAL_2ND_ARCH_VAR_PREFIX)ADDRESS_SANITIZER_LINKER)
     # Make sure linker_asan get installed.
@@ -124,6 +130,16 @@
   endif
 endif
 
+# If local module needs ASAN, add compiler flags.
+ifneq ($(filter address,$(my_sanitize)),)
+  # Frame pointer based unwinder in ASan requires ARM frame setup.
+  LOCAL_ARM_MODE := arm
+  my_cflags += $(ADDRESS_SANITIZER_CONFIG_EXTRA_CFLAGS)
+  ifndef LOCAL_IS_HOST_MODULE
+    my_cflags += -mllvm -asan-globals=0
+  endif
+endif
+
 ifneq ($(filter undefined,$(my_sanitize)),)
   ifndef LOCAL_IS_HOST_MODULE
     $(error ubsan is not yet supported on the target)
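
The "unless LOCAL_MODULE is the asan library" guard is a plain $(filter) check that keeps the runtime first in the link order without ever linking it against itself. Stripped of the build-system plumbing it behaves like this standalone sketch (library and module names are hypothetical):

ADDRESS_SANITIZER_RUNTIME_LIBRARY := libclang_rt.asan-arm-android
LOCAL_MODULE := libexample
my_shared_libraries := libm libdl

ifeq (,$(filter $(LOCAL_MODULE),$(ADDRESS_SANITIZER_RUNTIME_LIBRARY)))
  my_shared_libraries := $(ADDRESS_SANITIZER_RUNTIME_LIBRARY) $(my_shared_libraries)
endif

$(info $(LOCAL_MODULE) links: $(my_shared_libraries))

demo: ;
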
diff --git a/core/configure_local_jack.mk b/core/configure_local_jack.mk
index 2270c88..446bab7 100644
--- a/core/configure_local_jack.mk
+++ b/core/configure_local_jack.mk
@@ -21,16 +21,9 @@
 LOCAL_JACK_ENABLED := $(strip $(LOCAL_JACK_ENABLED))
 LOCAL_MODULE := $(strip $(LOCAL_MODULE))
 
-ifneq ($(LOCAL_JACK_ENABLED),full)
-ifneq ($(LOCAL_JACK_ENABLED),incremental)
-ifdef LOCAL_JACK_ENABLED
-ifneq ($(LOCAL_JACK_ENABLED),disabled)
+ifeq ($(filter full incremental,$(LOCAL_JACK_ENABLED)),)
 $(error $(LOCAL_PATH): invalid LOCAL_JACK_ENABLED "$(LOCAL_JACK_ENABLED)" for $(LOCAL_MODULE))
 endif
-endif
-LOCAL_JACK_ENABLED :=
-endif
-endif
 
 ifdef $(LOCAL_MODULE).JACK_VERSION
 LOCAL_JACK_VERSION := $($(LOCAL_MODULE).JACK_VERSION)
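
The collapsed check relies on $(filter) returning the empty string when the value is not in the allowed set, so anything other than full or incremental now fails fast. A standalone sketch (the value assigned below is made up):

LOCAL_JACK_ENABLED := incremental

ifeq ($(filter full incremental,$(LOCAL_JACK_ENABLED)),)
  $(error invalid LOCAL_JACK_ENABLED "$(LOCAL_JACK_ENABLED)")
endif

$(info LOCAL_JACK_ENABLED=$(LOCAL_JACK_ENABLED) accepted)

demo: ;
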
diff --git a/core/cxx_stl_setup.mk b/core/cxx_stl_setup.mk
index 37be1f7..b4ba7a2 100644
--- a/core/cxx_stl_setup.mk
+++ b/core/cxx_stl_setup.mk
@@ -105,13 +105,13 @@
 else ifeq ($(my_cxx_stl),ndk)
     # Using an NDK STL. Handled in binary.mk.
 else ifeq ($(my_cxx_stl),libstdc++)
-    # Using bionic's basic libstdc++. Not actually an STL. Only around until the
-    # tree is in good enough shape to not need it.
     ifndef LOCAL_IS_HOST_MODULE
-        my_c_includes += bionic/libstdc++/include
-        my_system_shared_libraries += libstdc++
+        $(error $(LOCAL_PATH): $(LOCAL_MODULE): libstdc++ is not supported for device modules)
+    else
+        # Host builds will use the system C++. libc++ on Darwin, GNU libstdc++ everywhere else
+        my_cppflags += $($(my_prefix)SYSTEMCPP_CPPFLAGS)
+        my_ldflags += $($(my_prefix)SYSTEMCPP_LDFLAGS)
     endif
-    # Host builds will use GNU libstdc++.
 else ifeq ($(my_cxx_stl),none)
     ifdef LOCAL_IS_HOST_MODULE
         my_cppflags += -nostdinc++
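
After this change, requesting libstdc++ is only meaningful for host modules, where it now means "use the platform's system C++ library" (libc++ on Darwin through the HOST_SYSTEMCPP_* flags above, GNU libstdc++ elsewhere); on a device module it is a hard error. Assuming the usual LOCAL_CXX_STL knob is what feeds my_cxx_stl, a hypothetical host module fragment would be:

include $(CLEAR_VARS)
LOCAL_MODULE := example_host_tool
LOCAL_SRC_FILES := main.cpp
LOCAL_CXX_STL := libstdc++
include $(BUILD_HOST_EXECUTABLE)
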
diff --git a/core/definitions.mk b/core/definitions.mk
index e2d5de4..74514bd 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -55,10 +55,6 @@
 # its sub-variables.)
 ALL_MODULE_NAME_TAGS:=
 
-# Full paths to all prebuilt files that will be copied
-# (used to make the dependency on acp)
-ALL_PREBUILT:=
-
 # Full path to all files that are made by some tool
 ALL_GENERATED_SOURCES:=
 
@@ -133,7 +129,6 @@
 define my-dir
 $(strip \
   $(eval LOCAL_MODULE_MAKEFILE := $$(lastword $$(MAKEFILE_LIST))) \
-  $(eval LOCAL_MODULE_MAKEFILE_DEP := $(if $(BUILDING_WITH_NINJA),,$$(LOCAL_MODULE_MAKEFILE))) \
   $(if $(filter $(BUILD_SYSTEM)/% $(OUT_DIR)/%,$(LOCAL_MODULE_MAKEFILE)), \
     $(error my-dir must be called before including any other makefile.) \
    , \
@@ -149,7 +144,10 @@
 define filter-soong-makefiles
 $(foreach mk,$(1),\
   $(if $(wildcard $(patsubst %/Android.mk,%/Android.bp,$(mk))),\
-    $(info skipping $(mk) ...),\
+    $(if $(wildcard $(patsubst %/Android.mk,%/Android.soong.mk,$(mk))),\
+      $(info skipping $(mk), but including Android.soong.mk ...)\
+        $(patsubst %/Android.mk,%/Android.soong.mk,$(mk)),\
+      $(info skipping $(mk) ...)),\
     $(mk)))
 endef
 else
@@ -425,7 +423,7 @@
 
 define find-subdir-assets
 $(sort $(if $(1),$(patsubst ./%,%, \
-	$(shell if [ -d $(1) ] ; then cd $(1) ; find -L ./ -not -name '.*' -and -type f -and -not -type l ; fi)), \
+	$(shell if [ -d $(1) ] ; then cd $(1) ; find -L ./ -not -name '.*' -and -type f ; fi)), \
 	$(warning Empty argument supplied to find-subdir-assets) \
 ))
 endef
@@ -723,12 +721,6 @@
 $(foreach lib,$(1),$(call _jack-lib-full-classes,$(lib),$(2)))
 endef
 
-# $(1): library name list
-# $(2): Non-empty if IS_HOST_MODULE
-define jack-lib-deps
-$(call jack-lib-files,$(1),$(2))
-endef
-
 ###########################################################
 ## Run rot13 on a string
 ## $(1): the string.  Must be one line.
@@ -918,15 +910,9 @@
 ###########################################################
 # $(1): the .P file
 # $(2): the main build target
-ifeq ($(BUILDING_WITH_NINJA),true)
 define include-depfile
 $(eval $(2) : .KATI_DEPFILE := $1)
 endef
-else
-define include-depfile
-$(eval -include $1)
-endef
-endif
 
 # $(1): object files
 define include-depfiles-for-objs
@@ -1136,7 +1122,7 @@
 define transform-logtags-to-java
 @mkdir -p $(dir $@)
 @echo "logtags: $@ <= $<"
-$(hide) $(JAVATAGS) -o $@ $^
+$(hide) $(JAVATAGS) -o $@ $< $(PRIVATE_MERGED_TAG)
 endef
 
 
@@ -1169,6 +1155,9 @@
 	$(addprefix --proto_path=, $(PRIVATE_PROTO_INCLUDES)) \
 	$(PRIVATE_PROTOC_FLAGS) \
 	$<
+@# aprotoc outputs only .cc. Rename it to .cpp if necessary.
+$(if $(PRIVATE_RENAME_CPP_EXT),\
+  $(hide) mv $(basename $@).cc $@)
 endef
 
 
@@ -1201,12 +1190,9 @@
 ## Commands for running gcc to compile a C++ file
 ###########################################################
 
-define transform-cpp-to-o
-@echo "target $(PRIVATE_ARM_MODE) C++: $(PRIVATE_MODULE) <= $<"
-@mkdir -p $(dir $@)
-$(hide) $(RELATIVE_PWD) $(PRIVATE_CXX) \
+define transform-cpp-to-o-compiler-args
 	$(addprefix -I , $(PRIVATE_C_INCLUDES)) \
-	$(shell cat $(PRIVATE_IMPORT_INCLUDES)) \
+	$$(cat $(PRIVATE_IMPORT_INCLUDES)) \
 	$(addprefix -isystem ,\
 	    $(if $(PRIVATE_NO_DEFAULT_COMPILER_FLAGS),, \
 	        $(filter-out $(PRIVATE_C_INCLUDES), \
@@ -1223,22 +1209,42 @@
 	$(PRIVATE_CPPFLAGS) \
 	$(PRIVATE_DEBUG_CFLAGS) \
 	$(PRIVATE_CFLAGS_NO_OVERRIDE) \
-	$(PRIVATE_CPPFLAGS_NO_OVERRIDE) \
-	-MD -MF $(patsubst %.o,%.d,$@) -o $@ $<
-$(transform-d-to-p)
+	$(PRIVATE_CPPFLAGS_NO_OVERRIDE)
 endef
 
+define clang-tidy-cpp
+$(hide) $(PATH_TO_CLANG_TIDY) $(PRIVATE_TIDY_FLAGS) \
+  -checks=$(PRIVATE_TIDY_CHECKS) \
+  $< -- $(transform-cpp-to-o-compiler-args)
+endef
+
+ifneq (,$(filter 1 true,$(WITH_TIDY_ONLY)))
+define transform-cpp-to-o
+$(if $(PRIVATE_TIDY_CHECKS),
+  @echo "target tidy $(PRIVATE_ARM_MODE) C++: $<"
+  $(clang-tidy-cpp))
+endef
+else
+define transform-cpp-to-o
+@echo "target $(PRIVATE_ARM_MODE) C++: $(PRIVATE_MODULE) <= $<"
+@mkdir -p $(dir $@)
+$(if $(PRIVATE_TIDY_CHECKS),$(clang-tidy-cpp))
+$(hide) $(RELATIVE_PWD) $(PRIVATE_CXX) \
+  $(transform-cpp-to-o-compiler-args) \
+  -MD -MF $(patsubst %.o,%.d,$@) -o $@ $<
+$(hide) $(transform-d-to-p)
+endef
+endif
+
 
 ###########################################################
 ## Commands for running gcc to compile a C file
 ###########################################################
 
 # $(1): extra flags
-define transform-c-or-s-to-o-no-deps
-@mkdir -p $(dir $@)
-$(hide) $(RELATIVE_PWD) $(PRIVATE_CC) \
+define transform-c-or-s-to-o-compiler-args
 	$(addprefix -I , $(PRIVATE_C_INCLUDES)) \
-	$(shell cat $(PRIVATE_IMPORT_INCLUDES)) \
+	$$(cat $(PRIVATE_IMPORT_INCLUDES)) \
 	$(addprefix -isystem ,\
 	    $(if $(PRIVATE_NO_DEFAULT_COMPILER_FLAGS),, \
 	        $(filter-out $(PRIVATE_C_INCLUDES), \
@@ -1250,27 +1256,47 @@
 	    $(PRIVATE_TARGET_GLOBAL_CONLYFLAGS) \
 	    $(PRIVATE_ARM_CFLAGS) \
 	 ) \
-	 $(1) \
-	-MD -MF $(patsubst %.o,%.d,$@) -o $@ $<
+	 $(1)
 endef
 
-define transform-c-to-o-no-deps
-@echo "target $(PRIVATE_ARM_MODE) C: $(PRIVATE_MODULE) <= $<"
-$(call transform-c-or-s-to-o-no-deps, \
-    $(PRIVATE_CFLAGS) \
-    $(PRIVATE_CONLYFLAGS) \
-    $(PRIVATE_DEBUG_CFLAGS) \
-    $(PRIVATE_CFLAGS_NO_OVERRIDE))
+define transform-c-to-o-compiler-args
+$(call transform-c-or-s-to-o-compiler-args, \
+  $(PRIVATE_CFLAGS) \
+  $(PRIVATE_CONLYFLAGS) \
+  $(PRIVATE_DEBUG_CFLAGS) \
+  $(PRIVATE_CFLAGS_NO_OVERRIDE))
 endef
 
+define clang-tidy-c
+$(hide) $(PATH_TO_CLANG_TIDY) $(PRIVATE_TIDY_FLAGS) \
+  -checks=$(PRIVATE_TIDY_CHECKS) \
+  $< -- $(transform-c-to-o-compiler-args)
+endef
+
+ifneq (,$(filter 1 true,$(WITH_TIDY_ONLY)))
+define transform-c-to-o
+$(if $(PRIVATE_TIDY_CHECKS),
+  @echo "target tidy $(PRIVATE_ARM_MODE) C: $<"
+  $(clang-tidy-c))
+endef
+else
+define transform-c-to-o
+@echo "target $(PRIVATE_ARM_MODE) C: $(PRIVATE_MODULE) <= $<"
+@mkdir -p $(dir $@)
+$(if $(PRIVATE_TIDY_CHECKS),$(clang-tidy-c))
+$(hide) $(RELATIVE_PWD) $(PRIVATE_CC) \
+  $(transform-c-to-o-compiler-args) \
+  -MD -MF $(patsubst %.o,%.d,$@) -o $@ $<
+$(hide) $(transform-d-to-p)
+endef
+endif
+
 define transform-s-to-o-no-deps
 @echo "target asm: $(PRIVATE_MODULE) <= $<"
-$(call transform-c-or-s-to-o-no-deps, $(PRIVATE_ASFLAGS))
-endef
-
-define transform-c-to-o
-$(transform-c-to-o-no-deps)
-$(transform-d-to-p)
+@mkdir -p $(dir $@)
+$(RELATIVE_PWD) $(PRIVATE_CC) \
+  $(call transform-c-or-s-to-o-compiler-args, $(PRIVATE_ASFLAGS)) \
+  -MD -MF $(patsubst %.o,%.d,$@) -o $@ $<
 endef
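
Every compile macro now writes a dependency file next to the object via -MD -MF and registers it through include-depfile, which after this change is purely kati's .KATI_DEPFILE hook. For plain GNU make the equivalent wiring is the classic generated-depfile include; a self-contained sketch with made-up sources:

SRCS := main.c util.c
OBJS := $(SRCS:.c=.o)

prog: $(OBJS)
	$(CC) -o $@ $(OBJS)

# -MD -MF emits a makefile fragment listing every header the object pulled in.
%.o: %.c
	$(CC) -MD -MF $(patsubst %.o,%.d,$@) -c -o $@ $<

# Pull the fragments back in; '-' ignores the ones missing on a first build.
-include $(OBJS:.o=.d)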
 
 define transform-s-to-o
@@ -1308,12 +1334,9 @@
 ## Commands for running gcc to compile a host C++ file
 ###########################################################
 
-define transform-host-cpp-to-o
-@echo "$($(PRIVATE_PREFIX)DISPLAY) C++: $(PRIVATE_MODULE) <= $<"
-@mkdir -p $(dir $@)
-$(hide) $(RELATIVE_PWD) $(PRIVATE_CXX) \
+define transform-host-cpp-to-o-compiler-args
 	$(addprefix -I , $(PRIVATE_C_INCLUDES)) \
-	$(shell cat $(PRIVATE_IMPORT_INCLUDES)) \
+	$$(cat $(PRIVATE_IMPORT_INCLUDES)) \
 	$(addprefix -isystem ,\
 	    $(if $(PRIVATE_NO_DEFAULT_COMPILER_FLAGS),, \
 	        $(filter-out $(PRIVATE_C_INCLUDES), \
@@ -1328,22 +1351,41 @@
 	$(PRIVATE_CPPFLAGS) \
 	$(PRIVATE_DEBUG_CFLAGS) \
 	$(PRIVATE_CFLAGS_NO_OVERRIDE) \
-	$(PRIVATE_CPPFLAGS_NO_OVERRIDE) \
-	-MD -MF $(patsubst %.o,%.d,$@) -o $@ $<
-$(transform-d-to-p)
+	$(PRIVATE_CPPFLAGS_NO_OVERRIDE)
 endef
 
+define clang-tidy-host-cpp
+$(hide) $(PATH_TO_CLANG_TIDY) $(PRIVATE_TIDY_FLAGS) \
+  -checks=$(PRIVATE_TIDY_CHECKS) \
+  $< -- $(transform-host-cpp-to-o-compiler-args)
+endef
+
+ifneq (,$(filter 1 true,$(WITH_TIDY_ONLY)))
+define transform-host-cpp-to-o
+$(if $(PRIVATE_TIDY_CHECKS),
+  @echo "tidy $($(PRIVATE_PREFIX)DISPLAY) C++: $<"
+  $(clang-tidy-host-cpp))
+endef
+else
+define transform-host-cpp-to-o
+@echo "$($(PRIVATE_PREFIX)DISPLAY) C++: $(PRIVATE_MODULE) <= $<"
+@mkdir -p $(dir $@)
+$(if $(PRIVATE_TIDY_CHECKS),$(clang-tidy-host-cpp))
+$(hide) $(RELATIVE_PWD) $(PRIVATE_CXX) \
+  $(transform-host-cpp-to-o-compiler-args) \
+  -MD -MF $(patsubst %.o,%.d,$@) -o $@ $<
+$(hide) $(transform-d-to-p)
+endef
+endif
+
 
 ###########################################################
 ## Commands for running gcc to compile a host C file
 ###########################################################
 
-# $(1): extra flags
-define transform-host-c-or-s-to-o-no-deps
-@mkdir -p $(dir $@)
-$(hide) $(RELATIVE_PWD) $(PRIVATE_CC) \
+define transform-host-c-or-s-to-o-common-args
 	$(addprefix -I , $(PRIVATE_C_INCLUDES)) \
-	$(shell cat $(PRIVATE_IMPORT_INCLUDES)) \
+	$$(cat $(PRIVATE_IMPORT_INCLUDES)) \
 	$(addprefix -isystem ,\
 	    $(if $(PRIVATE_NO_DEFAULT_COMPILER_FLAGS),, \
 	        $(filter-out $(PRIVATE_C_INCLUDES), \
@@ -1353,27 +1395,53 @@
 	$(if $(PRIVATE_NO_DEFAULT_COMPILER_FLAGS),, \
 	    $(PRIVATE_HOST_GLOBAL_CFLAGS) \
 	    $(PRIVATE_HOST_GLOBAL_CONLYFLAGS) \
-	 ) \
-	$(1) \
-	$(PRIVATE_CFLAGS_NO_OVERRIDE) \
-	-MD -MF $(patsubst %.o,%.d,$@) -o $@ $<
+	 )
 endef
 
-define transform-host-c-to-o-no-deps
-@echo "$($(PRIVATE_PREFIX)DISPLAY) C: $(PRIVATE_MODULE) <= $<"
-$(call transform-host-c-or-s-to-o-no-deps, $(PRIVATE_CFLAGS) $(PRIVATE_CONLYFLAGS) $(PRIVATE_DEBUG_CFLAGS))
+# $(1): extra flags
+define transform-host-c-or-s-to-o-no-deps
+@mkdir -p $(dir $@)
+$(hide) $(RELATIVE_PWD) $(PRIVATE_CC) \
+  $(transform-host-c-or-s-to-o-common-args) \
+  $(1) \
+  -MD -MF $(patsubst %.o,%.d,$@) -o $@ $<
 endef
 
+define transform-host-c-to-o-compiler-args
+  $(transform-host-c-or-s-to-o-common-args) \
+  $(PRIVATE_CFLAGS) $(PRIVATE_CONLYFLAGS) \
+  $(PRIVATE_DEBUG_CFLAGS) $(PRIVATE_CFLAGS_NO_OVERRIDE)
+endef
+
+define clang-tidy-host-c
+$(hide) $(PATH_TO_CLANG_TIDY) $(PRIVATE_TIDY_FLAGS) \
+  -checks=$(PRIVATE_TIDY_CHECKS) \
+  $< -- $(transform-host-c-to-o-compiler-args)
+endef
+
+ifneq (,$(filter 1 true,$(WITH_TIDY_ONLY)))
+define transform-host-c-to-o
+$(if $(PRIVATE_TIDY_CHECKS),
+  @echo "tidy $($(PRIVATE_PREFIX)DISPLAY) C: $<"
+  $(clang-tidy-host-c))
+endef
+else
+define transform-host-c-to-o
+@echo "$($(PRIVATE_PREFIX)DISPLAY) C: $(PRIVATE_MODULE) <= $<"
+@mkdir -p $(dir $@)
+$(if $(PRIVATE_TIDY_CHECKS), $(clang-tidy-host-c))
+$(hide) $(RELATIVE_PWD) $(PRIVATE_CC) \
+  $(transform-host-c-to-o-compiler-args) \
+  -MD -MF $(patsubst %.o,%.d,$@) -o $@ $<
+$(hide) $(transform-d-to-p)
+endef
+endif
+
 define transform-host-s-to-o-no-deps
 @echo "$($(PRIVATE_PREFIX)DISPLAY) asm: $(PRIVATE_MODULE) <= $<"
 $(call transform-host-c-or-s-to-o-no-deps, $(PRIVATE_ASFLAGS))
 endef
 
-define transform-host-c-to-o
-$(transform-host-c-to-o-no-deps)
-$(transform-d-to-p)
-endef
-
 define transform-host-s-to-o
 $(transform-host-s-to-o-no-deps)
 $(transform-d-to-p)
@@ -1385,7 +1453,7 @@
 
 define transform-host-m-to-o-no-deps
 @echo "$($(PRIVATE_PREFIX)DISPLAY) ObjC: $(PRIVATE_MODULE) <= $<"
-$(call transform-host-c-or-s-to-o-no-deps, $(PRIVATE_CFLAGS) $(PRIVATE_DEBUG_CFLAGS))
+$(call transform-host-c-or-s-to-o-no-deps, $(PRIVATE_CFLAGS) $(PRIVATE_DEBUG_CFLAGS) $(PRIVATE_CFLAGS_NO_OVERRIDE))
 endef
 
 define transform-host-m-to-o
@@ -1569,6 +1637,23 @@
     $(call _extract-and-include-single-host-whole-static-lib, $(lib)))
 endef
 
+ifeq ($(HOST_OS),darwin)
+# On Darwin the host ar fails if there is nothing at all to add to the .a.
+# We work around this by adding a dummy.o and then deleting it.
+define create-dummy.o-if-no-objs
+$(if $(PRIVATE_ALL_OBJECTS),,$(hide) touch $(dir $@)dummy.o)
+endef
+
+define get-dummy.o-if-no-objs
+$(if $(PRIVATE_ALL_OBJECTS),,$(dir $@)dummy.o)
+endef
+
+define delete-dummy.o-if-no-objs
+$(if $(PRIVATE_ALL_OBJECTS),,$(hide) $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)AR) d $@ $(dir $@)dummy.o \
+  && rm -f $(dir $@)dummy.o)
+endef
+endif  # HOST_OS is darwin
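
The Darwin-only helpers above keep ar from failing when a static library ends up with no objects: an empty dummy.o is touched, archived, and then deleted from the archive again. The same trick in a standalone makefile (hypothetical library name, generic ar):

# The object list may legitimately be empty.
OBJS :=

libfoo.a: $(OBJS)
	rm -f $@
	$(if $(OBJS),,touch dummy.o)
	ar crs $@ $(OBJS) $(if $(OBJS),,dummy.o)
	$(if $(OBJS),,ar d $@ dummy.o && rm -f dummy.o)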
+
 # Explicitly delete the archive first so that ar doesn't
 # try to add to an existing archive.
 define transform-host-o-to-static-lib
@@ -1576,9 +1661,11 @@
 @mkdir -p $(dir $@)
 @rm -f $@
 $(extract-and-include-host-whole-static-libs)
+$(create-dummy.o-if-no-objs)
 $(call split-long-arguments,$($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)AR) \
     $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)GLOBAL_ARFLAGS) \
-    $(PRIVATE_ARFLAGS) $@,$(PRIVATE_ALL_OBJECTS))
+    $(PRIVATE_ARFLAGS) $@,$(PRIVATE_ALL_OBJECTS) $(get-dummy.o-if-no-objs))
+$(delete-dummy.o-if-no-objs)
 endef
 
 
@@ -1649,10 +1736,10 @@
 	$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_TARGET_COVERAGE_LIB)) \
 	$(PRIVATE_TARGET_LIBATOMIC) \
 	$(PRIVATE_TARGET_LIBGCC) \
-	$(call normalize-target-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
-	-o $@ \
 	$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
 	$(PRIVATE_LDFLAGS) \
+	$(call normalize-target-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
+	-o $@ \
 	$(PRIVATE_TARGET_CRTEND_SO_O) \
 	$(PRIVATE_LDLIBS)
 endef
@@ -1679,6 +1766,23 @@
   $(if $(PRIVATE_NO_DEBUGLINK),,$(TARGET_STRIP_EXTRA))
 endef
 
+define transform-to-stripped-keep-mini-debug-info
+@echo "target Strip (mini debug info): $(PRIVATE_MODULE) ($@)"
+@mkdir -p $(dir $@)
+$(hide) $(PRIVATE_NM) -D $< --format=posix --defined-only | awk '{ print $$1 }' | sort >$@.dynsyms
+$(hide) $(PRIVATE_NM) $< --format=posix --defined-only | awk '{ if ($$2 == "T" || $$2 == "t" || $$2 == "D") print $$1 }' | sort >$@.funcsyms
+$(hide) comm -13 $@.dynsyms $@.funcsyms >$@.keep_symbols
+$(hide) $(PRIVATE_OBJCOPY) --only-keep-debug $< $@.debug
+$(hide) $(PRIVATE_OBJCOPY) --rename-section .debug_frame=saved_debug_frame $@.debug $@.mini_debuginfo
+$(hide) $(PRIVATE_OBJCOPY) -S --remove-section .gdb_index --remove-section .comment --keep-symbols=$@.keep_symbols $@.mini_debuginfo
+$(hide) $(PRIVATE_OBJCOPY) --rename-section saved_debug_frame=.debug_frame $@.mini_debuginfo
+$(hide) $(PRIVATE_STRIP) --strip-all -R .comment $< -o $@
+$(hide) rm -f $@.mini_debuginfo.xz
+$(hide) xz $@.mini_debuginfo
+$(hide) $(PRIVATE_OBJCOPY) --add-section .gnu_debugdata=$@.mini_debuginfo.xz $@
+$(hide) rm -f $@.dynsyms $@.funcsyms $@.keep_symbols $@.debug $@.mini_debuginfo.xz
+endef
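
transform-to-stripped-keep-mini-debug-info packs the kept symbols and .debug_frame into an xz-compressed .gnu_debugdata section, which gdb reads as MiniDebugInfo. A small sketch, separate from the patch, for checking that a stripped binary really carries the section and for unpacking it (assumes a binutils objcopy recent enough for --dump-section):

# BIN is whatever stripped library or executable you want to inspect.
BIN ?= libfoo.so

.PHONY: show-minidebug
show-minidebug:
	readelf -SW $(BIN) | grep -F .gnu_debugdata
	objcopy --dump-section .gnu_debugdata=$(BIN).mdd.xz $(BIN) $(BIN).scratch && rm -f $(BIN).scratch
	xz -d -f $(BIN).mdd.xz
	readelf -h -s -W $(BIN).mdd | head -n 25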
+
 define transform-to-stripped-keep-symbols
 @echo "target Strip (keep symbols): $(PRIVATE_MODULE) ($@)"
 @mkdir -p $(dir $@)
@@ -1720,10 +1824,10 @@
 	$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_TARGET_COVERAGE_LIB)) \
 	$(PRIVATE_TARGET_LIBATOMIC) \
 	$(PRIVATE_TARGET_LIBGCC) \
-	$(call normalize-target-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
-	-o $@ \
 	$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
 	$(PRIVATE_LDFLAGS) \
+	$(call normalize-target-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
+	-o $@ \
 	$(PRIVATE_TARGET_CRTEND_O) \
 	$(PRIVATE_LDLIBS)
 endef
@@ -2064,7 +2168,6 @@
 $(if $(PRIVATE_JAR_EXCLUDE_PACKAGES), $(hide) rm -rf \
     $(foreach pkg, $(PRIVATE_JAR_EXCLUDE_PACKAGES), \
         $(PRIVATE_CLASS_INTERMEDIATES_DIR)/$(subst .,/,$(pkg))))
-$(if $(PRIVATE_RMTYPEDEFS), $(hide) $(RMTYPEDEFS) -v $(PRIVATE_CLASS_INTERMEDIATES_DIR))
 $(if $(PRIVATE_JAR_MANIFEST), \
     $(hide) sed -e "s/%BUILD_NUMBER%/$(BUILD_NUMBER_FROM_FILE)/" \
             $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf && \
@@ -2122,7 +2225,7 @@
     $(if $(PRIVATE_RMTYPEDEFS), \
         -D jack.android.remove-typedef="true") \
     $(addprefix --classpath ,$(strip \
-        $(call normalize-path-list,$(PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES) $(PRIVATE_ALL_JACK_LIBRARIES)))) \
+        $(call normalize-path-list,$(PRIVATE_JACK_SHARED_LIBRARIES)))) \
     $(addprefix --import ,$(call reverse-list,$(PRIVATE_STATIC_JACK_LIBRARIES))) \
     $(if $(PRIVATE_EXTRA_JAR_ARGS),--import-resource $@.res.tmp) \
     -D jack.android.min-api-level=$(PRIVATE_JACK_MIN_SDK_VERSION) \
@@ -2163,11 +2266,10 @@
 $(hide) tr ' ' '\n' < $@.java-source-list \
     | sort -u > $@.java-source-list-uniq
 $(hide) if [ -s $@.java-source-list-uniq ] ; then \
-	$(call call-jack,$(PRIVATE_JACK_EXTRA_ARGS)) \
+	$(call call-jack) \
 	    $(strip $(PRIVATE_JACK_FLAGS)) \
-	    $(strip $(PRIVATE_JACK_DEBUG_FLAGS)) \
 	    $(addprefix --classpath ,$(strip \
-	        $(call normalize-path-list,$(call reverse-list,$(PRIVATE_STATIC_JACK_LIBRARIES)) $(PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES) $(PRIVATE_ALL_JACK_LIBRARIES)))) \
+	        $(call normalize-path-list,$(call reverse-list,$(PRIVATE_STATIC_JACK_LIBRARIES)) $(PRIVATE_JACK_SHARED_LIBRARIES)))) \
 	    -D jack.import.resource.policy=keep-first \
 	    -D jack.android.min-api-level=$(PRIVATE_JACK_MIN_SDK_VERSION) \
 	    -D jack.import.type.policy=keep-first \
@@ -2196,9 +2298,7 @@
 # Moves $1.tmp to $1 if necessary. This is designed to be used with
 # .KATI_RESTAT. For kati, this function doesn't update the timestamp
 # of $1 when $1.tmp is identical to $1 so that ninja won't rebuild
-# targets which depend on $1. For GNU make, this function simply
-# copies $1.tmp to $1.
-ifeq ($(BUILDING_WITH_NINJA),true)
+# targets which depend on $1.
 define commit-change-for-toc
 $(hide) if cmp -s $1.tmp $1 ; then \
  rm $1.tmp ; \
@@ -2206,12 +2306,6 @@
  mv $1.tmp $1 ; \
 fi
 endef
-else
-define commit-change-for-toc
-@# make doesn't support restat. We always update .toc files so the dependents will always be updated too.
-$(hide) mv $1.tmp $1
-endef
-endif
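
commit-change-for-toc is the usual write-to-.tmp-and-rename-only-if-changed idiom: the target's timestamp is left alone when the regenerated contents are identical, and the .KATI_RESTAT marking lets the ninja backend prune rebuilds of everything downstream. The same shape in a standalone makefile (the "toc" here is just the jar's member list; under plain GNU make the .KATI_RESTAT line is an inert extra rule):

classes.toc: classes.jar
	unzip -Z1 $< | sort > $@.tmp
	if cmp -s $@.tmp $@; then rm $@.tmp; else mv $@.tmp $@; fi
.KATI_RESTAT: classes.toc

# Downstream rules depend on the .toc rather than on the jar itself, so a
# rebuilt-but-identical jar does not ripple any further.
linked.stamp: classes.toc
	touch $@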
 
 ## Rule to create a table of contents from a .jar file.
 ## Must be called with $(eval).
@@ -2297,7 +2391,7 @@
     $(if $(NO_OPTIMIZE_DX), \
         -D jack.dex.optimize="false") \
     $(addprefix --classpath ,$(strip \
-        $(call normalize-path-list,$(PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES) $(PRIVATE_ALL_JACK_LIBRARIES)))) \
+        $(call normalize-path-list,$(PRIVATE_JACK_SHARED_LIBRARIES)))) \
     $(addprefix --import ,$(call reverse-list,$(PRIVATE_STATIC_JACK_LIBRARIES))) \
     $(if $(PRIVATE_EXTRA_JAR_ARGS),--import-resource $@.res.tmp) \
     -D jack.import.resource.policy=keep-first \
@@ -2342,17 +2436,6 @@
 $(call create-empty-package-at,$@)
 endef
 
-# Copy an arhchive file and delete any class files and empty folders inside.
-# $(1): the source archive file.
-# $(2): the destination archive file.
-define initialize-package-file
-@mkdir -p $(dir $(2))
-$(hide) cp -f $(1) $(2)
-$(hide) zip -qd $(2) "*.class" \
-    $(if $(strip $(PRIVATE_DONT_DELETE_JAR_DIRS)),,"*/") \
-    || true # Ignore the error when nothing to delete.
-endef
-
 #TODO: we kinda want to build different asset packages for
 #      different configurations, then combine them later (or something).
 #      Per-locale, etc.
@@ -2506,19 +2589,6 @@
   fi
 endef
 
-define install-dex-debug
-$(hide) if [ -f "$(PRIVATE_INTERMEDIATES_DIR)/classes.dex" ]; then \
-	    mkdir -p $(TOP)/dalvik/DEBUG-FILES; \
-	    $(ACP) $(PRIVATE_INTERMEDIATES_DIR)/classes.dex \
-		$(TOP)/dalvik/DEBUG-FILES/$(PRIVATE_MODULE).dex; \
-	fi
-$(hide) if [ -f "$(PRIVATE_INTERMEDIATES_DIR)/classes.lst" ]; then \
-	    mkdir -p $(TOP)/dalvik/DEBUG-FILES; \
-	    $(ACP) $(PRIVATE_INTERMEDIATES_DIR)/classes.lst \
-		$(TOP)/dalvik/DEBUG-FILES/$(PRIVATE_MODULE).lst; \
-	fi
-endef
-
 # TODO(joeo): If we can ever upgrade to post 3.81 make and get the
 # new prebuilt rules to work, we should change this to copy the
 # resources to the out directory and then copy the resources.
@@ -2547,7 +2617,7 @@
 # $(1): source file
 # $(2): destination file
 define copy-one-file
-$(2): $(1) | $(ACP)
+$(2): $(1)
 	@echo "Copy: $$@"
 	$$(copy-file-to-target)
 endef
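
copy-one-file and the add-radio-file helpers below are rule templates: they are expanded via $(call ...) inside $(eval ...), which is why automatic variables and nested calls are written as $$@ and $$(copy-file-to-target) -- they must survive the first expansion and only resolve when the generated rule runs. A cut-down standalone version with invented paths:

define my-copy-one-file
$(2): $(1)
	@echo "Copy: $$@"
	@mkdir -p $$(dir $$@)
	cp -f $$< $$@
endef

# Each $(eval $(call ...)) stamps out one concrete copy rule.
$(eval $(call my-copy-one-file,input/hosts,out/system/etc/hosts))
$(eval $(call my-copy-one-file,input/fstab,out/root/fstab))

.DEFAULT_GOAL := all
all: out/system/etc/hosts out/root/fstab
.PHONY: all
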
@@ -2568,7 +2638,7 @@
 # $(1): source file
 # $(2): destination file, must end with .xml.
 define copy-xml-file-checked
-$(2): $(1) | $(ACP)
+$(2): $(1)
 	@echo "Copy xml: $$@"
 	$(hide) xmllint $$< >/dev/null  # Don't print the xml file to stdout.
 	$$(copy-file-to-target)
@@ -2583,24 +2653,29 @@
 # Copy a single file from one place to another,
 # preserving permissions and overwriting any existing
 # file.
-# We disable the "-t" option for acp cannot handle
-# high resolution timestamp correctly on file systems like ext4.
-# Therefore copy-file-to-target is the same as copy-file-to-new-target.
+# When we used acp, it could not handle high-resolution timestamps
+# on file systems like ext4, so the '-t' option was disabled and
+# copy-file-to-target was identical to copy-file-to-new-target.
+# Keep that behavior until we have audited it and are sure that
+# switching back won't break anything.
 define copy-file-to-target
 @mkdir -p $(dir $@)
-$(hide) $(ACP) -fp $< $@
+$(hide) rm -f $@
+$(hide) cp $< $@
 endef
 
 # The same as copy-file-to-target, but use the local
 # cp command instead of acp.
 define copy-file-to-target-with-cp
 @mkdir -p $(dir $@)
-$(hide) cp -fp $< $@
+$(hide) rm -f $@
+$(hide) cp -p $< $@
 endef
 
 # The same as copy-file-to-target, but use the zipalign tool to do so.
 define copy-file-to-target-with-zipalign
 @mkdir -p $(dir $@)
+$(hide) rm -f $@
 $(hide) $(ZIPALIGN) -f 4 $< $@
 endef
 
@@ -2608,6 +2683,7 @@
 # comments (for config files and such).
 define copy-file-to-target-strip-comments
 @mkdir -p $(dir $@)
+$(hide) rm -f $@
 $(hide) sed -e 's/#.*$$//' -e 's/[ \t]*$$//' -e '/^$$/d' < $< > $@
 endef
 
@@ -2615,14 +2691,16 @@
 # the old modification time.
 define copy-file-to-new-target
 @mkdir -p $(dir $@)
-$(hide) $(ACP) -fp $< $@
+$(hide) rm -f $@
+$(hide) cp $< $@
 endef
 
 # The same as copy-file-to-new-target, but use the local
 # cp command instead of acp.
 define copy-file-to-new-target-with-cp
 @mkdir -p $(dir $@)
-$(hide) cp -f $< $@
+$(hide) rm -f $@
+$(hide) cp $< $@
 endef
 
 # Copy a prebuilt file to a target location.
@@ -2656,6 +2734,35 @@
 
 
 ###########################################################
+## Commands to copy toolchain libraries
+###########################################################
+ifneq ($(USE_SOONG),true)
+# Used when Soong isn't defining our toolchain libraries
+# $(1): Name of library (libgcc, etc)
+define copy-toolchain-library
+$(call copy-toolchain-library-internal,\
+  $(call intermediates-dir-for,STATIC_LIBRARIES,$(1))/$(1).a,,$(1))
+ifdef TARGET_2ND_ARCH
+$(call copy-toolchain-library-internal,\
+  $(call intermediates-dir-for,STATIC_LIBRARIES,$(1),,,2ND_)/$(1).a,2ND_,$(1))
+endif
+endef
+
+# $(1): the intermediates library path
+# $(2): whether this is the 2nd target architecture
+# $(3): the name of the library without the extension
+define copy-toolchain-library-internal
+$(1): build/soong/scripts/copygcclib.sh $($(2)TARGET_CC)
+	@echo "Toolchain library: $(3)"
+	@mkdir -p $$(dir $$@)
+	$$(hide) rm -f $$@
+	$$(hide) build/soong/scripts/copygcclib.sh $$@ $($(2)TARGET_CC) $($(2)TARGET_GLOBAL_CFLAGS) -print-file-name=$(3).a
+
+$(call include-depfile,$(1).d,$(1))
+endef
+endif
+
+###########################################################
 ## Commands to call Proguard
 ###########################################################
 define transform-jar-to-proguard
@@ -2751,7 +2858,7 @@
 endef
 define add-radio-file-internal
 INSTALLED_RADIOIMAGE_TARGET += $$(PRODUCT_OUT)/$(2)
-$$(PRODUCT_OUT)/$(2) : $$(LOCAL_PATH)/$(1) | $$(ACP)
+$$(PRODUCT_OUT)/$(2) : $$(LOCAL_PATH)/$(1)
 	$$(transform-prebuilt-to-target)
 endef
 
@@ -2766,7 +2873,7 @@
 define add-radio-file-checked-internal
 INSTALLED_RADIOIMAGE_TARGET += $$(PRODUCT_OUT)/$(2)
 BOARD_INFO_CHECK += $(3):$(LOCAL_PATH)/$(1)
-$$(PRODUCT_OUT)/$(2) : $$(LOCAL_PATH)/$(1) | $$(ACP)
+$$(PRODUCT_OUT)/$(2) : $$(LOCAL_PATH)/$(1)
 	$$(transform-prebuilt-to-target)
 endef
 
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index d182dc0..fa8bb19 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -57,7 +57,7 @@
 _dbj_jar_no_dex := $(DEXPREOPT_BOOT_JAR_DIR_FULL_PATH)/$(1)_nodex.jar
 _dbj_src_jar := $(call intermediates-dir-for,JAVA_LIBRARIES,$(1),,COMMON)/javalib.jar
 
-$$(_dbj_jar_no_dex) : $$(_dbj_src_jar) | $(ACP)
+$$(_dbj_jar_no_dex) : $$(_dbj_src_jar)
 	$$(call copy-file-to-target)
 ifneq ($(DEX_PREOPT_DEFAULT),nostripping)
 	$$(call dexpreopt-remove-classes.dex,$$@)
diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk
index 2a7ffb7..feed330 100644
--- a/core/dex_preopt_libart.mk
+++ b/core/dex_preopt_libart.mk
@@ -77,11 +77,13 @@
 my_2nd_arch_prefix :=
 include $(BUILD_SYSTEM)/dex_preopt_libart_boot.mk
 
+ifneq ($(TARGET_TRANSLATE_2ND_ARCH),true)
 ifdef TARGET_2ND_ARCH
 my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
 include $(BUILD_SYSTEM)/dex_preopt_libart_boot.mk
 my_2nd_arch_prefix :=
 endif
+endif
 
 
 ########################################################################
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 4e486d5..b7ecf2e 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -68,10 +68,12 @@
 # #################################################
 # Odex for the 2nd arch
 ifdef TARGET_2ND_ARCH
+ifneq ($(TARGET_TRANSLATE_2ND_ARCH),true)
 ifneq (first,$(my_module_multilib))
 my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
 include $(BUILD_SYSTEM)/setup_one_odex.mk
 endif  # my_module_multilib is not first.
+endif  # TARGET_TRANSLATE_2ND_ARCH not true
 endif  # TARGET_2ND_ARCH
 # #################################################
 else  # must be APPS
@@ -104,8 +106,7 @@
 # Use pattern rule - we may have multiple installed odex files.
 # Ugly syntax - See the definition get-odex-file-path.
 $(installed_odex) : $(dir $(LOCAL_INSTALLED_MODULE))%$(notdir $(word 1,$(installed_odex))) \
-                  : $(dir $(LOCAL_BUILT_MODULE))%$(notdir $(word 1,$(built_odex))) \
-    | $(ACP)
+                  : $(dir $(LOCAL_BUILT_MODULE))%$(notdir $(word 1,$(built_odex)))
 	@echo "Install: $@"
 	$(copy-file-to-target)
 endif
diff --git a/core/dpi_specific_apk.mk b/core/dpi_specific_apk.mk
index 6bae25d..bcc5c18 100644
--- a/core/dpi_specific_apk.mk
+++ b/core/dpi_specific_apk.mk
@@ -5,6 +5,7 @@
 dpi_apk_name := $(LOCAL_MODULE)_$(my_dpi)
 dpi_intermediate := $(call intermediates-dir-for,APPS,$(dpi_apk_name))
 built_dpi_apk := $(dpi_intermediate)/package.apk
+additional_certificates := $(foreach c,$(LOCAL_ADDITIONAL_CERTIFICATES), $(c).x509.pem $(c).pk8)
 
 # Set up all the target-specific variables.
 $(built_dpi_apk): PRIVATE_MODULE := $(dpi_apk_name)
@@ -27,18 +28,12 @@
 $(built_dpi_apk): PRIVATE_JNI_SHARED_LIBRARIES_ABI := $(jni_shared_libraries_abis)
 $(built_dpi_apk): PRIVATE_PRIVATE_KEY := $(private_key)
 $(built_dpi_apk): PRIVATE_CERTIFICATE := $(certificate)
-$(built_dpi_apk): PRIVATE_ADDITIONAL_CERTIFICATES := $(foreach c,\
-    $(LOCAL_ADDITIONAL_CERTIFICATES), $(c).x509.pem $(c).pk8)
+$(built_dpi_apk): $(additional_certificates)
+$(built_dpi_apk): PRIVATE_ADDITIONAL_CERTIFICATES := $(additional_certificates)
 
-$(built_dpi_apk): PRIVATE_SOURCE_ARCHIVE :=
 ifneq ($(full_classes_jar),)
-$(built_dpi_apk): PRIVATE_DEX_FILE := $(built_dex)
-ifndef LOCAL_JACK_ENABLED
-# Use the jarjar processed arhive as the initial package file.
-$(built_dpi_apk): PRIVATE_SOURCE_ARCHIVE := $(full_classes_jarjar_jar)
-else
 $(built_dpi_apk): PRIVATE_JACK_INTERMEDIATES_DIR := $(intermediates.COMMON)/jack-rsc
-endif # LOCAL_JACK_ENABLED
+$(built_dpi_apk): PRIVATE_DEX_FILE := $(built_dex)
 $(built_dpi_apk): $(built_dex)
 else
 $(built_dpi_apk): PRIVATE_DEX_FILE :=
@@ -51,9 +46,7 @@
 $(built_dpi_apk) : $(AAPT)
 $(built_dpi_apk) : $(all_res_assets) $(jni_shared_libraries) $(full_android_manifest)
 	@echo "target Package: $(PRIVATE_MODULE) ($@)"
-	$(if $(PRIVATE_SOURCE_ARCHIVE),\
-	  $(call initialize-package-file,$(PRIVATE_SOURCE_ARCHIVE),$@),\
-	  $(create-empty-package))
+	$(create-empty-package)
 	$(add-assets-to-package)
 ifneq ($(jni_shared_libraries),)
 	$(add-jni-shared-libs-to-package)
@@ -63,10 +56,8 @@
 	$(if $(PRIVATE_EXTRA_JAR_ARGS),$(call add-java-resources-to,$@))
 else
 	$(add-dex-to-package)
-ifdef LOCAL_JACK_ENABLED
 	$(add-carried-jack-resources)
 endif
-endif
 	$(sign-package)
 
 # Set up global variables to register this apk to the higher-level dependency graph.
diff --git a/core/droiddoc.mk b/core/droiddoc.mk
index f143579..f98df12 100644
--- a/core/droiddoc.mk
+++ b/core/droiddoc.mk
@@ -165,7 +165,6 @@
         $(droiddoc) \
         $(html_dir_files) \
         $(full_java_lib_deps) \
-        $(LOCAL_MODULE_MAKEFILE_DEP) \
         $(LOCAL_ADDITIONAL_DEPENDENCIES)
 	@echo Docs droiddoc: $(PRIVATE_OUT_DIR)
 	$(hide) mkdir -p $(dir $@)
diff --git a/core/dynamic_binary.mk b/core/dynamic_binary.mk
index 91fd271..58f76b0 100644
--- a/core/dynamic_binary.mk
+++ b/core/dynamic_binary.mk
@@ -69,10 +69,10 @@
 
 ifeq (true,$(my_pack_module_relocations))
 # Pack relocations
-$(relocation_packer_output): $(relocation_packer_input) | $(ACP)
+$(relocation_packer_output): $(relocation_packer_input)
 	$(pack-elf-relocations)
 else
-$(relocation_packer_output): $(relocation_packer_input) | $(ACP)
+$(relocation_packer_output): $(relocation_packer_input)
 	@echo "target Unpacked: $(PRIVATE_MODULE) ($@)"
 	$(copy-file-to-target)
 endif
@@ -87,7 +87,7 @@
 endif
 symbolic_input := $(relocation_packer_output)
 symbolic_output := $(my_unstripped_path)/$(my_installed_module_stem)
-$(symbolic_output) : $(symbolic_input) | $(ACP)
+$(symbolic_output) : $(symbolic_input)
 	@echo "target Symbolic: $(PRIVATE_MODULE) ($@)"
 	$(copy-file-to-target)
 
@@ -116,11 +116,21 @@
   $(LOCAL_STRIP_MODULE_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) \
   $(LOCAL_STRIP_MODULE))
 ifeq ($(my_strip_module),)
+  my_strip_module := mini-debug-info
+endif
+
+ifeq ($(my_strip_module),mini-debug-info)
+# Don't use mini-debug-info on mips (both 32-bit and 64-bit). objcopy requires that all
+# SH_MIPS_DWARF sections have a name prefixed with .debug_ or .zdebug_, so there is no easy
+# way to use objcopy to remove all debug sections except .debug_frame on mips.
+ifneq ($(filter mips mips64,$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)),)
   my_strip_module := true
 endif
+endif
 
 $(strip_output): PRIVATE_STRIP := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_STRIP)
 $(strip_output): PRIVATE_OBJCOPY := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJCOPY)
+$(strip_output): PRIVATE_NM := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NM)
 $(strip_output): PRIVATE_READELF := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_READELF)
 ifeq ($(my_strip_module),no_debuglink)
 $(strip_output): PRIVATE_NO_DEBUGLINK := true
@@ -128,7 +138,11 @@
 $(strip_output): PRIVATE_NO_DEBUGLINK :=
 endif
 
-ifneq ($(filter true no_debuglink,$(my_strip_module)),)
+ifeq ($(my_strip_module),mini-debug-info)
+# Strip the binary, but keep debug frames and symbol table in a compressed .gnu_debugdata section.
+$(strip_output): $(strip_input) | $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_STRIP) $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OBJCOPY) $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NM)
+	$(transform-to-stripped-keep-mini-debug-info)
+else ifneq ($(filter true no_debuglink,$(my_strip_module)),)
 # Strip the binary
 $(strip_output): $(strip_input) | $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_STRIP)
 	$(transform-to-stripped)
@@ -148,18 +162,9 @@
 else
 # Don't strip the binary, just copy it.  We can't skip this step
 # because a copy of the binary must appear at LOCAL_BUILT_MODULE.
-#
-# If the binary we're copying is acp or a prerequisite,
-# use cp(1) instead.
-ifneq ($(LOCAL_ACP_UNAVAILABLE),true)
-$(strip_output): $(strip_input) | $(ACP)
-	@echo "target Unstripped: $(PRIVATE_MODULE) ($@)"
-	$(copy-file-to-target)
-else
 $(strip_output): $(strip_input)
 	@echo "target Unstripped: $(PRIVATE_MODULE) ($@)"
-	$(copy-file-to-target-with-cp)
-endif
+	$(copy-file-to-target)
 endif # my_strip_module
 
 $(cleantarget): PRIVATE_CLEAN_FILES += \
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 0a72603..850e475 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -324,10 +324,19 @@
 
 # Out for TARGET_2ND_ARCH
 TARGET_2ND_ARCH_VAR_PREFIX := $(HOST_2ND_ARCH_VAR_PREFIX)
+ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+# With this, the arm binary translation libraries can be referenced as libfoo_arm in PRODUCT_PACKAGES.
+TARGET_2ND_ARCH_MODULE_SUFFIX := _$(TARGET_2ND_ARCH)
+else
 TARGET_2ND_ARCH_MODULE_SUFFIX := $(HOST_2ND_ARCH_MODULE_SUFFIX)
+endif
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj_$(TARGET_2ND_ARCH)
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATES)/lib
+ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SHARED_LIBRARIES := $(target_out_shared_libraries_base)/lib/$(TARGET_2ND_ARCH)
+else
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SHARED_LIBRARIES := $(target_out_shared_libraries_base)/lib
+endif
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_RENDERSCRIPT_BITCODE := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SHARED_LIBRARIES)
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_EXECUTABLES := $(TARGET_OUT_EXECUTABLES)
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_APPS := $(TARGET_OUT_APPS)
@@ -353,8 +362,13 @@
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_EXECUTABLES := $(TARGET_OUT_DATA_EXECUTABLES)
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_SHARED_LIBRARIES := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SHARED_LIBRARIES)
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_APPS := $(TARGET_OUT_DATA_APPS)
+ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_NATIVE_TESTS := $(TARGET_OUT_DATA)/nativetest/$(TARGET_2ND_ARCH)
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_METRIC_TESTS := $(TARGET_OUT_DATA)/benchmarktest/$(TARGET_2ND_ARCH)
+else
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_NATIVE_TESTS := $(TARGET_OUT_DATA)/nativetest
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_METRIC_TESTS := $(TARGET_OUT_DATA)/benchmarktest
+endif
 
 TARGET_OUT_CACHE := $(PRODUCT_OUT)/cache
 
@@ -374,10 +388,15 @@
 endif
 TARGET_OUT_VENDOR_JAVA_LIBRARIES := $(TARGET_OUT_VENDOR)/framework
 TARGET_OUT_VENDOR_APPS := $(TARGET_OUT_VENDOR)/app
+TARGET_OUT_VENDOR_APPS_PRIVILEGED := $(TARGET_OUT_VENDOR)/priv-app
 TARGET_OUT_VENDOR_ETC := $(TARGET_OUT_VENDOR)/etc
 
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_EXECUTABLES := $(TARGET_OUT_VENDOR_EXECUTABLES)
+ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_SHARED_LIBRARIES := $(TARGET_OUT_VENDOR)/lib/$(TARGET_2ND_ARCH)
+else
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_SHARED_LIBRARIES := $(TARGET_OUT_VENDOR)/lib
+endif
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_APPS := $(TARGET_OUT_VENDOR_APPS)
 
 TARGET_OUT_OEM := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_OEM)
@@ -393,7 +412,11 @@
 TARGET_OUT_OEM_ETC := $(TARGET_OUT_OEM)/etc
 
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_OEM_EXECUTABLES := $(TARGET_OUT_OEM_EXECUTABLES)
+ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_OEM_SHARED_LIBRARIES := $(TARGET_OUT_OEM)/lib/$(TARGET_2ND_ARCH)
+else
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_OEM_SHARED_LIBRARIES := $(TARGET_OUT_OEM)/lib
+endif
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_OEM_APPS := $(TARGET_OUT_OEM_APPS)
 
 TARGET_OUT_ODM := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ODM)
@@ -407,7 +430,11 @@
 TARGET_OUT_ODM_ETC := $(TARGET_OUT_ODM)/etc
 
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_EXECUTABLES := $(TARGET_OUT_ODM_EXECUTABLES)
+ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_SHARED_LIBRARIES := $(TARGET_OUT_ODM)/lib/$(TARGET_2ND_ARCH)
+else
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_SHARED_LIBRARIES := $(TARGET_OUT_ODM)/lib
+endif
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_APPS := $(TARGET_OUT_ODM_APPS)
 
 TARGET_OUT_BREAKPAD := $(PRODUCT_OUT)/breakpad
diff --git a/core/executable.mk b/core/executable.mk
index e22ea0e..27c033d 100644
--- a/core/executable.mk
+++ b/core/executable.mk
@@ -17,6 +17,15 @@
 
 ifneq (true,$(my_skip_this_target))
 
+ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+# If a native test explicitly specifies that it builds only for the translation arch,
+# we still need LOCAL_MULTILIB=both, letting module_arch_supported.mk choose
+# to build only for TARGET_2ND_ARCH.
+ifneq (1,$(words $(LOCAL_MODULE_TARGET_ARCH)))
+LOCAL_MULTILIB := first
+endif
+endif
+
 my_prefix := TARGET_
 include $(BUILD_SYSTEM)/multilib.mk
 
diff --git a/core/executable_internal.mk b/core/executable_internal.mk
index febea98..3808412 100644
--- a/core/executable_internal.mk
+++ b/core/executable_internal.mk
@@ -38,9 +38,9 @@
 ifeq ($(LOCAL_NO_LIBGCC),true)
 my_target_libgcc :=
 else
-my_target_libgcc := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBGCC)
+my_target_libgcc := $(call intermediates-dir-for,STATIC_LIBRARIES,libgcc,,,$(LOCAL_2ND_ARCH_VAR_PREFIX))/libgcc.a
 endif
-my_target_libatomic := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBATOMIC)
+my_target_libatomic := $(call intermediates-dir-for,STATIC_LIBRARIES,libatomic,,,$(LOCAL_2ND_ARCH_VAR_PREFIX))/libatomic.a
 ifeq ($(LOCAL_NO_CRT),true)
 my_target_crtbegin_dynamic_o :=
 my_target_crtbegin_static_o :=
@@ -73,11 +73,11 @@
 $(linked_module): PRIVATE_POST_LINK_CMD := $(LOCAL_POST_LINK_CMD)
 
 ifeq ($(LOCAL_FORCE_STATIC_EXECUTABLE),true)
-$(linked_module): $(my_target_crtbegin_static_o) $(all_objects) $(all_libraries) $(my_target_crtend_o)
+$(linked_module): $(my_target_crtbegin_static_o) $(all_objects) $(all_libraries) $(my_target_crtend_o) $(my_target_libgcc) $(my_target_libatomic)
 	$(transform-o-to-static-executable)
 	$(PRIVATE_POST_LINK_CMD)
 else
-$(linked_module): $(my_target_crtbegin_dynamic_o) $(all_objects) $(all_libraries) $(my_target_crtend_o)
+$(linked_module): $(my_target_crtbegin_dynamic_o) $(all_objects) $(all_libraries) $(my_target_crtend_o) $(my_target_libgcc) $(my_target_libatomic)
 	$(transform-o-to-executable)
 	$(PRIVATE_POST_LINK_CMD)
 endif
diff --git a/core/executable_prefer_symlink.mk b/core/executable_prefer_symlink.mk
index 931550f..1640b32 100644
--- a/core/executable_prefer_symlink.mk
+++ b/core/executable_prefer_symlink.mk
@@ -42,7 +42,7 @@
 # $(my_symlink) doesn't need to depend on $(PRIVATE_SRC_BINARY_NAME): we can generate symlink to nonexistent file.
 # If you add the dependency, make would compare the timestamp of a file against that of its symlink:
 # they are always equal, because make follows symlink.
-$(my_symlink): $(LOCAL_MODULE_MAKEFILE_DEP)
+$(my_symlink):
 	@echo "Symlink: $@ -> $(PRIVATE_SRC_BINARY_NAME)"
 	@mkdir -p $(dir $@)
 	@rm -rf $@
diff --git a/core/goma.mk b/core/goma.mk
index 6535b3e..0d5f428 100644
--- a/core/goma.mk
+++ b/core/goma.mk
@@ -16,16 +16,6 @@
 
 # Notice: this works only with Google's Goma build infrastructure.
 ifneq ($(filter-out false,$(USE_GOMA)),)
-  # Check if USE_NINJA is not false because GNU make won't work well
-  # with goma. Note this file is evaluated twice, once by GNU make and
-  # once by kati with USE_NINJA=false. We do this check in the former
-  # pass.
-  ifndef KATI
-    ifeq ($(USE_NINJA),false)
-      $(error USE_GOMA=true is not compatible with USE_NINJA=false)
-    endif
-  endif
-
   # Goma requires a lot of processes and file descriptors.
   ifeq ($(shell echo $$(($$(ulimit -u) < 2500 || $$(ulimit -n) < 16000))),1)
     $(warning Max user processes and/or open files are insufficient)
@@ -56,11 +46,7 @@
   # gomacc can start goma client's daemon process automatically, but
   # it is safer and faster to start up it beforehand. We run this as a
   # background process so this won't slow down the build.
-  # We use "ensure_start" command when the compiler_proxy is already
-  # running and uses GOMA_HERMETIC=error flag. The compiler_proxy will
-  # restart otherwise.
-  # TODO(hamaji): Remove this condition after http://b/25676777 is fixed.
-  $(shell ( if ( curl http://localhost:$$($(GOMA_CC) port)/flagz | grep GOMA_HERMETIC=error ); then cmd=ensure_start; else cmd=restart; fi; GOMA_HERMETIC=error $(goma_ctl) $${cmd} ) &> /dev/null &)
+  $(shell ( GOMA_HERMETIC=error $(goma_ctl) ensure_start ) &> /dev/null &)
 
   goma_ctl :=
   goma_dir :=
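
The goma hunk drops the ensure_start-vs-restart probe and always runs ensure_start, still launched from $(shell) as a detached background job so that parsing the makefiles never waits on it. The launch pattern on its own (generic placeholder command, POSIX redirection instead of bash's &>):

# Started while the makefile is being read; output must be swallowed so
# $(shell) has nothing to wait for and nothing to splice into the makefile.
DAEMON_CMD ?= sleep 30
$(shell ( $(DAEMON_CMD) ) > /dev/null 2>&1 &)

all:
	@echo parsed and running without waiting for the daemon
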
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index 83047d4..7fdf249 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -23,36 +23,34 @@
 ifeq ($(HOST_OS),linux)
 USE_CORE_LIB_BOOTCLASSPATH := true
 
+#################################
+include $(BUILD_SYSTEM)/configure_local_jack.mk
+#################################
+
 #######################################
 include $(BUILD_SYSTEM)/host_java_library_common.mk
 #######################################
-
-ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
-  LOCAL_JAVA_LIBRARIES += core-oj-hostdex core-libart-hostdex
+ifeq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
+  # For static library, $(LOCAL_BUILT_MODULE) is $(full_classes_jack).
+  LOCAL_BUILT_MODULE_STEM := classes.jack
 endif
 
-full_classes_compiled_jar := $(intermediates.COMMON)/classes-full-debug.jar
-full_classes_jarjar_jar := $(intermediates.COMMON)/classes-jarjar.jar
-full_classes_jar := $(intermediates.COMMON)/classes.jar
+ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
+  LOCAL_JAVA_LIBRARIES :=  core-oj-hostdex core-libart-hostdex $(LOCAL_JAVA_LIBRARIES)
+endif
+
 full_classes_jack := $(intermediates.COMMON)/classes.jack
 jack_check_timestamp := $(intermediates.COMMON)/jack.check.timestamp
 built_dex := $(intermediates.COMMON)/classes.dex
 
 LOCAL_INTERMEDIATE_TARGETS += \
-    $(full_classes_compiled_jar) \
-    $(full_classes_jarjar_jar) \
     $(full_classes_jack) \
-    $(full_classes_jar) \
     $(jack_check_timestamp) \
     $(built_dex)
 
 # See comment in java.mk
 ifndef LOCAL_CHECKED_MODULE
-ifdef LOCAL_JACK_ENABLED
 LOCAL_CHECKED_MODULE := $(jack_check_timestamp)
-else
-LOCAL_CHECKED_MODULE := $(full_classes_compiled_jar)
-endif
 endif
 
 #######################################
@@ -64,80 +62,32 @@
 
 include $(BUILD_SYSTEM)/java_common.mk
 
-# The layers file allows you to enforce a layering between java packages.
-# Run build/tools/java-layers.py for more details.
-layers_file := $(addprefix $(LOCAL_PATH)/, $(LOCAL_JAVA_LAYERS_FILE))
-
 $(cleantarget): PRIVATE_CLEAN_FILES += $(intermediates.COMMON)
 
-$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
-$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS)
-$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
-$(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
-$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
-$(full_classes_compiled_jar): \
-        $(java_sources) \
-        $(java_resource_sources) \
-        $(full_java_lib_deps) \
-        $(jar_manifest_file) \
-        $(proto_java_sources_file_stamp) \
-        $(LOCAL_MODULE_MAKEFILE_DEP) \
-        $(LOCAL_ADDITIONAL_DEPENDENCIES)
-	$(transform-host-java-to-package)
-
-# Run jarjar if necessary, otherwise just copy the file.
-ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
-$(full_classes_jarjar_jar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
-$(full_classes_jarjar_jar): $(full_classes_compiled_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
-	@echo JarJar: $@
-	$(hide) java -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@
-else
-$(full_classes_jarjar_jar): $(full_classes_compiled_jar) | $(ACP)
-	@echo Copying: $@
-	$(hide) $(ACP) -fp $< $@
-endif
-
-$(full_classes_jar): $(full_classes_jarjar_jar) | $(ACP)
-	@echo Copying: $@
-	$(hide) $(ACP) -fp $< $@
-
-ifndef LOCAL_JACK_ENABLED
-
-$(LOCAL_BUILT_MODULE): PRIVATE_DEX_FILE := $(built_dex)
-$(LOCAL_BUILT_MODULE): PRIVATE_SOURCE_ARCHIVE := $(full_classes_jarjar_jar)
-$(LOCAL_BUILT_MODULE): PRIVATE_DONT_DELETE_JAR_DIRS := $(LOCAL_DONT_DELETE_JAR_DIRS)
-$(LOCAL_BUILT_MODULE): $(built_dex) $(java_resource_sources)
-	@echo "Host Jar: $(PRIVATE_MODULE) ($@)"
-	$(call initialize-package-file,$(PRIVATE_SOURCE_ARCHIVE),$@)
-	$(add-dex-to-package)
-
-else # LOCAL_JACK_ENABLED
 $(LOCAL_INTERMEDIATE_TARGETS): \
-	PRIVATE_JACK_INTERMEDIATES_DIR := $(intermediates.COMMON)/jack-rsc
+  PRIVATE_JACK_INTERMEDIATES_DIR := $(intermediates.COMMON)/jack-rsc
 
 ifeq ($(LOCAL_JACK_ENABLED),incremental)
 $(LOCAL_INTERMEDIATE_TARGETS): \
-	PRIVATE_JACK_INCREMENTAL_DIR := $(intermediates.COMMON)/jack-incremental
+  PRIVATE_JACK_INCREMENTAL_DIR := $(intermediates.COMMON)/jack-incremental
 else
 $(LOCAL_INTERMEDIATE_TARGETS): \
-	PRIVATE_JACK_INCREMENTAL_DIR :=
+  PRIVATE_JACK_INCREMENTAL_DIR :=
 endif
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_FLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JACK_FLAGS)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_VERSION := $(LOCAL_JACK_VERSION)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_MIN_SDK_VERSION := $(PLATFORM_JACK_MIN_SDK_VERSION)
 
 jack_all_deps := $(java_sources) $(java_resource_sources) $(full_jack_deps) \
-        $(jar_manifest_file) $(proto_java_sources_file_stamp) $(LOCAL_MODULE_MAKEFILE_DEP) \
-        $(LOCAL_ADDITIONAL_DEPENDENCIES) $(JACK)
+        $(jar_manifest_file) $(proto_java_sources_file_stamp) \
+        $(LOCAL_ADDITIONAL_DEPENDENCIES) $(NORMALIZE_PATH) $(JACK_DEFAULT_ARGS) $(JACK)
+
+ifneq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
 $(built_dex): PRIVATE_CLASSES_JACK := $(full_classes_jack)
 $(built_dex): $(jack_all_deps) | setup-jack-server
 	@echo Building with Jack: $@
 	$(jack-java-to-dex)
 
-$(jack_check_timestamp): $(jack_all_deps) | setup-jack-server
-	@echo Checking build with Jack: $@
-	$(jack-check-java)
-
 # $(full_classes_jack) is just by-product of $(built_dex).
 # The dummy command was added because, without it, make misses the fact the $(built_dex) also
 # change $(full_classes_jack).
@@ -151,7 +101,16 @@
 	$(add-dex-to-package)
 	$(add-carried-jack-resources)
 
-endif # LOCAL_JACK_ENABLED
+else  # LOCAL_IS_STATIC_JAVA_LIBRARY
+$(full_classes_jack): $(jack_all_deps) | setup-jack-server
+	@echo Building with Jack: $@
+	$(java-to-jack)
+
+endif  # LOCAL_IS_STATIC_JAVA_LIBRARY
+
+$(jack_check_timestamp): $(jack_all_deps) | setup-jack-server
+	@echo Checking build with Jack: $@
+	$(jack-check-java)
 
 USE_CORE_LIB_BOOTCLASSPATH :=
 
diff --git a/core/host_dalvik_static_java_library.mk b/core/host_dalvik_static_java_library.mk
index c296be3..b79c0ea 100644
--- a/core/host_dalvik_static_java_library.mk
+++ b/core/host_dalvik_static_java_library.mk
@@ -19,42 +19,9 @@
 # These libraries will be compiled against libcore and not the host
 # JRE.
 #
-ifeq ($(HOST_OS),linux)
-
 LOCAL_UNINSTALLABLE_MODULE := true
 LOCAL_IS_STATIC_JAVA_LIBRARY := true
-USE_CORE_LIB_BOOTCLASSPATH := true
-LOCAL_JAVA_LIBRARIES += core-oj-hostdex core-libart-hostdex
 
-intermediates.COMMON := $(call intermediates-dir-for,JAVA_LIBRARIES,$(LOCAL_MODULE),true,COMMON,)
-full_classes_jack := $(intermediates.COMMON)/classes.jack
-LOCAL_INTERMEDIATE_TARGETS += \
-    $(full_classes_jack)
+include $(BUILD_SYSTEM)/host_dalvik_java_library.mk
 
-include $(BUILD_SYSTEM)/host_java_library.mk
-# proguard is not supported
-# *.proto files are not supported
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_FLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JACK_FLAGS)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_VERSION := $(LOCAL_JACK_VERSION)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_MIN_SDK_VERSION := $(PLATFORM_JACK_MIN_SDK_VERSION)
-
-$(full_classes_jack): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
-$(full_classes_jack): \
-	PRIVATE_JACK_INTERMEDIATES_DIR := $(intermediates.COMMON)/jack-rsc
-ifeq ($(LOCAL_JACK_ENABLED),incremental)
-$(full_classes_jack): \
-	PRIVATE_JACK_INCREMENTAL_DIR := $(intermediates.COMMON)/jack-incremental
-else
-$(full_classes_jack): \
-	PRIVATE_JACK_INCREMENTAL_DIR :=
-endif
-$(full_classes_jack): $(java_sources) $(java_resource_sources) $(full_jack_deps) \
-        $(jar_manifest_file) $(layers_file) $(LOCAL_MODULE_MAKEFILE_DEP) \
-        $(LOCAL_ADDITIONAL_DEPENDENCIES) $(LOCAL_JARJAR_RULES) \
-        $(JACK) | setup-jack-server
-	@echo Building with Jack: $@
-	$(java-to-jack)
-
-USE_CORE_LIB_BOOTCLASSPATH :=
 LOCAL_IS_STATIC_JAVA_LIBRARY :=
-endif
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index 97079fd..9aa2a7c 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -66,10 +66,12 @@
         $(full_java_lib_deps) \
         $(jar_manifest_file) \
         $(proto_java_sources_file_stamp) \
-        $(LOCAL_MODULE_MAKEFILE_DEP) \
+        $(NORMALIZE_PATH) \
         $(LOCAL_ADDITIONAL_DEPENDENCIES)
 	$(transform-host-java-to-package)
 
+javac-check : $(full_classes_compiled_jar)
+
 # Run jarjar if necessary, otherwise just copy the file.
 ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
 $(full_classes_jarjar_jar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
@@ -97,13 +99,12 @@
 $(full_classes_emma_jar) : $(full_classes_jarjar_jar) | $(EMMA_JAR)
 	$(transform-classes.jar-to-emma)
 
-$(built_javalib_jar) : $(full_classes_emma_jar)
+$(LOCAL_BUILT_MODULE) : $(full_classes_emma_jar)
 	@echo Copying: $@
 	$(hide) $(ACP) -fp $< $@
 
 else # LOCAL_EMMA_INSTRUMENT
-$(built_javalib_jar): $(full_classes_jarjar_jar) | $(ACP)
+$(LOCAL_BUILT_MODULE) : $(full_classes_jarjar_jar) | $(ACP)
 	@echo Copying: $@
 	$(hide) $(ACP) -fp $< $@
 endif # LOCAL_EMMA_INSTRUMENT
-
diff --git a/core/host_java_library_common.mk b/core/host_java_library_common.mk
index 35a6e28..8df4b37 100644
--- a/core/host_java_library_common.mk
+++ b/core/host_java_library_common.mk
@@ -26,19 +26,6 @@
 intermediates := $(call local-intermediates-dir)
 intermediates.COMMON := $(call local-intermediates-dir,COMMON)
 
-built_javalib_jar := $(intermediates)/javalib.jar
-
-#################################
-include $(BUILD_SYSTEM)/configure_local_jack.mk
-#################################
-
-ifdef LOCAL_JACK_ENABLED
-ifdef LOCAL_IS_STATIC_JAVA_LIBRARY
-LOCAL_BUILT_MODULE_STEM := classes.jack
-LOCAL_INTERMEDIATE_TARGETS += $(built_javalib_jar)
-endif
-endif
-
 # base_rules.mk looks at this
 all_res_assets :=
 
@@ -61,4 +48,3 @@
 
 LOCAL_INTERMEDIATE_SOURCE_DIR := $(intermediates.COMMON)/src
 LOCAL_JAVA_LIBRARIES := $(sort $(LOCAL_JAVA_LIBRARIES))
-
diff --git a/core/host_shared_library_internal.mk b/core/host_shared_library_internal.mk
index 272e76f..bfbde21 100644
--- a/core/host_shared_library_internal.mk
+++ b/core/host_shared_library_internal.mk
@@ -44,7 +44,6 @@
 $(LOCAL_BUILT_MODULE): \
         $(all_objects) \
         $(all_libraries) \
-        $(LOCAL_MODULE_MAKEFILE_DEP) \
         $(LOCAL_ADDITIONAL_DEPENDENCIES)
 	$(transform-host-o-to-shared-lib)
 
diff --git a/core/host_test_internal.mk b/core/host_test_internal.mk
index 7f6aff0..6c52e64 100644
--- a/core/host_test_internal.mk
+++ b/core/host_test_internal.mk
@@ -5,10 +5,9 @@
 LOCAL_CFLAGS_windows += -DGTEST_OS_WINDOWS
 LOCAL_CFLAGS_linux += -DGTEST_OS_LINUX
 LOCAL_LDLIBS_linux += -lpthread
-LOCAL_CFLAGS_darwin += -DGTEST_OS_LINUX
+LOCAL_CFLAGS_darwin += -DGTEST_OS_MAC
 LOCAL_LDLIBS_darwin += -lpthread
 
 LOCAL_CFLAGS += -DGTEST_HAS_STD_STRING -O0 -g
-LOCAL_C_INCLUDES +=  external/gtest/include
 
 LOCAL_STATIC_LIBRARIES += libgtest_main_host libgtest_host
diff --git a/core/java.mk b/core/java.mk
index bc8ed64..1a1de66 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -96,34 +96,14 @@
 intermediates := $(call local-intermediates-dir)
 intermediates.COMMON := $(call local-intermediates-dir,COMMON)
 
-# Choose leaf name for the compiled jar file.
-ifeq ($(LOCAL_EMMA_INSTRUMENT),true)
-full_classes_compiled_jar_leaf := classes-no-debug-var.jar
-built_dex_intermediate_leaf := no-local
-else
-full_classes_compiled_jar_leaf := classes-full-debug.jar
-built_dex_intermediate_leaf := with-local
-endif
-
 ifeq ($(LOCAL_PROGUARD_ENABLED),disabled)
 LOCAL_PROGUARD_ENABLED :=
 endif
 
-ifdef LOCAL_PROGUARD_ENABLED
-proguard_jar_leaf := proguard.classes.jar
-else
-proguard_jar_leaf := noproguard.classes.jar
-endif
-
-full_classes_compiled_jar := $(intermediates.COMMON)/$(full_classes_compiled_jar_leaf)
-jarjar_leaf := classes-jarjar.jar
-full_classes_jarjar_jar := $(intermediates.COMMON)/$(jarjar_leaf)
-emma_intermediates_dir := $(intermediates.COMMON)/emma_out
-# emma is hardcoded to use the leaf name of its input for the output file --
-# only the output directory can be changed
-full_classes_emma_jar := $(emma_intermediates_dir)/lib/$(jarjar_leaf)
-full_classes_proguard_jar := $(intermediates.COMMON)/$(proguard_jar_leaf)
-built_dex_intermediate := $(intermediates.COMMON)/$(built_dex_intermediate_leaf)/classes.dex
+full_classes_compiled_jar := $(intermediates.COMMON)/classes-full-debug.jar
+full_classes_jarjar_jar := $(intermediates.COMMON)/classes-jarjar.jar
+full_classes_proguard_jar := $(intermediates.COMMON)/proguard.classes.jar
+built_dex_intermediate := $(intermediates.COMMON)/dex-dir/classes.dex
 full_classes_stubs_jar := $(intermediates.COMMON)/stubs.jar
 
 ifeq ($(LOCAL_MODULE_CLASS)$(LOCAL_SRC_FILES)$(LOCAL_STATIC_JAVA_LIBRARIES)$(LOCAL_SOURCE_FILES_ALL_GENERATED),APPS)
@@ -143,7 +123,6 @@
 LOCAL_INTERMEDIATE_TARGETS += \
     $(full_classes_compiled_jar) \
     $(full_classes_jarjar_jar) \
-    $(full_classes_emma_jar) \
     $(full_classes_jar) \
     $(full_classes_proguard_jar) \
     $(built_dex_intermediate) \
@@ -327,7 +306,6 @@
 
 $(aidl_java_sources): $(intermediates.COMMON)/src/%.java: \
         $(LOCAL_PATH)/%.aidl \
-        $(LOCAL_MODULE_MAKEFILE_DEP) \
         $(LOCAL_ADDITIONAL_DEPENDENCIES) \
         $(AIDL) \
         $(aidl_preprocess_import)
@@ -347,11 +325,7 @@
 # command line.
 ifndef LOCAL_CHECKED_MODULE
 ifdef full_classes_jar
-ifdef LOCAL_JACK_ENABLED
 LOCAL_CHECKED_MODULE := $(jack_check_timestamp)
-else
-LOCAL_CHECKED_MODULE := $(full_classes_compiled_jar)
-endif
 endif
 endif
 
@@ -367,7 +341,8 @@
 logtags_java_sources := $(patsubst %.logtags,%.java,$(addprefix $(intermediates.COMMON)/src/, $(logtags_sources)))
 logtags_sources := $(addprefix $(LOCAL_PATH)/, $(logtags_sources))
 
-$(logtags_java_sources): $(intermediates.COMMON)/src/%.java: $(LOCAL_PATH)/%.logtags $(TARGET_OUT_COMMON_INTERMEDIATES)/all-event-log-tags.txt
+$(logtags_java_sources): PRIVATE_MERGED_TAG := $(TARGET_OUT_COMMON_INTERMEDIATES)/all-event-log-tags.txt
+$(logtags_java_sources): $(intermediates.COMMON)/src/%.java: $(LOCAL_PATH)/%.logtags $(TARGET_OUT_COMMON_INTERMEDIATES)/all-event-log-tags.txt $(JAVATAGS) build/tools/event_log_tags.py
 	$(transform-logtags-to-java)
 
 else
@@ -422,10 +397,6 @@
 $(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
 $(full_classes_compiled_jar): PRIVATE_WARNINGS_ENABLE := $(LOCAL_WARNINGS_ENABLE)
 
-ifdef LOCAL_RMTYPEDEFS
-$(full_classes_compiled_jar): | $(RMTYPEDEFS)
-endif
-
 # Compile the java files to a .jar file.
 # This intentionally depends on java_sources, not all_java_sources.
 # Deps for generated source files must be handled separately,
@@ -443,10 +414,12 @@
         $(layers_file) \
         $(RenderScript_file_stamp) \
         $(proto_java_sources_file_stamp) \
-        $(LOCAL_MODULE_MAKEFILE_DEP) \
+        $(NORMALIZE_PATH) \
         $(LOCAL_ADDITIONAL_DEPENDENCIES)
 	$(transform-java-to-classes.jar)
 
+javac-check : $(full_classes_compiled_jar)
+
 # Run jarjar if necessary, otherwise just copy the file.
 ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
 $(full_classes_jarjar_jar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
@@ -459,31 +432,8 @@
 	$(hide) $(ACP) -fp $< $@
 endif
 
-ifeq ($(LOCAL_EMMA_INSTRUMENT),true)
-$(full_classes_emma_jar): PRIVATE_EMMA_COVERAGE_FILE := $(intermediates.COMMON)/coverage.emma.ignore
-$(full_classes_emma_jar): PRIVATE_EMMA_INTERMEDIATES_DIR := $(emma_intermediates_dir)
-# module level coverage filter can be defined using LOCAL_EMMA_COVERAGE_FILTER
-# in Android.mk
-ifdef LOCAL_EMMA_COVERAGE_FILTER
-$(full_classes_emma_jar): PRIVATE_EMMA_COVERAGE_FILTER := $(LOCAL_EMMA_COVERAGE_FILTER)
-else
-# by default, avoid applying emma instrumentation onto emma classes itself,
-# otherwise there will be exceptions thrown
-$(full_classes_emma_jar): PRIVATE_EMMA_COVERAGE_FILTER := *,-emma,-emmarun,-com.vladium.*
-endif
-# this rule will generate both $(PRIVATE_EMMA_COVERAGE_FILE) and
-# $(full_classes_emma_jar)
-$(full_classes_emma_jar): $(full_classes_jarjar_jar) | $(EMMA_JAR)
-	$(transform-classes.jar-to-emma)
-
-else
-$(full_classes_emma_jar): $(full_classes_jarjar_jar) | $(ACP)
-	@echo Copying: $@
-	$(copy-file-to-target)
-endif
-
 # Keep a copy of the jar just before proguard processing.
-$(full_classes_jar): $(full_classes_emma_jar) | $(ACP)
+$(full_classes_jar): $(full_classes_jarjar_jar) | $(ACP)
 	@echo Copying: $@
 	$(hide) $(ACP) -fp $< $@
 
@@ -526,22 +476,25 @@
 
 common_proguard_flags := -forceprocessing
 
+common_proguard_flag_files :=
 ifeq ($(filter nosystem,$(LOCAL_PROGUARD_ENABLED)),)
-common_proguard_flags += -include $(BUILD_SYSTEM)/proguard.flags
+common_proguard_flag_files += $(BUILD_SYSTEM)/proguard.flags
 ifeq ($(LOCAL_EMMA_INSTRUMENT),true)
-ifdef LOCAL_JACK_ENABLED
-common_proguard_flags += -include $(BUILD_SYSTEM)/proguard.jacoco.flags
-else
-common_proguard_flags += -include $(BUILD_SYSTEM)/proguard.emma.flags
-endif # LOCAL_JACK_ENABLED
+common_proguard_flag_files += $(BUILD_SYSTEM)/proguard.jacoco.flags
 endif
 # If this is a test package, add proguard keep flags for tests.
 ifneq ($(LOCAL_INSTRUMENTATION_FOR)$(filter tests,$(LOCAL_MODULE_TAGS)),)
-common_proguard_flags += -include $(BUILD_SYSTEM)/proguard_tests.flags
+common_proguard_flag_files += $(BUILD_SYSTEM)/proguard_tests.flags
 ifeq ($(filter shrinktests,$(LOCAL_PROGUARD_ENABLED)),)
 common_proguard_flags += -dontshrink # don't shrink tests by default
 endif # shrinktests
 endif # test package
+ifneq ($(common_proguard_flag_files),)
+common_proguard_flags += $(addprefix -include , $(common_proguard_flag_files))
+# This is included from $(BUILD_SYSTEM)/proguard.flags
+common_proguard_flag_files += $(BUILD_SYSTEM)/proguard_basic_keeps.flags
+endif
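
The proguard change collects the -include'd flag files into common_proguard_flag_files so the same list feeds both the command line (via addprefix) and the rule's prerequisites, meaning an edited proguard.flags re-triggers the obfuscation step. The general pattern, with placeholder names and assuming a proguard launcher script on PATH:

FLAG_FILES     := proguard.flags proguard_tests.flags
PROGUARD_FLAGS := $(addprefix -include , $(FLAG_FILES))

out/obfuscated.jar: classes.jar $(FLAG_FILES)
	@mkdir -p $(dir $@)
	proguard -injars $< -outjars $@ $(PROGUARD_FLAGS)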
+
 ifeq ($(filter obfuscation,$(LOCAL_PROGUARD_ENABLED)),)
 # By default no obfuscation
 common_proguard_flags += -dontobfuscate
@@ -567,10 +520,9 @@
     -applymapping $(link_instr_intermediates_dir.COMMON)/proguard_dictionary \
     -verbose \
     $(legacy_proguard_flags)
-ifdef LOCAL_JACK_ENABLED
+
 jack_proguard_flags += -applymapping $(link_instr_intermediates_dir.COMMON)/jack_dictionary
 full_jack_deps += $(link_instr_intermediates_dir.COMMON)/jack_dictionary
-endif
 
 # Sometimes (test + main app) uses different keep rules from the main app -
 # apply the main app's dictionary anyway.
@@ -593,40 +545,21 @@
 endif
 $(full_classes_proguard_jar): PRIVATE_EXTRA_INPUT_JAR := $(extra_input_jar)
 $(full_classes_proguard_jar): PRIVATE_PROGUARD_FLAGS := $(legacy_proguard_flags) $(common_proguard_flags) $(LOCAL_PROGUARD_FLAGS)
-$(full_classes_proguard_jar) : $(full_classes_jar) $(extra_input_jar) $(my_support_library_sdk_raise) $(proguard_flag_files) | $(ACP) $(PROGUARD)
+$(full_classes_proguard_jar) : $(full_classes_jar) $(extra_input_jar) $(my_support_library_sdk_raise) $(common_proguard_flag_files) $(proguard_flag_files) | $(PROGUARD)
 	$(call transform-jar-to-proguard)
 
 else  # LOCAL_PROGUARD_ENABLED not defined
-$(full_classes_proguard_jar) : $(full_classes_jar)
+$(full_classes_proguard_jar) : $(full_classes_jar) | $(ACP)
 	@echo Copying: $@
 	$(hide) $(ACP) -fp $< $@
 
 endif # LOCAL_PROGUARD_ENABLED defined
 
-ifndef LOCAL_JACK_ENABLED
-# Override PRIVATE_INTERMEDIATES_DIR so that install-dex-debug
-# will work even when intermediates != intermediates.COMMON.
-$(built_dex_intermediate): PRIVATE_INTERMEDIATES_DIR := $(intermediates.COMMON)
-$(built_dex_intermediate): PRIVATE_DX_FLAGS := $(LOCAL_DX_FLAGS)
-# If you instrument class files that have local variable debug information in
-# them emma does not correctly maintain the local variable table.
-# This will cause an error when you try to convert the class files for Android.
-# The workaround here is to build different dex file here based on emma switch
-# then later copy into classes.dex. When emma is on, dx is run with --no-locals
-# option to remove local variable information
-ifeq ($(LOCAL_EMMA_INSTRUMENT),true)
-$(built_dex_intermediate): PRIVATE_DX_FLAGS += --no-locals
-endif
-endif # LOCAL_JACK_ENABLED is disabled
-
 $(built_dex): $(built_dex_intermediate) | $(ACP)
 	@echo Copying: $@
 	$(hide) mkdir -p $(dir $@)
 	$(hide) rm -f $(dir $@)/classes*.dex
 	$(hide) $(ACP) -fp $(dir $<)/classes*.dex $(dir $@)
-ifneq ($(GENERATE_DEX_DEBUG),)
-	$(install-dex-debug)
-endif
 
 findbugs_xml := $(intermediates.COMMON)/findbugs.xml
 $(findbugs_xml): PRIVATE_AUXCLASSPATH := $(addprefix -auxclasspath ,$(strip \
@@ -654,7 +587,6 @@
 
 endif  # full_classes_jar is defined
 
-ifdef LOCAL_JACK_ENABLED
 $(LOCAL_INTERMEDIATE_TARGETS): \
 	PRIVATE_JACK_INTERMEDIATES_DIR := $(intermediates.COMMON)/jack-rsc
 ifeq ($(LOCAL_JACK_ENABLED),incremental)
@@ -695,9 +627,10 @@
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_VERSION := $(LOCAL_JACK_VERSION)
 
 jack_all_deps := $(java_sources) $(java_resource_sources) $(full_jack_deps) \
-        $(jar_manifest_file) $(layers_file) $(RenderScript_file_stamp) $(proguard_flag_files) \
+        $(jar_manifest_file) $(layers_file) $(RenderScript_file_stamp) \
+        $(common_proguard_flag_files) $(proguard_flag_files) \
         $(proto_java_sources_file_stamp) $(LOCAL_ADDITIONAL_DEPENDENCIES) $(LOCAL_JARJAR_RULES) \
-        $(LOCAL_MODULE_MAKEFILE_DEP) $(JACK)
+        $(NORMALIZE_PATH) $(JACK_DEFAULT_ARGS) $(JACK)
 
 $(jack_check_timestamp): $(jack_all_deps) | setup-jack-server
 	@echo Checking build with Jack: $@
@@ -747,4 +680,3 @@
 	@echo Building with Jack: $@
 	$(java-to-jack)
 endif  # full_classes_jar is defined
-endif # LOCAL_JACK_ENABLED
diff --git a/core/java_common.mk b/core/java_common.mk
index 9b7d10f..bdf9828 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -19,7 +19,12 @@
   ifneq (,$(filter $(LOCAL_SDK_VERSION), $(private_sdk_versions_without_any_java_18_support)))
     LOCAL_JAVA_LANGUAGE_VERSION := 1.7
   else
-    LOCAL_JAVA_LANGUAGE_VERSION := 1.8
+    # This retains 1.7 for ART build bots only. http://b/27583810
+    ifeq (,$(LEGACY_USE_JAVA7))
+      LOCAL_JAVA_LANGUAGE_VERSION := 1.8
+    else
+      LOCAL_JAVA_LANGUAGE_VERSION := 1.7
+    endif
   endif
 endif
 LOCAL_JAVACFLAGS += -source $(LOCAL_JAVA_LANGUAGE_VERSION) -target $(LOCAL_JAVA_LANGUAGE_VERSION)
@@ -128,6 +133,15 @@
   extra_jar_args :=
 endif # java_resource_file_groups
 
+#####################################
+## Warn if there are unrecognized files in LOCAL_SRC_FILES.
+my_unknown_src_files := $(filter-out \
+  %.java %.aidl %.proto %.logtags %.fs %.rs, \
+  $(LOCAL_SRC_FILES) $(LOCAL_INTERMEDIATE_SOURCES) $(LOCAL_GENERATED_SOURCES))
+ifneq ($(my_unknown_src_files),)
+$(warning $(LOCAL_MODULE_MAKEFILE): $(LOCAL_MODULE): Unused source files: $(my_unknown_src_files))
+endif
+
 ######################################
 ## PRIVATE java vars
 # LOCAL_SOURCE_FILES_ALL_GENERATED is set only if the module does not have static source files,
@@ -305,7 +319,6 @@
 ###########################################################
 # JACK
 ###########################################################
-ifdef LOCAL_JACK_ENABLED
 ifdef need_compile_java
 
 LOCAL_JACK_FLAGS += -D jack.java.source.version=$(LOCAL_JAVA_LANGUAGE_VERSION)
@@ -317,54 +330,16 @@
 
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_STATIC_JACK_LIBRARIES := $(full_static_jack_libs)
 
-ifndef LOCAL_IS_HOST_MODULE
-ifeq ($(LOCAL_SDK_VERSION),)
-ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
-my_bootclasspath :=
-else
-my_bootclasspath := $(call jack-lib-files,core-oj core-libart)
-endif
-else  # LOCAL_SDK_VERSION
-ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),current)
-# LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS.
-my_bootclasspath := $(call jack-lib-files,android_stubs_current)
-else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),system_current)
-my_bootclasspath := $(call jack-lib-files,android_system_stubs_current)
-else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),test_current)
-my_bootclasspath := $(call jack-lib-files,android_test_stubs_current)
-else
-my_bootclasspath :=$(call jack-lib-files,sdk_v$(LOCAL_SDK_VERSION))
-endif # current, system_current, or test_current
-endif # LOCAL_SDK_VERSION
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES := $(my_bootclasspath)
-
 full_shared_jack_libs := $(call jack-lib-files,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
-full_jack_deps := $(call jack-lib-deps,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
+full_jack_deps := $(full_shared_jack_libs)
+
+ifndef LOCAL_IS_HOST_MODULE
 # Turn off .toc optimization for apps build as we cannot build dexdump.
 ifeq (,$(TARGET_BUILD_APPS))
 full_jack_deps := $(patsubst %.jack, %.dex.toc, $(full_jack_deps))
 endif
-
-else # LOCAL_IS_HOST_MODULE
-
-ifeq ($(USE_CORE_LIB_BOOTCLASSPATH),true)
-ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
-my_bootclasspath :=
-else
-my_bootclasspath := $(call jack-lib-files,core-oj-hostdex core-libart-hostdex,$(LOCAL_IS_HOST_MODULE))
-endif
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES := $(my_bootclasspath)
-# Compiling against the final jack library. If we want to add support for obfuscated library
-# we'll need to change that to compile against the not obfuscated jack library.
-full_shared_jack_libs := $(call jack-lib-files,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
-full_jack_deps := $(call jack-lib-deps,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
-else
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES :=
-full_shared_jack_libs := $(call jack-lib-deps,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
-full_jack_deps := $(full_shared_jack_libs)
-endif # USE_CORE_LIB_BOOTCLASSPATH
 endif # !LOCAL_IS_HOST_MODULE
-full_jack_libs := $(full_shared_jack_libs) $(full_static_jack_libs) $(LOCAL_JACK_CLASSPATH)
+full_shared_jack_libs += $(LOCAL_JACK_CLASSPATH)
 full_jack_deps += $(full_static_jack_libs) $(LOCAL_JACK_CLASSPATH)
 
 ifndef LOCAL_IS_HOST_MODULE
@@ -378,7 +353,6 @@
 
   # link against the jar with full original names (before proguard processing).
   full_shared_jack_libs += $(link_apk_jack_libraries)
-  full_jack_libs += $(link_apk_jack_libraries)
   full_jack_deps += $(link_apk_jack_libraries)
 endif
 
@@ -388,14 +362,13 @@
 ifdef LOCAL_INSTRUMENTATION_FOR
    # link against the jar with full original names (before proguard processing).
    link_instr_classes_jack := $(link_instr_intermediates_dir.COMMON)/classes.noshrob.jack
-   full_jack_libs += $(link_instr_classes_jack)
+   full_shared_jack_libs += $(link_instr_classes_jack)
    full_jack_deps += $(link_instr_classes_jack)
 endif  # LOCAL_INSTRUMENTATION_FOR
 endif  # !LOCAL_IS_HOST_MODULE
 
 # Propagate local configuration options to this target.
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ALL_JACK_LIBRARIES:= $(full_jack_libs)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_SHARED_LIBRARIES:= $(full_shared_jack_libs)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
 
 endif  # need_compile_java
-endif # LOCAL_JACK_ENABLED
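
As a quick stand-alone illustration of the new unrecognized-source check in java_common.mk above (the module name, makefile path, and file list below are invented for the example), the same filter-out yields a warning for any extension the Java build does not know how to handle:

# Illustrative only, not part of the change. Run with: make -f sketch.mk
LOCAL_MODULE := ExampleApp
LOCAL_MODULE_MAKEFILE := packages/apps/Example/Android.mk
LOCAL_SRC_FILES := src/Main.java src/IExample.aidl assets/readme.txt

my_unknown_src_files := $(filter-out \
  %.java %.aidl %.proto %.logtags %.fs %.rs, \
  $(LOCAL_SRC_FILES))
ifneq ($(my_unknown_src_files),)
# Warns about assets/readme.txt, the only file with an unrecognized extension.
$(warning $(LOCAL_MODULE_MAKEFILE): $(LOCAL_MODULE): Unused source files: $(my_unknown_src_files))
endif

all: ;
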
diff --git a/core/java_library.mk b/core/java_library.mk
index 81a4a6a..283e9ad 100644
--- a/core/java_library.mk
+++ b/core/java_library.mk
@@ -28,11 +28,9 @@
 include $(BUILD_SYSTEM)/configure_local_jack.mk
 #################################
 
-ifdef LOCAL_JACK_ENABLED
 ifdef LOCAL_IS_STATIC_JAVA_LIBRARY
 LOCAL_BUILT_MODULE_STEM := classes.jack
 endif
-endif
 
 intermediates.COMMON := $(call local-intermediates-dir,COMMON)
 
@@ -47,12 +45,8 @@
 ifeq (true,$(EMMA_INSTRUMENT))
 ifeq (true,$(LOCAL_EMMA_INSTRUMENT))
 ifeq (true,$(EMMA_INSTRUMENT_STATIC))
-ifdef LOCAL_JACK_ENABLED
 # Jack supports coverage with Jacoco
 LOCAL_STATIC_JAVA_LIBRARIES += jacocoagent
-else
-LOCAL_STATIC_JAVA_LIBRARIES += emma
-endif # LOCAL_JACK_ENABLED
 endif # LOCAL_EMMA_INSTRUMENT
 endif # EMMA_INSTRUMENT_STATIC
 else
@@ -66,44 +60,28 @@
 ifeq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
 # No dex; all we want are the .class files with resources.
 $(common_javalib.jar) : $(java_resource_sources)
-ifdef LOCAL_PROGUARD_ENABLED
-$(common_javalib.jar) : $(full_classes_proguard_jar)
-else
 $(common_javalib.jar) : $(full_classes_jar)
-endif
 	@echo "target Static Jar: $(PRIVATE_MODULE) ($@)"
 	$(copy-file-to-target)
 
-ifdef LOCAL_JACK_ENABLED
 $(LOCAL_BUILT_MODULE) : $(full_classes_jack)
-else
-$(LOCAL_BUILT_MODULE) : $(common_javalib.jar)
-endif
 	$(copy-file-to-target)
 
 else # !LOCAL_IS_STATIC_JAVA_LIBRARY
 
 $(common_javalib.jar): PRIVATE_DEX_FILE := $(built_dex)
-$(common_javalib.jar): PRIVATE_SOURCE_ARCHIVE := $(full_classes_jarjar_jar)
-$(common_javalib.jar): PRIVATE_DONT_DELETE_JAR_DIRS := $(LOCAL_DONT_DELETE_JAR_DIRS)
 $(common_javalib.jar) : $(built_dex) $(java_resource_sources) | $(ZIPTIME)
 	@echo "target Jar: $(PRIVATE_MODULE) ($@)"
-ifdef LOCAL_JACK_ENABLED
 	$(create-empty-package)
-else
-	$(call initialize-package-file,$(PRIVATE_SOURCE_ARCHIVE),$@)
-endif
 	$(add-dex-to-package)
-ifdef LOCAL_JACK_ENABLED
 	$(add-carried-jack-resources)
-endif
 	$(remove-timestamps-from-package)
 
 ifdef LOCAL_DEX_PREOPT
 ifneq ($(dexpreopt_boot_jar_module),) # boot jar
 # boot jar's rules are defined in dex_preopt.mk
 dexpreopted_boot_jar := $(DEXPREOPT_BOOT_JAR_DIR_FULL_PATH)/$(dexpreopt_boot_jar_module)_nodex.jar
-$(LOCAL_BUILT_MODULE) : $(dexpreopted_boot_jar) | $(ACP)
+$(LOCAL_BUILT_MODULE) : $(dexpreopted_boot_jar)
 	$(call copy-file-to-target)
 
 # For libart boot jars, we don't have .odex files.
@@ -114,7 +92,7 @@
 	@echo "Dexpreopt Jar: $(PRIVATE_MODULE) ($@)"
 	$(call dexpreopt-one-file,$<,$@)
 
-$(LOCAL_BUILT_MODULE) : $(common_javalib.jar) | $(ACP)
+$(LOCAL_BUILT_MODULE) : $(common_javalib.jar)
 	$(call copy-file-to-target)
 ifneq (nostripping,$(LOCAL_DEX_PREOPT))
 	$(call dexpreopt-remove-classes.dex,$@)
@@ -123,7 +101,7 @@
 endif # ! boot jar
 
 else # LOCAL_DEX_PREOPT
-$(LOCAL_BUILT_MODULE) : $(common_javalib.jar) | $(ACP)
+$(LOCAL_BUILT_MODULE) : $(common_javalib.jar)
 	$(call copy-file-to-target)
 
 endif # LOCAL_DEX_PREOPT
diff --git a/core/legacy_prebuilts.mk b/core/legacy_prebuilts.mk
deleted file mode 100644
index f4633d0..0000000
--- a/core/legacy_prebuilts.mk
+++ /dev/null
@@ -1,32 +0,0 @@
-#
-# Copyright (C) 2010 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is the list of modules grandfathered to use ALL_PREBUILT
-
-# DO NOT ADD ANY NEW MODULE TO THIS FILE
-#
-# ALL_PREBUILT modules are hard to control and audit and we don't want
-# to add any new such module in the system
-
-GRANDFATHERED_ALL_PREBUILT := \
-	bmgr \
-	ime \
-	input \
-	monkey \
-	pm \
-	RFFspeed_501.bmd \
-	RFFstd_501.bmd \
-	svc
diff --git a/core/main.mk b/core/main.mk
index f9aad2b..08f9eb6 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -57,9 +57,6 @@
 
 BUILD_SYSTEM := $(TOPDIR)build/core
 
-# Ensure JAVA_NOT_REQUIRED is not set externally.
-JAVA_NOT_REQUIRED := false
-
 # This is the default target.  It must be the first declared target.
 .PHONY: droid
 DEFAULT_GOAL := droid
@@ -80,6 +77,7 @@
     stnod systemtarball-nodeps \
     userdataimage-nodeps userdatatarball-nodeps \
     cacheimage-nodeps \
+    bptimage-nodeps \
     vendorimage-nodeps \
     ramdisk-nodeps \
     bootimage-nodeps \
@@ -99,22 +97,20 @@
 # and host information.
 include $(BUILD_SYSTEM)/config.mk
 
-relaunch_with_ninja :=
-ifneq ($(USE_NINJA),false)
-ifndef BUILDING_WITH_NINJA
-relaunch_with_ninja := true
-endif
+# Default soong to on
+USE_SOONG ?= true
+
+ifndef KATI
+ifdef USE_NINJA
+$(warning USE_NINJA is ignored. Ninja is always used.)
 endif
 
-ifeq ($(relaunch_with_ninja),true)
 # Mark this is a ninja build.
 $(shell mkdir -p $(OUT_DIR) && touch $(OUT_DIR)/ninja_build)
 include build/core/ninja.mk
-else # !relaunch_with_ninja
-ifndef BUILDING_WITH_NINJA
-# Remove ninja build mark if it exists.
-$(shell rm -f $(OUT_DIR)/ninja_build)
-endif
+else # KATI
+
+include $(SOONG_MAKEVARS_MK)
 
 # Write the build number to a file so it can be read back in
 # without changing the command line every time.  Avoids rebuilds
@@ -145,8 +141,9 @@
 -include vendor/google/build/config.mk
 
 VERSION_CHECK_SEQUENCE_NUMBER := 6
+JAVA_NOT_REQUIRED_CHECKED :=
 -include $(OUT_DIR)/versions_checked.mk
-ifneq ($(VERSION_CHECK_SEQUENCE_NUMBER),$(VERSIONS_CHECKED))
+ifneq ($(VERSION_CHECK_SEQUENCE_NUMBER)$(JAVA_NOT_REQUIRED),$(VERSIONS_CHECKED)$(JAVA_NOT_REQUIRED_CHECKED))
 
 $(info Checking build tools versions...)
 
@@ -177,7 +174,7 @@
 $(error Directory names containing spaces not supported)
 endif
 
-ifeq ($(JAVA_NOT_REQUIRED), false)
+ifneq ($(JAVA_NOT_REQUIRED),true)
 java_version_str := $(shell unset _JAVA_OPTIONS && java -version 2>&1)
 javac_version_str := $(shell unset _JAVA_OPTIONS && javac -version 2>&1)
 
@@ -279,6 +276,8 @@
         > $(OUT_DIR)/versions_checked.mk)
 $(shell echo 'BUILD_EMULATOR ?= $(BUILD_EMULATOR)' \
         >> $(OUT_DIR)/versions_checked.mk)
+$(shell echo 'JAVA_NOT_REQUIRED_CHECKED := $(JAVA_NOT_REQUIRED)' \
+        >> $(OUT_DIR)/versions_checked.mk)
 endif
 
 # These are the modifier targets that don't do anything themselves, but
@@ -294,6 +293,12 @@
 # Bring in standard build system definitions.
 include $(BUILD_SYSTEM)/definitions.mk
 
+ifneq ($(USE_SOONG),true)
+$(eval $(call copy-toolchain-library,libgcc))
+$(eval $(call copy-toolchain-library,libatomic))
+$(eval $(call copy-toolchain-library,libgcov))
+endif
+
 # Bring in dex_preopt.mk
 include $(BUILD_SYSTEM)/dex_preopt.mk
 
@@ -568,22 +573,6 @@
   $(call assert-product-vars, __STASHED)
 endif
 
-include $(BUILD_SYSTEM)/legacy_prebuilts.mk
-ifneq ($(filter-out $(GRANDFATHERED_ALL_PREBUILT),$(strip $(notdir $(ALL_PREBUILT)))),)
-  $(warning *** Some files have been added to ALL_PREBUILT.)
-  $(warning *)
-  $(warning * ALL_PREBUILT is a deprecated mechanism that)
-  $(warning * should not be used for new files.)
-  $(warning * As an alternative, use PRODUCT_COPY_FILES in)
-  $(warning * the appropriate product definition.)
-  $(warning * build/target/product/core.mk is the product)
-  $(warning * definition used in all products.)
-  $(warning *)
-  $(foreach bad_prebuilt,$(filter-out $(GRANDFATHERED_ALL_PREBUILT),$(strip $(notdir $(ALL_PREBUILT)))),$(warning * unexpected $(bad_prebuilt) in ALL_PREBUILT))
-  $(warning *)
-  $(error ALL_PREBUILT contains unexpected files)
-endif
-
 # -------------------------------------------------------------------
 # All module makefiles have been included at this point.
 # -------------------------------------------------------------------
@@ -611,19 +600,33 @@
 #
 # Resolve the required module name to 32-bit or 64-bit variant.
 # Get a list of corresponding 32-bit module names, if one exists.
+ifneq ($(TARGET_TRANSLATE_2ND_ARCH),true)
 define get-32-bit-modules
-$(strip $(foreach m,$(1),\
+$(sort $(foreach m,$(1),\
   $(if $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).CLASS),\
-    $(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX))))
+    $(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX))\
+  $(if $(ALL_MODULES.$(m)$(HOST_2ND_ARCH_MODULE_SUFFIX).CLASS),\
+    $(m)$(HOST_2ND_ARCH_MODULE_SUFFIX))\
+    ))
 endef
 # Get a list of corresponding 32-bit module names, if one exists;
 # otherwise return the original module name
 define get-32-bit-modules-if-we-can
-$(strip $(foreach m,$(1),\
-  $(if $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).CLASS),\
-    $(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX),
-    $(m))))
+$(sort $(foreach m,$(1),\
+  $(if $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).CLASS)$(ALL_MODULES.$(m)$(HOST_2ND_ARCH_MODULE_SUFFIX).CLASS),\
+    $(if $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).CLASS),$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX)) \
+    $(if $(ALL_MODULES.$(m)$(HOST_2ND_ARCH_MODULE_SUFFIX).CLASS),$(m)$(HOST_2ND_ARCH_MODULE_SUFFIX)),\
+  $(m))))
 endef
+else  # TARGET_TRANSLATE_2ND_ARCH
+# For binary translation config, by default only install the first arch.
+define get-32-bit-modules
+endef
+
+define get-32-bit-modules-if-we-can
+$(strip $(1))
+endef
+endif  # TARGET_TRANSLATE_2ND_ARCH
 
 # If a module is for a cross host os, the required modules must be for
 # that OS too.
@@ -874,9 +877,6 @@
 # This is used to to get the ordering right, you can also use these,
 # but they're considered undocumented, so don't complain if their
 # behavior changes.
-.PHONY: prebuilt
-prebuilt: $(ALL_PREBUILT)
-
 # An internal target that depends on all copied headers
 # (see copy_headers.make).  Other targets that need the
 # headers to be copied first can depend on this target.
@@ -887,9 +887,8 @@
 
 # All the droid stuff, in directories
 .PHONY: files
-files: prebuilt \
-        $(modules_to_install) \
-        $(INSTALLED_ANDROID_INFO_TXT_TARGET)
+files: $(modules_to_install) \
+       $(INSTALLED_ANDROID_INFO_TXT_TARGET)
 
 # -------------------------------------------------------------------
 
@@ -924,6 +923,9 @@
 .PHONY: cacheimage
 cacheimage: $(INSTALLED_CACHEIMAGE_TARGET)
 
+.PHONY: bptimage
+bptimage: $(INSTALLED_BPTIMAGE_TARGET)
+
 .PHONY: vendorimage
 vendorimage: $(INSTALLED_VENDORIMAGE_TARGET)
 
@@ -953,6 +955,7 @@
 	$(INSTALLED_RECOVERYIMAGE_TARGET) \
 	$(INSTALLED_USERDATAIMAGE_TARGET) \
 	$(INSTALLED_CACHEIMAGE_TARGET) \
+	$(INSTALLED_BPTIMAGE_TARGET) \
 	$(INSTALLED_VENDORIMAGE_TARGET) \
 	$(INSTALLED_FILES_FILE) \
 	$(INSTALLED_FILES_FILE_VENDOR)
@@ -1074,6 +1077,9 @@
 target-native-tests : native-target-tests
 tests : host-tests target-tests
 
+# Phony target to run all java compilations that use javac instead of jack.
+.PHONY: javac-check
+
 # To catch more build breakage, check build tests modules in eng and userdebug builds.
 ifneq ($(ANDROID_NO_TEST_CHECK),true)
 ifneq ($(TARGET_BUILD_PDK),true)
@@ -1092,7 +1098,7 @@
 $(foreach module,$(sample_MODULES),$(eval $(call \
         copy-one-file,$(module),$(sample_APKS_DEST_PATH)/$(notdir $(module)))))
 sample_ADDITIONAL_INSTALLED := \
-        $(filter-out $(modules_to_install) $(modules_to_check) $(ALL_PREBUILT),$(sample_MODULES))
+        $(filter-out $(modules_to_install) $(modules_to_check),$(sample_MODULES))
 samplecode: $(sample_APKS_COLLECTION)
 	@echo "Collect sample code apks: $^"
 	# remove apks that are not intended to be installed.
@@ -1126,4 +1132,9 @@
 .PHONY: nothing
 nothing:
 	@echo Successfully read the makefiles.
-endif # !relaunch_with_ninja
+
+.PHONY: tidy_only
+tidy_only:
+	@echo Successfully made tidy_only.
+
+endif # KATI
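
To make the reworked 32-bit module resolution in main.mk above easier to follow, here is a stand-alone sketch with a made-up module database; the _32 and _host32 suffixes and the libfoo/libbar names are assumptions for illustration, not real build values:

# Illustrative only. Pretend only libfoo has a registered target 32-bit variant.
TARGET_2ND_ARCH_MODULE_SUFFIX := _32
HOST_2ND_ARCH_MODULE_SUFFIX := _host32
ALL_MODULES.libfoo_32.CLASS := SHARED_LIBRARIES

define get-32-bit-modules-if-we-can
$(sort $(foreach m,$(1),\
  $(if $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).CLASS)$(ALL_MODULES.$(m)$(HOST_2ND_ARCH_MODULE_SUFFIX).CLASS),\
    $(if $(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).CLASS),$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX)) \
    $(if $(ALL_MODULES.$(m)$(HOST_2ND_ARCH_MODULE_SUFFIX).CLASS),$(m)$(HOST_2ND_ARCH_MODULE_SUFFIX)),\
  $(m))))
endef

# libfoo resolves to its 32-bit variant, libbar is kept as-is; prints "libbar libfoo_32".
$(info $(call get-32-bit-modules-if-we-can,libfoo libbar))

all: ;
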
diff --git a/core/ninja.mk b/core/ninja.mk
index 9d0ff9a..49953d5 100644
--- a/core/ninja.mk
+++ b/core/ninja.mk
@@ -1,15 +1,6 @@
-NINJA ?= prebuilts/ninja/$(HOST_PREBUILT_TAG)/ninja
+NINJA ?= prebuilts/build-tools/$(HOST_PREBUILT_TAG)/bin/ninja
 
-ifeq ($(USE_SOONG),true)
-USE_SOONG_FOR_KATI := true
-endif
-
-ifeq ($(USE_SOONG_FOR_KATI),true)
 include $(BUILD_SYSTEM)/soong.mk
-else
-KATI ?= $(HOST_OUT_EXECUTABLES)/ckati
-MAKEPARALLEL ?= $(HOST_OUT_EXECUTABLES)/makeparallel
-endif
 
 KATI_OUTPUT_PATTERNS := $(OUT_DIR)/build%.ninja $(OUT_DIR)/ninja%.sh
 
@@ -27,6 +18,7 @@
 	ECLIPSE-% \
 	PRODUCT-% \
 	boottarball-nodeps \
+	brillo_tests \
 	btnod \
 	build-art% \
 	build_kernel-nodeps \
@@ -70,7 +62,7 @@
 -include vendor/google/build/ninja_config.mk
 
 # Any Android goals that need to be built.
-ANDROID_GOALS := $(filter-out $(KATI_OUTPUT_PATTERNS) $(KATI) $(MAKEPARALLEL),\
+ANDROID_GOALS := $(filter-out $(KATI_OUTPUT_PATTERNS) $(CKATI) $(MAKEPARALLEL),\
     $(sort $(ORIGINAL_MAKECMDGOALS) $(MAKECMDGOALS)))
 # Goals we need to pass to Ninja.
 NINJA_GOALS := $(filter-out $(NINJA_EXCLUDE_GOALS), $(ANDROID_GOALS))
@@ -114,25 +106,33 @@
 NINJA_STATUS := [%p %s/%t]$(space)
 endif
 
+NINJA_EXTRA_ARGS :=
+
 ifneq (,$(filter showcommands,$(ORIGINAL_MAKECMDGOALS)))
-NINJA_ARGS += "-v"
+NINJA_EXTRA_ARGS += "-v"
 endif
 
+# Make multiple rules to generate the same target an error instead of
+# proceeding with undefined behavior.
+NINJA_EXTRA_ARGS += -w dupbuild=err
+
 ifdef USE_GOMA
 KATI_MAKEPARALLEL := $(MAKEPARALLEL)
 # Ninja runs remote jobs (i.e., commands which contain gomacc) with
 # this parallelism. Note the parallelism of all other jobs is still
 # limited by the -j flag passed to GNU make.
 NINJA_REMOTE_NUM_JOBS ?= 500
-NINJA_ARGS += -j$(NINJA_REMOTE_NUM_JOBS)
+NINJA_EXTRA_ARGS += -j$(NINJA_REMOTE_NUM_JOBS)
 else
 NINJA_MAKEPARALLEL := $(MAKEPARALLEL) --ninja
 endif
 
+NINJA_ARGS += $(NINJA_EXTRA_ARGS)
+
 ifeq ($(USE_SOONG),true)
 COMBINED_BUILD_NINJA := $(OUT_DIR)/combined$(KATI_NINJA_SUFFIX).ninja
 
-$(COMBINED_BUILD_NINJA): $(KATI_BUILD_NINJA) $(SOONG_ANDROID_MK)
+$(COMBINED_BUILD_NINJA): $(KATI_BUILD_NINJA)
 	$(hide) echo "builddir = $(OUT_DIR)" > $(COMBINED_BUILD_NINJA)
 	$(hide) echo "subninja $(SOONG_BUILD_NINJA)" >> $(COMBINED_BUILD_NINJA)
 	$(hide) echo "subninja $(KATI_BUILD_NINJA)" >> $(COMBINED_BUILD_NINJA)
@@ -159,34 +159,9 @@
 ifeq ($(KATI_EMULATE_FIND),false)
   KATI_FIND_EMULATOR :=
 endif
-$(KATI_BUILD_NINJA): $(KATI) $(MAKEPARALLEL) $(DUMMY_OUT_MKS) $(SOONG_ANDROID_MK) FORCE
+$(KATI_BUILD_NINJA): $(CKATI) $(MAKEPARALLEL) $(DUMMY_OUT_MKS) run_soong FORCE
 	@echo Running kati to generate build$(KATI_NINJA_SUFFIX).ninja...
-	+$(hide) $(KATI_MAKEPARALLEL) $(KATI) --ninja --ninja_dir=$(OUT_DIR) --ninja_suffix=$(KATI_NINJA_SUFFIX) --regen --ignore_dirty=$(OUT_DIR)/% --no_ignore_dirty=$(SOONG_ANDROID_MK) --ignore_optional_include=$(OUT_DIR)/%.P --detect_android_echo $(KATI_FIND_EMULATOR) -f build/core/main.mk $(KATI_GOALS) --gen_all_targets BUILDING_WITH_NINJA=true SOONG_ANDROID_MK=$(SOONG_ANDROID_MK)
-
-ifneq ($(USE_SOONG_FOR_KATI),true)
-KATI_CXX := $(CLANG_CXX) $(CLANG_HOST_GLOBAL_CFLAGS) $(CLANG_HOST_GLOBAL_CPPFLAGS)
-KATI_LD := $(CLANG_CXX) $(CLANG_HOST_GLOBAL_LDFLAGS)
-# Build static ckati. Unfortunately Mac OS X doesn't officially support static exectuables.
-ifeq ($(BUILD_OS),linux)
-# We need everything in libpthread.a otherwise C++11's threading library will be disabled.
-KATI_LD += -static -Wl,--whole-archive -lpthread -Wl,--no-whole-archive -ldl
-endif
-
-KATI_INTERMEDIATES_PATH := $(HOST_OUT_INTERMEDIATES)/EXECUTABLES/ckati_intermediates
-KATI_BIN_PATH := $(HOST_OUT_EXECUTABLES)
-include build/kati/Makefile.ckati
-
-MAKEPARALLEL_CXX := $(CLANG_CXX) $(CLANG_HOST_GLOBAL_CFLAGS) $(CLANG_HOST_GLOBAL_CPPFLAGS)
-MAKEPARALLEL_LD := $(CLANG_CXX) $(CLANG_HOST_GLOBAL_LDFLAGS)
-# Build static makeparallel. Unfortunately Mac OS X doesn't officially support static exectuables.
-ifeq ($(BUILD_OS),linux)
-MAKEPARALLEL_LD += -static
-endif
-
-MAKEPARALLEL_INTERMEDIATES_PATH := $(HOST_OUT_INTERMEDIATES)/EXECUTABLES/makeparallel_intermediates
-MAKEPARALLEL_BIN_PATH := $(HOST_OUT_EXECUTABLES)
-include build/tools/makeparallel/Makefile
-endif
+	+$(hide) $(KATI_MAKEPARALLEL) $(CKATI) --ninja --ninja_dir=$(OUT_DIR) --ninja_suffix=$(KATI_NINJA_SUFFIX) --regen --ignore_dirty=$(OUT_DIR)/% --no_ignore_dirty=$(SOONG_ANDROID_MK) --no_ignore_dirty=$(SOONG_MAKEVARS_MK) --ignore_optional_include=$(OUT_DIR)/%.P --detect_android_echo $(KATI_FIND_EMULATOR) -f build/core/main.mk $(KATI_GOALS) --gen_all_targets BUILDING_WITH_NINJA=true SOONG_ANDROID_MK=$(SOONG_ANDROID_MK) SOONG_MAKEVARS_MK=$(SOONG_MAKEVARS_MK)
 
 .PHONY: FORCE
 FORCE:
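
The net effect of the NINJA_EXTRA_ARGS refactoring above is that both the soong invocation (which receives NINJA_EXTRA_ARGS) and the final ninja invocation (which receives NINJA_ARGS) now carry -w dupbuild=err. A compressed stand-alone sketch, with hypothetical showcommands/goma settings:

# Illustrative only; goal and goma values are made up.
ORIGINAL_MAKECMDGOALS := droid showcommands
USE_GOMA := true

NINJA_EXTRA_ARGS :=
ifneq (,$(filter showcommands,$(ORIGINAL_MAKECMDGOALS)))
NINJA_EXTRA_ARGS += "-v"
endif
NINJA_EXTRA_ARGS += -w dupbuild=err
ifdef USE_GOMA
NINJA_REMOTE_NUM_JOBS ?= 500
NINJA_EXTRA_ARGS += -j$(NINJA_REMOTE_NUM_JOBS)
endif
NINJA_ARGS += $(NINJA_EXTRA_ARGS)

# Prints (modulo spacing): NINJA_ARGS = "-v" -w dupbuild=err -j500
$(info NINJA_ARGS = $(NINJA_ARGS))

all: ;
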
diff --git a/core/no_java_path/jar b/core/no_java_path/jar
new file mode 120000
index 0000000..8586397
--- /dev/null
+++ b/core/no_java_path/jar
@@ -0,0 +1 @@
+java
\ No newline at end of file
diff --git a/core/no_java_path/jarsigner b/core/no_java_path/jarsigner
new file mode 120000
index 0000000..8586397
--- /dev/null
+++ b/core/no_java_path/jarsigner
@@ -0,0 +1 @@
+java
\ No newline at end of file
diff --git a/core/no_java_path/java b/core/no_java_path/java
new file mode 100755
index 0000000..f3422f3
--- /dev/null
+++ b/core/no_java_path/java
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+echo "Error: JAVA_NOT_REQUIRED=true, $(basename $0) is unavailable." 1>&2
+exit 1
diff --git a/core/no_java_path/javac b/core/no_java_path/javac
new file mode 120000
index 0000000..8586397
--- /dev/null
+++ b/core/no_java_path/javac
@@ -0,0 +1 @@
+java
\ No newline at end of file
diff --git a/core/no_java_path/keytool b/core/no_java_path/keytool
new file mode 120000
index 0000000..8586397
--- /dev/null
+++ b/core/no_java_path/keytool
@@ -0,0 +1 @@
+java
\ No newline at end of file
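
These no_java_path stubs only take effect if they shadow the real JDK binaries on PATH. The wiring is not part of this diff; presumably the directory is prepended to PATH when JAVA_NOT_REQUIRED=true, along the lines of the sketch below (an assumption, shown for context only):

# Assumed wiring elsewhere in the build (not shown in this change): make any
# accidental java/javac/jar/jarsigner/keytool invocation fail loudly.
ifeq ($(JAVA_NOT_REQUIRED),true)
  export PATH := $(abspath build/core/no_java_path):$(PATH)
endif

all: ;
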
diff --git a/core/package.mk b/core/package.mk
index 78b65db..8c2c435 100644
--- a/core/package.mk
+++ b/core/package.mk
@@ -2,6 +2,10 @@
 # TARGET_ARCH and TARGET_2ND_ARCH.
 # To build it for TARGET_2ND_ARCH in a 64bit product, use "LOCAL_MULTILIB := 32".
 
+ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+LOCAL_MULTILIB := first
+endif
+
 my_prefix := TARGET_
 include $(BUILD_SYSTEM)/multilib.mk
 
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 551f18e..9545823 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -209,12 +209,10 @@
 endif # !custom
 LOCAL_PROGUARD_FLAGS := $(addprefix -include ,$(proguard_options_file)) $(LOCAL_PROGUARD_FLAGS)
 
-ifdef LOCAL_JACK_ENABLED
 ifndef LOCAL_JACK_PROGUARD_FLAGS
     LOCAL_JACK_PROGUARD_FLAGS := $(LOCAL_PROGUARD_FLAGS)
 endif
 LOCAL_JACK_PROGUARD_FLAGS := $(addprefix -include ,$(proguard_options_file)) $(LOCAL_JACK_PROGUARD_FLAGS)
-endif # LOCAL_JACK_ENABLED
 
 ifeq (true,$(EMMA_INSTRUMENT))
 ifndef LOCAL_EMMA_INSTRUMENT
@@ -227,52 +225,35 @@
 LOCAL_EMMA_INSTRUMENT := false
 endif # EMMA_INSTRUMENT is true
 
-ifeq (true,$(LOCAL_EMMA_INSTRUMENT))
-ifeq (true,$(EMMA_INSTRUMENT_STATIC))
-ifdef LOCAL_JACK_ENABLED
-# Jack supports coverage with Jacoco
 ifneq ($(LOCAL_SRC_FILES)$(LOCAL_STATIC_JAVA_LIBRARIES)$(LOCAL_SOURCE_FILES_ALL_GENERATED),)
 # Only add jacocoagent if the package contains some java code
+ifeq (true,$(LOCAL_EMMA_INSTRUMENT))
+ifeq (true,$(EMMA_INSTRUMENT_STATIC))
+# Jack supports coverage with Jacoco
 LOCAL_STATIC_JAVA_LIBRARIES += jacocoagent
-endif # Contains java code
-else
-LOCAL_STATIC_JAVA_LIBRARIES += emma
-endif # LOCAL_JACK_ENABLED
-else
+else  # ! EMMA_INSTRUMENT_STATIC
 ifdef LOCAL_SDK_VERSION
 ifdef TARGET_BUILD_APPS
 # In unbundled build, merge the coverage library into the apk.
-ifdef LOCAL_JACK_ENABLED
 # Jack supports coverage with Jacoco
-ifneq ($(LOCAL_SRC_FILES)$(LOCAL_STATIC_JAVA_LIBRARIES)$(LOCAL_SOURCE_FILES_ALL_GENERATED),)
-# Only add jacocoagent if the package contains some java code
 LOCAL_STATIC_JAVA_LIBRARIES += jacocoagent
 # Exclude jacoco classes from proguard
 LOCAL_PROGUARD_FLAGS += -include $(BUILD_SYSTEM)/proguard.jacoco.flags
 LOCAL_JACK_PROGUARD_FLAGS += -include $(BUILD_SYSTEM)/proguard.jacoco.flags
-endif # Contains java code
-else
-LOCAL_STATIC_JAVA_LIBRARIES += emma
-endif # LOCAL_JACK_ENABLED
-else
+else # ! TARGET_BUILD_APPS
 # If build against the SDK in full build, core.jar is not used
 # so coverage classes are not present.
-ifdef LOCAL_JACK_ENABLED
 # Jack needs jacoco on the classpath but we do not want it to be in
 # the final apk. While it is a static library, we add it to the
 # LOCAL_JAVA_LIBRARIES which are only present on the classpath.
 # Note: we have nothing to do for proguard since jacoco will be
 # on the classpath only, thus not modified during the compilation.
 LOCAL_JAVA_LIBRARIES += jacocoagent
-else
-# We have to use prebuilt emma.jar to make Proguard happy;
-# Otherwise emma classes are included in core.jar.
-LOCAL_PROGUARD_FLAGS += -libraryjars $(EMMA_JAR)
-endif # LOCAL_JACK_ENABLED
-endif # full build
+endif # TARGET_BUILD_APPS
 endif # LOCAL_SDK_VERSION
-endif # EMMA_INSTRUMENT_STATIC
+endif # ! EMMA_INSTRUMENT_STATIC
 endif # LOCAL_EMMA_INSTRUMENT
+endif # Contains java code
 
 rs_compatibility_jni_libs :=
 
@@ -393,30 +374,13 @@
 
 endif  # LOCAL_USE_AAPT2
 
+# Make sure to generate R.java before compiling.
 # Other modules should depend on the BUILT module if
 # they want to use this module's R.java file.
-$(LOCAL_BUILT_MODULE): $(R_file_stamp)
-
-ifdef LOCAL_JACK_ENABLED
-ifneq ($(built_dex_intermediate),)
-$(built_dex_intermediate): $(R_file_stamp)
-endif
-ifneq ($(noshrob_classes_jack),)
-$(noshrob_classes_jack): $(R_file_stamp)
-endif
-ifneq ($(full_classes_jack),)
-$(full_classes_jack): $(R_file_stamp)
-$(jack_check_timestamp): $(R_file_stamp)
-endif
-endif # LOCAL_JACK_ENABLED
-
-ifneq ($(full_classes_jar),)
-# If full_classes_jar is non-empty, we're building sources.
-# If we're building sources, the initial javac step (which
-# produces full_classes_compiled_jar) needs to ensure the
-# R.java and Manifest.java files have been generated first.
-$(full_classes_compiled_jar): $(R_file_stamp)
-endif
+$(LOCAL_BUILT_MODULE) \
+$(full_classes_compiled_jar) \
+$(built_dex_intermediate) $(noshrob_classes_jack) $(full_classes_jack) $(jack_check_timestamp) \
+  :  $(R_file_stamp)
 
 endif  # need_compile_res
 
@@ -462,12 +426,9 @@
 
 ifneq ($(full_classes_jar),)
 $(LOCAL_BUILT_MODULE): PRIVATE_DEX_FILE := $(built_dex)
-# Use the jarjar processed arhive as the initial package file.
-$(LOCAL_BUILT_MODULE): PRIVATE_SOURCE_ARCHIVE := $(full_classes_jarjar_jar)
 $(LOCAL_BUILT_MODULE): $(built_dex)
 else
 $(LOCAL_BUILT_MODULE): PRIVATE_DEX_FILE :=
-$(LOCAL_BUILT_MODULE): PRIVATE_SOURCE_ARCHIVE :=
 endif # full_classes_jar
 
 include $(BUILD_SYSTEM)/install_jni_libs.mk
@@ -494,6 +455,7 @@
 endif
 private_key := $(LOCAL_CERTIFICATE).pk8
 certificate := $(LOCAL_CERTIFICATE).x509.pem
+additional_certificates := $(foreach c,$(LOCAL_ADDITIONAL_CERTIFICATES), $(c).x509.pem $(c).pk8)
 
 $(LOCAL_BUILT_MODULE): $(private_key) $(certificate) $(SIGNAPK_JAR)
 $(LOCAL_BUILT_MODULE): PRIVATE_PRIVATE_KEY := $(private_key)
@@ -502,8 +464,8 @@
 PACKAGES.$(LOCAL_PACKAGE_NAME).PRIVATE_KEY := $(private_key)
 PACKAGES.$(LOCAL_PACKAGE_NAME).CERTIFICATE := $(certificate)
 
-$(LOCAL_BUILT_MODULE): PRIVATE_ADDITIONAL_CERTIFICATES := $(foreach c,\
-    $(LOCAL_ADDITIONAL_CERTIFICATES), $(c).x509.pem $(c).pk8)
+$(LOCAL_BUILT_MODULE): $(additional_certificates)
+$(LOCAL_BUILT_MODULE): PRIVATE_ADDITIONAL_CERTIFICATES := $(additional_certificates)
 
 # Define the rule to build the actual package.
 # PRIVATE_JNI_SHARED_LIBRARIES is a list of <abi>:<path_of_built_lib>.
@@ -525,7 +487,6 @@
     $(my_res_package) $(LOCAL_BUILT_MODULE): PRIVATE_PRODUCT_AAPT_PREF_CONFIG := $(PRODUCT_AAPT_PREF_CONFIG)
 endif
 endif
-$(LOCAL_BUILT_MODULE): PRIVATE_DONT_DELETE_JAR_DIRS := $(LOCAL_DONT_DELETE_JAR_DIRS)
 $(LOCAL_BUILT_MODULE) : $(jni_shared_libraries)
 ifdef LOCAL_USE_AAPT2
 $(LOCAL_BUILT_MODULE): PRIVATE_RES_PACKAGE := $(my_res_package)
@@ -535,22 +496,9 @@
 endif
 	@echo "target Package: $(PRIVATE_MODULE) ($@)"
 ifdef LOCAL_USE_AAPT2
-ifdef LOCAL_JACK_ENABLED
 	$(call copy-file-to-new-target)
-else
-	@# TODO: implement merge-two-packages.
-	$(if $(PRIVATE_SOURCE_ARCHIVE),\
-	  $(call merge-two-packages,$(PRIVATE_RES_PACKAGE) $(PRIVATE_SOURCE_ARCHIVE),$@),
-	  $(call copy-file-to-new-target))
-endif
-else  # LOCAL_USE_AAPT2
-ifdef LOCAL_JACK_ENABLED
+else  # ! LOCAL_USE_AAPT2
 	$(create-empty-package)
-else
-	$(if $(PRIVATE_SOURCE_ARCHIVE),\
-	  $(call initialize-package-file,$(PRIVATE_SOURCE_ARCHIVE),$@),\
-	  $(create-empty-package))
-endif
 	$(add-assets-to-package)
 endif  # LOCAL_USE_AAPT2
 ifneq ($(jni_shared_libraries),)
@@ -562,9 +510,7 @@
 else  # full_classes_jar
 	$(add-dex-to-package)
 endif  # full_classes_jar
-ifdef LOCAL_JACK_ENABLED
 	$(add-carried-jack-resources)
-endif
 ifdef LOCAL_DEX_PREOPT
 ifneq ($(BUILD_PLATFORM_ZIP),)
 	@# Keep a copy of apk with classes.dex unstripped
@@ -623,7 +569,7 @@
 	$(sign-package)
 
 # Rules to install the splits
-$(installed_apk_splits) : $(my_module_path)/$(LOCAL_MODULE)_%.apk : $(built_module_path)/package_%.apk | $(ACP)
+$(installed_apk_splits) : $(my_module_path)/$(LOCAL_MODULE)_%.apk : $(built_module_path)/package_%.apk
 	@echo "Install: $@"
 	$(copy-file-to-new-target)
 
diff --git a/core/phony_package.mk b/core/phony_package.mk
index 866b13c..b534335 100644
--- a/core/phony_package.mk
+++ b/core/phony_package.mk
@@ -7,7 +7,7 @@
 
 include $(BUILD_SYSTEM)/base_rules.mk
 
-$(LOCAL_BUILT_MODULE): $(LOCAL_MODULE_MAKEFILE_DEP) $(LOCAL_ADDITIONAL_DEPENDENCIES)
+$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)
 	$(hide) echo "Fake: $@"
 	$(hide) mkdir -p $(dir $@)
 	$(hide) touch $@
diff --git a/core/prebuilt.mk b/core/prebuilt.mk
index 428922b..f1edc8a 100644
--- a/core/prebuilt.mk
+++ b/core/prebuilt.mk
@@ -11,6 +11,10 @@
   LOCAL_HOST_PREFIX :=
 else
   my_prefix := TARGET_
+
+  ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+    LOCAL_MULTILIB := first
+  endif
 endif
 
 include $(BUILD_SYSTEM)/multilib.mk
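
Both the package.mk and prebuilt.mk hunks above do the same thing for a binary-translation product: force target modules to build only for the first architecture. A trivial stand-alone sketch with hypothetical values:

# Illustrative only.
TARGET_TRANSLATE_2ND_ARCH := true
LOCAL_MULTILIB := 32

ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
  LOCAL_MULTILIB := first
endif

# Prints: LOCAL_MULTILIB = first
$(info LOCAL_MULTILIB = $(LOCAL_MULTILIB))

all: ;
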
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index ee68427..069c2e2 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -111,7 +111,7 @@
 ifdef prebuilt_module_is_a_library
 export_includes := $(intermediates)/export_includes
 $(export_includes): PRIVATE_EXPORT_C_INCLUDE_DIRS := $(LOCAL_EXPORT_C_INCLUDE_DIRS)
-$(export_includes) : $(LOCAL_MODULE_MAKEFILE_DEP)
+$(export_includes) :
 	@echo Export includes file: $< -- $@
 	$(hide) mkdir -p $(dir $@) && rm -f $@
 ifdef LOCAL_EXPORT_C_INCLUDE_DIRS
@@ -178,6 +178,7 @@
   LOCAL_CERTIFICATE := $(DEFAULT_SYSTEM_DEV_CERTIFICATE)
   PACKAGES.$(LOCAL_MODULE).EXTERNAL_KEY := 1
 
+  $(built_module) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem
   $(built_module) : PRIVATE_PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
   $(built_module) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
 endif
@@ -204,6 +205,7 @@
   PACKAGES.$(LOCAL_MODULE).CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
   PACKAGES := $(PACKAGES) $(LOCAL_MODULE)
 
+  $(built_module) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem
   $(built_module) : PRIVATE_PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
   $(built_module) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
 endif
@@ -221,7 +223,7 @@
 #######################################
 ifneq ($(LOCAL_REPLACE_PREBUILT_APK_INSTALLED),)
 # There is a replacement for the prebuilt .apk we can install without any processing.
-$(built_module) : $(LOCAL_REPLACE_PREBUILT_APK_INSTALLED) | $(ACP)
+$(built_module) : $(LOCAL_REPLACE_PREBUILT_APK_INSTALLED)
 	$(transform-prebuilt-to-target)
 
 else  # ! LOCAL_REPLACE_PREBUILT_APK_INSTALLED
@@ -239,7 +241,7 @@
 endif
 $(built_module): PRIVATE_EMBEDDED_JNI_LIBS := $(embedded_prebuilt_jni_libs)
 
-$(built_module) : $(my_prebuilt_src_file) | $(ACP) $(ZIPALIGN) $(SIGNAPK_JAR) $(AAPT)
+$(built_module) : $(my_prebuilt_src_file) | $(ZIPALIGN) $(SIGNAPK_JAR) $(AAPT)
 	$(transform-prebuilt-to-target)
 	$(uncompress-shared-libs)
 ifdef LOCAL_DEX_PREOPT
@@ -283,14 +285,15 @@
 endif
 my_src_dir := $(LOCAL_PATH)/$(my_src_dir)
 
+$(built_apk_splits) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem
 $(built_apk_splits) : PRIVATE_PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
 $(built_apk_splits) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
-$(built_apk_splits) : $(built_module_path)/%.apk : $(my_src_dir)/%.apk | $(ACP) $(AAPT)
+$(built_apk_splits) : $(built_module_path)/%.apk : $(my_src_dir)/%.apk | $(AAPT)
 	$(copy-file-to-new-target)
 	$(sign-package)
 
 # Rules to install the split apks.
-$(installed_apk_splits) : $(my_module_path)/%.apk : $(built_module_path)/%.apk | $(ACP)
+$(installed_apk_splits) : $(my_module_path)/%.apk : $(built_module_path)/%.apk
 	@echo "Install: $@"
 	$(copy-file-to-new-target)
 
@@ -315,7 +318,7 @@
 ifneq ($(dexpreopt_boot_jar_module),) # boot jar
 # boot jar's rules are defined in dex_preopt.mk
 dexpreopted_boot_jar := $(DEXPREOPT_BOOT_JAR_DIR_FULL_PATH)/$(dexpreopt_boot_jar_module)_nodex.jar
-$(built_module) : $(dexpreopted_boot_jar) | $(ACP)
+$(built_module) : $(dexpreopted_boot_jar)
 	$(call copy-file-to-target)
 
 # For libart boot jars, we don't have .odex files.
@@ -326,37 +329,27 @@
 	@echo "Dexpreopt Jar: $(PRIVATE_MODULE) ($@)"
 	$(call dexpreopt-one-file,$<,$@)
 
-$(built_module) : $(my_prebuilt_src_file) | $(ACP)
+$(built_module) : $(my_prebuilt_src_file)
 	$(call copy-file-to-target)
 ifneq (nostripping,$(LOCAL_DEX_PREOPT))
 	$(call dexpreopt-remove-classes.dex,$@)
 endif
 endif # boot jar
 else # ! LOCAL_DEX_PREOPT
-$(built_module) : $(my_prebuilt_src_file) | $(ACP)
+$(built_module) : $(my_prebuilt_src_file)
 	$(call copy-file-to-target)
 endif # LOCAL_DEX_PREOPT
 
 else  # ! prebuilt_module_is_dex_javalib
+$(built_module) : $(my_prebuilt_src_file)
 ifneq ($(LOCAL_PREBUILT_STRIP_COMMENTS),)
-$(built_module) : $(my_prebuilt_src_file)
 	$(transform-prebuilt-to-target-strip-comments)
-ifeq ($(LOCAL_MODULE_CLASS),EXECUTABLES)
-	$(hide) chmod +x $@
-endif
-else ifneq ($(LOCAL_ACP_UNAVAILABLE),true)
-$(built_module) : $(my_prebuilt_src_file) | $(ACP)
-	$(transform-prebuilt-to-target)
-ifeq ($(LOCAL_MODULE_CLASS),EXECUTABLES)
-	$(hide) chmod +x $@
-endif
 else
-$(built_module) : $(my_prebuilt_src_file)
-	$(copy-file-to-target-with-cp)
+	$(transform-prebuilt-to-target)
+endif
 ifeq ($(LOCAL_MODULE_CLASS),EXECUTABLES)
 	$(hide) chmod +x $@
 endif
-endif
 endif # ! prebuilt_module_is_dex_javalib
 endif # LOCAL_MODULE_CLASS != APPS
 
@@ -372,7 +365,7 @@
 
 ifeq ($(prebuilt_module_is_dex_javalib),true)
 # For prebuilt shared Java library we don't have classes.jar.
-$(common_javalib_jar) : $(my_src_jar) | $(ACP)
+$(common_javalib_jar) : $(my_src_jar)
 	$(transform-prebuilt-to-target)
 
 else  # ! prebuilt_module_is_dex_javalib
@@ -390,10 +383,10 @@
 
 endif
 
-$(common_classes_jar) : $(my_src_jar) | $(ACP)
+$(common_classes_jar) : $(my_src_jar)
 	$(transform-prebuilt-to-target)
 
-$(common_javalib_jar) : $(common_classes_jar) | $(ACP)
+$(common_javalib_jar) : $(common_classes_jar)
 	$(transform-prebuilt-to-target)
 
 $(call define-jar-to-toc-rule, $(common_classes_jar))
@@ -435,15 +428,13 @@
 endif # LOCAL_IS_HOST_MODULE is not set
 
 ifneq ($(prebuilt_module_is_dex_javalib),true)
-ifneq ($(LOCAL_JILL_FLAGS),)
-$(error LOCAL_JILL_FLAGS is not supported any more, please use jack options in LOCAL_JACK_FLAGS instead)
-endif
 
 # We may be building classes.jack from a host jar for host dalvik Java library.
 $(intermediates.COMMON)/classes.jack : PRIVATE_JACK_FLAGS:=$(LOCAL_JACK_FLAGS)
 $(intermediates.COMMON)/classes.jack : PRIVATE_JACK_MIN_SDK_VERSION := 1
-$(intermediates.COMMON)/classes.jack : $(my_src_jar) $(LOCAL_MODULE_MAKEFILE_DEP) \
-        $(LOCAL_ADDITIONAL_DEPENDENCIES) $(JACK) | setup-jack-server
+$(intermediates.COMMON)/classes.jack : $(my_src_jar) \
+        $(LOCAL_ADDITIONAL_DEPENDENCIES) $(JACK_DEFAULT_ARGS) $(JACK) \
+        | setup-jack-server
 	$(transform-jar-to-jack)
 
 # Update timestamps of .toc files for prebuilts so dependents will be
@@ -454,6 +445,6 @@
 endif # ! prebuilt_module_is_dex_javalib
 endif # JAVA_LIBRARIES
 
-$(built_module) : $(LOCAL_MODULE_MAKEFILE_DEP) $(LOCAL_ADDITIONAL_DEPENDENCIES)
+$(built_module) : $(LOCAL_ADDITIONAL_DEPENDENCIES)
 
 my_prebuilt_src_file :=
diff --git a/core/shared_library_internal.mk b/core/shared_library_internal.mk
index b9a5e3e..cf35b5e 100644
--- a/core/shared_library_internal.mk
+++ b/core/shared_library_internal.mk
@@ -43,9 +43,9 @@
 ifeq ($(LOCAL_NO_LIBGCC),true)
 my_target_libgcc :=
 else
-my_target_libgcc := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBGCC)
+my_target_libgcc := $(call intermediates-dir-for,STATIC_LIBRARIES,libgcc,,,$(LOCAL_2ND_ARCH_VAR_PREFIX))/libgcc.a
 endif
-my_target_libatomic := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBATOMIC)
+my_target_libatomic := $(call intermediates-dir-for,STATIC_LIBRARIES,libatomic,,,$(LOCAL_2ND_ARCH_VAR_PREFIX))/libatomic.a
 ifeq ($(LOCAL_NO_CRT),true)
 my_target_crtbegin_so_o :=
 my_target_crtend_so_o :=
@@ -76,7 +76,8 @@
         $(all_libraries) \
         $(my_target_crtbegin_so_o) \
         $(my_target_crtend_so_o) \
-        $(LOCAL_MODULE_MAKEFILE_DEP) \
+        $(my_target_libgcc) \
+        $(my_target_libatomic) \
         $(LOCAL_ADDITIONAL_DEPENDENCIES)
 	$(transform-o-to-shared-lib)
 
diff --git a/core/soong.mk b/core/soong.mk
index 2463953..032efdf 100644
--- a/core/soong.mk
+++ b/core/soong.mk
@@ -1,15 +1,17 @@
 SOONG_OUT_DIR := $(OUT_DIR)/soong
-SOONG_HOST_EXECUTABLES := $(SOONG_OUT_DIR)/host/$(HOST_PREBUILT_TAG)/bin
-KATI := $(SOONG_HOST_EXECUTABLES)/ckati
-MAKEPARALLEL := $(SOONG_HOST_EXECUTABLES)/makeparallel
-
 SOONG := $(SOONG_OUT_DIR)/soong
 SOONG_BOOTSTRAP := $(SOONG_OUT_DIR)/.soong.bootstrap
 SOONG_BUILD_NINJA := $(SOONG_OUT_DIR)/build.ninja
-SOONG_ANDROID_MK := $(SOONG_OUT_DIR)/Android.mk
 SOONG_IN_MAKE := $(SOONG_OUT_DIR)/.soong.in_make
+SOONG_MAKEVARS_MK := $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT).mk
 SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.variables
 
+# Only include the Soong-generated Android.mk if we're merging the
+# Soong-defined binaries with Kati-defined binaries.
+ifeq ($(USE_SOONG),true)
+SOONG_ANDROID_MK := $(SOONG_OUT_DIR)/Android-$(TARGET_PRODUCT).mk
+endif
+
 # We need to rebootstrap soong if SOONG_OUT_DIR or the reverse path from
 # SOONG_OUT_DIR to TOP changes
 SOONG_NEEDS_REBOOTSTRAP :=
@@ -18,7 +20,7 @@
     SOONG_NEEDS_REBOOTSTRAP := FORCE
     $(warning soong_out_dir changed)
   endif
-  ifneq ($(strip $(shell build/soong/reverse_path.py $(SOONG_OUT_DIR))),$(strip $(shell source $(SOONG_BOOTSTRAP); echo $$SRCDIR_FROM_BUILDDIR)))
+  ifneq ($(strip $(shell build/soong/scripts/reverse_path.py $(SOONG_OUT_DIR))),$(strip $(shell source $(SOONG_BOOTSTRAP); echo $$SRCDIR_FROM_BUILDDIR)))
     SOONG_NEEDS_REBOOTSTRAP := FORCE
     $(warning reverse path changed)
   endif
@@ -36,11 +38,16 @@
 	$(hide) mkdir -p $(dir $@)
 	$(hide) (\
 	echo '{'; \
-	echo '    "Device_uses_jemalloc": $(if $(filter true,$(MALLOC_SVELTE)),false,true),'; \
-	echo '    "Device_uses_dlmalloc": $(if $(filter true,$(MALLOC_SVELTE)),true,false),'; \
+	echo '    "Make_suffix": "-$(TARGET_PRODUCT)",'; \
+	echo ''; \
 	echo '    "Platform_sdk_version": $(PLATFORM_SDK_VERSION),'; \
 	echo '    "Unbundled_build": $(if $(TARGET_BUILD_APPS),true,false),'; \
 	echo '    "Brillo": $(if $(BRILLO),true,false),'; \
+	echo '    "Malloc_not_svelte": $(if $(filter true,$(MALLOC_SVELTE)),false,true),'; \
+	echo '    "Allow_missing_dependencies": $(if $(TARGET_BUILD_APPS)$(filter true,$(SOONG_ALLOW_MISSING_DEPENDENCIES)),true,false),'; \
+	echo '    "SanitizeHost": [$(if $(SANITIZE_HOST),"$(subst $(comma),"$(comma)",$(SANITIZE_HOST))")],'; \
+	echo '    "SanitizeDevice": [$(if $(SANITIZE_TARGET),"$(subst $(comma),"$(comma)",$(SANITIZE_TARGET))")],'; \
+	echo '    "HostStaticBinaries": $(if $(strip $(BUILD_HOST_static)),true,false),'; \
 	echo ''; \
 	echo '    "DeviceName": "$(TARGET_DEVICE)",'; \
 	echo '    "DeviceArch": "$(TARGET_ARCH)",'; \
@@ -59,7 +66,8 @@
 	echo ''; \
 	echo '    "CrossHost": "$(HOST_CROSS_OS)",'; \
 	echo '    "CrossHostArch": "$(HOST_CROSS_ARCH)",'; \
-	echo '    "CrossHostSecondaryArch": "$(HOST_CROSS_2ND_ARCH)"'; \
+	echo '    "CrossHostSecondaryArch": "$(HOST_CROSS_2ND_ARCH)",'; \
+	echo '    "Safestack": $(if $(filter true,$(USE_SAFESTACK)),true,false)'; \
 	echo '}') > $(SOONG_VARIABLES_TMP); \
 	if ! cmp -s $(SOONG_VARIABLES_TMP) $(SOONG_VARIABLES); then \
 	  mv $(SOONG_VARIABLES_TMP) $(SOONG_VARIABLES); \
@@ -72,9 +80,8 @@
 	$(hide) mkdir -p $(dir $@)
 	$(hide) touch $@
 
-# Build an Android.mk listing all soong outputs as prebuilts
-$(SOONG_ANDROID_MK): $(SOONG_BOOTSTRAP) $(SOONG_VARIABLES) $(SOONG_IN_MAKE) FORCE
-	$(hide) $(SOONG) $(KATI) $(MAKEPARALLEL) $(NINJA_ARGS)
-
-$(KATI): $(SOONG_ANDROID_MK)
-$(MAKEPARALLEL): $(SOONG_ANDROID_MK)
+# Run Soong; this implicitly creates an Android.mk listing all soong outputs as
+# prebuilts.
+.PHONY: run_soong
+run_soong: $(SOONG_BOOTSTRAP) $(SOONG_VARIABLES) $(SOONG_IN_MAKE) FORCE
+	$(hide) $(SOONG) $(SOONG_BUILD_NINJA) $(NINJA_EXTRA_ARGS)
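
Several of the new soong.variables entries above turn make-level settings into JSON booleans and string lists with the same $(if $(filter ...)) and $(subst ...) idioms. A stand-alone sketch of just that expansion, with hypothetical values for MALLOC_SVELTE and SANITIZE_TARGET:

# Illustrative only; the expansion happens in make before the echo commands run.
comma := ,
MALLOC_SVELTE := true
SANITIZE_TARGET := address,coverage

Malloc_not_svelte := $(if $(filter true,$(MALLOC_SVELTE)),false,true)
SanitizeDevice := [$(if $(SANITIZE_TARGET),"$(subst $(comma),"$(comma)",$(SANITIZE_TARGET))")]

# Prints: Malloc_not_svelte = false
$(info Malloc_not_svelte = $(Malloc_not_svelte))
# Prints: SanitizeDevice = ["address","coverage"]
$(info SanitizeDevice = $(SanitizeDevice))

all: ;
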
diff --git a/core/static_java_library.mk b/core/static_java_library.mk
index 1279878..3295c06 100644
--- a/core/static_java_library.mk
+++ b/core/static_java_library.mk
@@ -74,12 +74,10 @@
 
 LOCAL_PROGUARD_FLAGS := $(addprefix -include ,$(proguard_options_file)) $(LOCAL_PROGUARD_FLAGS)
 
-ifdef LOCAL_JACK_ENABLED
 ifndef LOCAL_JACK_PROGUARD_FLAGS
     LOCAL_JACK_PROGUARD_FLAGS := $(LOCAL_PROGUARD_FLAGS)
 endif
 LOCAL_JACK_PROGUARD_FLAGS := $(addprefix -include ,$(proguard_options_file)) $(LOCAL_JACK_PROGUARD_FLAGS)
-endif # LOCAL_JACK_ENABLED
 
 R_file_stamp := $(intermediates.COMMON)/src/R.stamp
 LOCAL_INTERMEDIATE_TARGETS += $(R_file_stamp)
@@ -165,13 +163,10 @@
 	$(hide) find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) -name R.java | xargs cat > $@
 endif  # LOCAL_USE_AAPT2
 
-$(LOCAL_BUILT_MODULE): $(R_file_stamp)
-ifdef LOCAL_JACK_ENABLED
-$(noshrob_classes_jack): $(R_file_stamp)
-$(full_classes_jack): $(R_file_stamp)
-$(jack_check_timestamp): $(R_file_stamp)
-endif # LOCAL_JACK_ENABLED
-$(full_classes_compiled_jar): $(R_file_stamp)
+$(LOCAL_BUILT_MODULE) \
+$(full_classes_compiled_jar) \
+$(noshrob_classes_jack) $(full_classes_jack) $(jack_check_timestamp) \
+  : $(R_file_stamp)
 
 # Rule to build AAR, archive including classes.jar, resource, etc.
 built_aar := $(intermediates.COMMON)/javalib.aar
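
The consolidation above leans on ordinary make semantics: listing several targets before a colon gives each of them the same prerequisite, and a variable that expands to nothing simply drops out of the target list. A minimal stand-alone illustration with invented file names (recipe lines start with a tab):

# Illustrative only. Equivalent to writing "a.stamp : R.stamp" and
# "b.stamp : R.stamp" as two separate rules.
maybe_empty :=

a.stamp b.stamp $(maybe_empty) : R.stamp
	@echo regenerating $@ because $? changed
	@touch $@

R.stamp:
	@touch $@
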
diff --git a/core/static_library_internal.mk b/core/static_library_internal.mk
index cabe823..2b49046 100644
--- a/core/static_library_internal.mk
+++ b/core/static_library_internal.mk
@@ -20,14 +20,6 @@
 
 include $(BUILD_SYSTEM)/binary.mk
 
-ifeq ($(LOCAL_RAW_STATIC_LIBRARY),true)
-LOCAL_RAW_STATIC_LIBRARY:=
-$(all_objects) : PRIVATE_TARGET_PROJECT_INCLUDES :=
-$(all_objects) : PRIVATE_TARGET_C_INCLUDES :=
-$(all_objects) : PRIVATE_TARGET_GLOBAL_CFLAGS :=
-$(all_objects) : PRIVATE_TARGET_GLOBAL_CPPFLAGS :=
-endif
-
 $(LOCAL_BUILT_MODULE) : $(built_whole_libraries)
 $(LOCAL_BUILT_MODULE) : $(all_objects)
 	$(transform-o-to-static-lib)
diff --git a/core/target_test_internal.mk b/core/target_test_internal.mk
index 4715fe8..9e25674 100644
--- a/core/target_test_internal.mk
+++ b/core/target_test_internal.mk
@@ -4,12 +4,21 @@
 
 LOCAL_CFLAGS += -DGTEST_OS_LINUX_ANDROID -DGTEST_HAS_STD_STRING
 
-LOCAL_C_INCLUDES += external/gtest/include
-
 ifndef LOCAL_SDK_VERSION
-LOCAL_STATIC_LIBRARIES += libgtest_main libgtest
+    LOCAL_STATIC_LIBRARIES += libgtest_main libgtest
 else
-LOCAL_STATIC_LIBRARIES += libgtest_main_ndk libgtest_ndk
+    ifneq (,$(filter c++_%,$(LOCAL_NDK_STL_VARIANT)))
+        my_ndk_gtest_suffix := _libcxx
+    else ifneq ($(filter stlport_%,$(LOCAL_NDK_STL_VARIANT)),)
+        my_ndk_gtest_suffix :=
+    else ifneq ($(filter gnustl_%,$(LOCAL_NDK_STL_VARIANT)),)
+        my_ndk_gtest_suffix := _gnustl
+    else # system STL, use stlport
+        my_ndk_gtest_suffix :=
+    endif
+    LOCAL_STATIC_LIBRARIES += \
+        libgtest_main_ndk$(my_ndk_gtest_suffix) \
+        libgtest_ndk$(my_ndk_gtest_suffix)
 endif
 
 ifdef LOCAL_MODULE_PATH
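
A stand-alone sketch of the STL-to-gtest mapping above, with a hypothetical LOCAL_NDK_STL_VARIANT: a c++_* variant picks the _libcxx gtest prebuilts, a gnustl_* variant picks the _gnustl ones, and anything else falls back to the unsuffixed (stlport) libraries:

# Illustrative only.
LOCAL_NDK_STL_VARIANT := c++_shared

ifneq (,$(filter c++_%,$(LOCAL_NDK_STL_VARIANT)))
    my_ndk_gtest_suffix := _libcxx
else ifneq ($(filter stlport_%,$(LOCAL_NDK_STL_VARIANT)),)
    my_ndk_gtest_suffix :=
else ifneq ($(filter gnustl_%,$(LOCAL_NDK_STL_VARIANT)),)
    my_ndk_gtest_suffix := _gnustl
else
    my_ndk_gtest_suffix :=
endif

LOCAL_STATIC_LIBRARIES += libgtest_main_ndk$(my_ndk_gtest_suffix) libgtest_ndk$(my_ndk_gtest_suffix)

# Prints the selected NDK gtest pair: libgtest_main_ndk_libcxx libgtest_ndk_libcxx
$(info $(LOCAL_STATIC_LIBRARIES))

all: ;
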
diff --git a/envsetup.sh b/envsetup.sh
index 35df2d5..458d13b 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -715,11 +715,18 @@
     local T="$1"
     test "$WITH_STATIC_ANALYZER" = "0" && unset WITH_STATIC_ANALYZER
     if [ -n "$WITH_STATIC_ANALYZER" ]; then
+        # Use scan-build to collect all static analyzer reports into directory
+        # /tmp/scan-build-yyyy-mm-dd-hhmmss-*
+        # The clang compiler passed by --use-analyzer here is not important.
+        # build/core/binary.mk will set CLANG_CXX and CLANG before calling
+        # c++-analyzer and ccc-analyzer.
+        local CLANG_VERSION=$(get_build_var LLVM_PREBUILTS_VERSION)
+        local BUILD_OS=$(get_build_var BUILD_OS)
+        local CLANG_DIR="$T/prebuilts/clang/host/${BUILD_OS}-x86/${CLANG_VERSION}"
         echo "\
-$T/prebuilts/misc/linux-x86/analyzer/tools/scan-build/scan-build \
---use-analyzer $T/prebuilts/misc/linux-x86/analyzer/bin/analyzer \
---status-bugs \
---top=$T"
+${CLANG_DIR}/tools/scan-build/bin/scan-build \
+--use-analyzer ${CLANG_DIR}/bin/clang \
+--status-bugs"
     fi
 }
 
@@ -787,6 +794,9 @@
               MODULES=all_modules
               ARGS=$@
             fi
+            if [ "1" = "${WITH_TIDY_ONLY}" -o "true" = "${WITH_TIDY_ONLY}" ]; then
+              MODULES=tidy_only
+            fi
             ONE_SHOT_MAKEFILE=$M $DRV make -C $T -f build/core/main.mk $MODULES $ARGS
         fi
     fi
@@ -838,6 +848,9 @@
           ARGS=$GET_INSTALL_PATH
           MODULES=
         fi
+        if [ "1" = "${WITH_TIDY_ONLY}" -o "true" = "${WITH_TIDY_ONLY}" ]; then
+          MODULES=tidy_only
+        fi
         ONE_SHOT_MAKEFILE="$MAKEFILE" $DRV make -C $T -f build/core/main.mk $DASH_ARGS $MODULES $ARGS
     else
         echo "Couldn't locate the top of the tree.  Try setting TOP."
@@ -909,7 +922,11 @@
 {
     T=$(gettop)
     if [ "$T" ]; then
-        \cd $(gettop)
+        if [ "$1" ]; then
+            \cd $(gettop)/$1
+        else
+            \cd $(gettop)
+        fi
     else
         echo "Couldn't locate the top of the tree.  Try setting TOP."
     fi
@@ -1022,7 +1039,7 @@
         return;
     fi;
     echo "Setting core limit for $PID to infinite...";
-    adb shell prlimit $PID 4 -1 -1
+    adb shell /system/bin/ulimit -p $PID -c unlimited
 }
 
 # core - send SIGV and pull the core for process
diff --git a/libs/host/Android.mk b/libs/host/Android.mk
index bc25e4b..5e6a291 100644
--- a/libs/host/Android.mk
+++ b/libs/host/Android.mk
@@ -12,10 +12,6 @@
 LOCAL_EXPORT_C_INCLUDE_DIRS := $(LOCAL_PATH)/include
 LOCAL_CXX_STL := none
 
-# acp uses libhost, so we can't use
-# acp to install libhost.
-LOCAL_ACP_UNAVAILABLE:= true
-
 include $(BUILD_HOST_STATIC_LIBRARY)
 
 # Include toolchain prebuilt modules if they exist.
diff --git a/target/board/generic/sepolicy/file.te b/target/board/generic/sepolicy/file.te
index 6fad80a..9227f80 100644
--- a/target/board/generic/sepolicy/file.te
+++ b/target/board/generic/sepolicy/file.te
@@ -1 +1,2 @@
 type qemud_socket, file_type;
+type sysfs_writable, fs_type, sysfs_type, mlstrustedobject;
diff --git a/target/board/generic_x86_64/BoardConfig.mk b/target/board/generic_x86_64/BoardConfig.mk
index 553bec9..6958ba5 100755
--- a/target/board/generic_x86_64/BoardConfig.mk
+++ b/target/board/generic_x86_64/BoardConfig.mk
@@ -13,7 +13,7 @@
 
 TARGET_2ND_CPU_ABI := x86
 TARGET_2ND_ARCH := x86
-TARGET_2ND_ARCH_VARIANT := x86
+TARGET_2ND_ARCH_VARIANT := x86_64
 
 TARGET_USES_64_BIT_BINDER := true
 
diff --git a/target/board/generic_x86_arm/BoardConfig.mk b/target/board/generic_x86_arm/BoardConfig.mk
new file mode 100644
index 0000000..6e2573e
--- /dev/null
+++ b/target/board/generic_x86_arm/BoardConfig.mk
@@ -0,0 +1,60 @@
+# Copyright (C) 2016 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Configuration for generic_x86 + arm libraries needed by binary translation.
+
+# The generic product target doesn't have any hardware-specific pieces.
+TARGET_NO_BOOTLOADER := true
+TARGET_NO_KERNEL := true
+TARGET_CPU_ABI := x86
+TARGET_ARCH := x86
+TARGET_ARCH_VARIANT := x86
+
+TARGET_2ND_ARCH := arm
+TARGET_2ND_CPU_ABI := armeabi-v7a
+TARGET_2ND_CPU_ABI2 := armeabi
+TARGET_2ND_ARCH_VARIANT := armv7-a
+TARGET_2ND_CPU_VARIANT := generic
+
+# Tell the build system this isn't a typical 64bit+32bit multilib configuration.
+TARGET_TRANSLATE_2ND_ARCH := true
+
+# no hardware camera
+USE_CAMERA_STUB := true
+
+# Enable dex-preoptimization to speed up the first boot sequence
+# of an SDK AVD. Note that this operation only works on Linux for now
+ifeq ($(HOST_OS),linux)
+  ifeq ($(WITH_DEXPREOPT),)
+    WITH_DEXPREOPT := true
+  endif
+endif
+
+# Build OpenGLES emulation host and guest libraries
+BUILD_EMULATOR_OPENGL := true
+
+# Build and enable the OpenGL ES View renderer. When running on the emulator,
+# the GLES renderer disables itself if host GL acceleration isn't available.
+USE_OPENGL_RENDERER := true
+
+TARGET_USERIMAGES_USE_EXT4 := true
+BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1879048192  # 1.75 GB
+BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
+BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
+BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
+BOARD_FLASH_BLOCK_SIZE := 512
+TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
+
+BOARD_SEPOLICY_DIRS += build/target/board/generic/sepolicy
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index 69edc72..42447f1 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -53,6 +53,7 @@
     $(LOCAL_DIR)/aosp_arm.mk \
     $(LOCAL_DIR)/full.mk \
     $(LOCAL_DIR)/aosp_x86.mk \
+    $(LOCAL_DIR)/aosp_x86_arm.mk \
     $(LOCAL_DIR)/full_x86.mk \
     $(LOCAL_DIR)/aosp_mips.mk \
     $(LOCAL_DIR)/full_mips.mk \
diff --git a/target/product/aosp_x86_arm.mk b/target/product/aosp_x86_arm.mk
new file mode 100644
index 0000000..85a2cf8
--- /dev/null
+++ b/target/product/aosp_x86_arm.mk
@@ -0,0 +1,42 @@
+#
+# Copyright 2016 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+# aosp_x86 with arm libraries needed by binary translation.
+
+include $(SRC_TARGET_DIR)/product/full_x86.mk
+
+# arm libraries. This is the list of shared libraries included in the NDK.
+# Their dependency libraries will be automatically pulled in.
+PRODUCT_PACKAGES += \
+  libandroid_arm \
+  libc_arm \
+  libdl_arm \
+  libEGL_arm \
+  libGLESv1_CM_arm \
+  libGLESv2_arm \
+  libGLESv3_arm \
+  libjnigraphics_arm \
+  liblog_arm \
+  libm_arm \
+  libmediandk_arm \
+  libOpenMAXAL_arm \
+  libstdc++_arm \
+  libOpenSLES_arm \
+  libz_arm \
+
+PRODUCT_NAME := aosp_x86_arm
+PRODUCT_DEVICE := generic_x86_arm
diff --git a/target/product/core_minimal.mk b/target/product/core_minimal.mk
index 7504d01..0da84fd 100644
--- a/target/product/core_minimal.mk
+++ b/target/product/core_minimal.mk
@@ -68,6 +68,8 @@
     libfilterfw \
     libkeystore \
     libgatekeeper \
+    libwebviewchromium_loader \
+    libwebviewchromium_plat_support \
     libwilhelm \
     logd \
     make_ext4fs \
diff --git a/tools/acp/Android.mk b/tools/acp/Android.mk
index eec9c9d..56aac14 100644
--- a/tools/acp/Android.mk
+++ b/tools/acp/Android.mk
@@ -11,7 +11,6 @@
 
 LOCAL_STATIC_LIBRARIES := libhost
 LOCAL_MODULE := acp
-LOCAL_ACP_UNAVAILABLE := true
 LOCAL_CXX_STL := none
 
 include $(BUILD_HOST_EXECUTABLE)
diff --git a/tools/apksigner/Android.mk b/tools/apksigner/Android.mk
new file mode 100644
index 0000000..a7b4414
--- /dev/null
+++ b/tools/apksigner/Android.mk
@@ -0,0 +1,19 @@
+#
+# Copyright (C) 2016 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+LOCAL_PATH := $(call my-dir)
+
+include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/tools/apksigner/core/Android.mk b/tools/apksigner/core/Android.mk
new file mode 100644
index 0000000..c86208b
--- /dev/null
+++ b/tools/apksigner/core/Android.mk
@@ -0,0 +1,26 @@
+#
+# Copyright (C) 2016 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+LOCAL_PATH := $(call my-dir)
+
+# apksigner library, for signing APKs and verifying signatures of APKs
+# ============================================================
+include $(CLEAR_VARS)
+LOCAL_MODULE := apksigner-core
+LOCAL_SRC_FILES := $(call all-java-files-under, src)
+LOCAL_JAVA_LIBRARIES = \
+  bouncycastle-host \
+  bouncycastle-bcpkix-host
+include $(BUILD_HOST_JAVA_LIBRARY)
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/ApkSignerEngine.java b/tools/apksigner/core/src/com/android/apksigner/core/ApkSignerEngine.java
new file mode 100644
index 0000000..36f2a08
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/ApkSignerEngine.java
@@ -0,0 +1,407 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.apksigner.core;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.security.InvalidKeyException;
+import java.security.SignatureException;
+import java.util.List;
+
+import com.android.apksigner.core.util.DataSink;
+import com.android.apksigner.core.util.DataSource;
+
+/**
+ * APK signing logic which is independent of how input and output APKs are stored, parsed, and
+ * generated.
+ *
+ * <p><h3>Operating Model</h3>
+ *
+ * The abstract operating model is that there is an input APK which is being signed, thus producing
+ * an output APK. In reality, there may be just an output APK being built from scratch, or the
+ * input APK and the output APK may be the same file. Because this engine does not deal with
+ * reading and writing files, it can handle all of these scenarios.
+ *
+ * <p>The engine is stateful and thus cannot be used for signing multiple APKs. However, once
+ * the engine has signed an APK, it can be used to re-sign the APK after it has been modified.
+ * This may be more efficient than signing the APK using a new instance of the engine. See
+ * <a href="#incremental">Incremental Operation</a>.
+ *
+ * <p>In the engine's operating model, a signed APK is produced as follows.
+ * <ol>
+ * <li>JAR entries to be signed are output,</li>
+ * <li>JAR archive is signed using JAR signing, thus adding the so-called v1 signature to the
+ *     output,</li>
+ * <li>resulting archive is signed using APK Signature Scheme v2, thus adding the so-called v2
+ *     signature
+ *     to the output.</li>
+ * </ol>
+ *
+ * <p>The input APK may contain JAR entries which, depending on the engine's configuration, may or
+ * may not be output (e.g., existing signatures may need to be preserved or stripped) or which the
+ * engine will overwrite as part of signing. The engine thus offers {@link #inputJarEntry(String)}
+ * which tells the client whether the input JAR entry needs to be output. This avoids the need for
+ * the client to hard-code the aspects of APK signing which determine which parts of input must be
+ * ignored. Similarly, the engine offers {@link #inputApkSigningBlock(DataSource)} to help the
+ * client avoid dealing with preserving or stripping APK Signature Scheme v2 signature of the input
+ * APK.
+ *
+ * <p>To use the engine to sign an input APK (or a collection of JAR entries), follow these
+ * steps:
+ * <ol>
+ * <li>Obtain a new instance of the engine -- engine instances are stateful and thus cannot be used
+ *     for signing multiple APKs.</li>
+ * <li>Locate the input APK's APK Signing Block and provide it to
+ *     {@link #inputApkSigningBlock(DataSource)}.</li>
+ * <li>For each JAR entry in the input APK, invoke {@link #inputJarEntry(String)} to determine
+ *     whether this entry should be output. The engine may request to inspect the entry.</li>
+ * <li>For each output JAR entry, invoke {@link #outputJarEntry(String)} which may request to
+ *     inspect the entry.</li>
+ * <li>Once all JAR entries have been output, invoke {@link #outputJarEntries()} which may request
+ *     that additional JAR entries are output. These entries comprise the output APK's JAR
+ *     signature.</li>
+ * <li>Locate the ZIP Central Directory and ZIP End of Central Directory sections in the output and
+ *     invoke {@link #outputZipSections(DataSource, DataSource, DataSource)} which may request that
+ *     an APK Signing Block is inserted before the ZIP Central Directory. The block contains the
+ *     output APK's APK Signature Scheme v2 signature.</li>
+ * <li>Invoke {@link #outputDone()} to signal that the APK was output in full. The engine will
+ *     confirm that the output APK is signed.</li>
+ * <li>Invoke {@link #close()} to signal that the engine will no longer be used. This lets the
+ *     engine free any resources it no longer needs.</li>
+ * </ol>
+ *
+ * <p>Some invocations of the engine may provide the client with a task to perform. The client is
+ * expected to perform all requested tasks before proceeding to the next stage of signing. See
+ * documentation of each method about the deadlines for performing the tasks requested by the
+ * method.
+ *
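+ * <p>The sketch below illustrates this sequence. It is illustrative only: {@code newEngine()},
+ * {@code zipReader}, {@code zipWriter}, {@code toDataSource} and {@code copyTo} are hypothetical
+ * stand-ins for the client's own engine construction, ZIP parsing, and ZIP writing code, and are
+ * not part of this API.
+ *
+ * <pre>{@code
+ * try (ApkSignerEngine engine = newEngine()) {
+ *     if (zipReader.hasApkSigningBlock()) {
+ *         engine.inputApkSigningBlock(toDataSource(zipReader.apkSigningBlock()));
+ *     }
+ *     for (String name : zipReader.entryNames()) {
+ *         InputJarEntryInstructions instructions = engine.inputJarEntry(name);
+ *         InspectJarEntryRequest inputInspect = instructions.getInspectJarEntryRequest();
+ *         if (inputInspect != null) {
+ *             copyTo(inputInspect.getDataSink(), zipReader.uncompressedData(name));
+ *             inputInspect.done();
+ *         }
+ *         if (instructions.getOutputPolicy() == InputJarEntryInstructions.OutputPolicy.OUTPUT) {
+ *             zipWriter.copyEntryFrom(zipReader, name);
+ *             InspectJarEntryRequest outputInspect = engine.outputJarEntry(name);
+ *             if (outputInspect != null) {
+ *                 copyTo(outputInspect.getDataSink(), zipReader.uncompressedData(name));
+ *                 outputInspect.done();
+ *             }
+ *         }
+ *     }
+ *     OutputJarSignatureRequest v1 = engine.outputJarEntries();
+ *     if (v1 != null) {
+ *         for (OutputJarSignatureRequest.JarEntry entry : v1.getAdditionalJarEntries()) {
+ *             zipWriter.writeEntry(entry.getName(), entry.getData());
+ *         }
+ *         v1.done();
+ *     }
+ *     OutputApkSigningBlockRequest v2 = engine.outputZipSections(
+ *             toDataSource(zipWriter.entriesSection()),
+ *             toDataSource(zipWriter.centralDirectory()),
+ *             toDataSource(zipWriter.endOfCentralDirectory()));
+ *     if (v2 != null) {
+ *         zipWriter.insertBeforeCentralDirectory(v2.getApkSigningBlock());
+ *         v2.done();
+ *     }
+ *     engine.outputDone();
+ * }
+ * }</pre>
+ *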
+ * <p><h3 id="incremental">Incremental Operation</h3>
+ *
+ * The engine supports incremental operation where a signed APK is produced, then modified and
+ * re-signed. This may be useful for IDEs, where an app is frequently re-signed after small changes
+ * by the developer. Re-signing may be more efficient than signing from scratch.
+ *
+ * <p>To use the engine in incremental mode, keep notifying the engine of changes to the APK through
+ * {@link #inputApkSigningBlock(DataSource)}, {@link #inputJarEntry(String)},
+ * {@link #inputJarEntryRemoved(String)}, {@link #outputJarEntry(String)},
+ * and {@link #outputJarEntryRemoved(String)}, perform the tasks requested by the engine through
+ * these methods, and, when a new signed APK is desired, run through steps 5 onwards to re-sign the
+ * APK.
+ *
+ * <p><h3>Output-only Operation</h3>
+ *
+ * The engine's abstract operating model consists of an input APK and an output APK. However, it is
+ * possible to use the engine in output-only mode where the engine's {@code input...} methods are
+ * not invoked. In this mode, the engine has less control over output because it cannot request that
+ * some JAR entries are not output. Nevertheless, the engine will attempt to make the output APK
+ * signed and will report an error if it cannot do so.
+ */
+public interface ApkSignerEngine extends Closeable {
+
+    /**
+     * Indicates to this engine that the input APK contains the provided APK Signing Block. The
+     * block may contain signatures of the input APK, such as APK Signature Scheme v2 signatures.
+     *
+     * @param apkSigningBlock APK signing block of the input APK. The provided data source is
+     *        guaranteed to not be used by the engine after this method terminates.
+     *
+     * @throws IllegalStateException if this engine is closed
+     */
+    void inputApkSigningBlock(DataSource apkSigningBlock) throws IllegalStateException;
+
+    /**
+     * Indicates to this engine that the specified JAR entry was encountered in the input APK.
+     *
+     * <p>When an input entry is updated/changed, it's OK to not invoke
+     * {@link #inputJarEntryRemoved(String)} before invoking this method.
+     *
+     * @return instructions about how to proceed with this entry
+     *
+     * @throws IllegalStateException if this engine is closed
+     */
+    InputJarEntryInstructions inputJarEntry(String entryName) throws IllegalStateException;
+
+    /**
+     * Indicates to this engine that the specified JAR entry was output.
+     *
+     * <p>It is unnecessary to invoke this method for entries added to output by this engine (e.g.,
+     * requested by {@link #outputJarEntries()}) provided the entries were output with exactly the
+     * data requested by the engine.
+     *
+     * <p>When an already output entry is updated/changed, it's OK to not invoke
+     * {@link #outputJarEntryRemoved(String)} before invoking this method.
+     *
+     * @return request to inspect the entry or {@code null} if the engine does not need to inspect
+     *         the entry. The request must be fulfilled before {@link #outputJarEntries()} is
+     *         invoked.
+     *
+     * @throws IllegalStateException if this engine is closed
+     */
+    InspectJarEntryRequest outputJarEntry(String entryName) throws IllegalStateException;
+
+    /**
+     * Indicates to this engine that the specified JAR entry was removed from the input. It's safe
+     * to invoke this for entries for which {@link #inputJarEntry(String)} hasn't been invoked.
+     *
+     * @return output policy of this JAR entry. The policy indicates how this input entry affects
+     *         the output APK. The client of this engine should use this information to determine
+     *         how the removal of this input APK's JAR entry affects the output APK.
+     *
+     * @throws IllegalStateException if this engine is closed
+     */
+    InputJarEntryInstructions.OutputPolicy inputJarEntryRemoved(String entryName)
+            throws IllegalStateException;
+
+    /**
+     * Indicates to this engine that the specified JAR entry was removed from the output. It's safe
+     * to invoke this for entries for which {@link #outputJarEntry(String)} hasn't been invoked.
+     *
+     * @throws IllegalStateException if this engine is closed
+     */
+    void outputJarEntryRemoved(String entryName) throws IllegalStateException;
+
+    /**
+     * Indicates to this engine that all JAR entries have been output.
+     *
+     * @return request to add JAR signature to the output or {@code null} if there is no need to add
+     *         a JAR signature. The request will contain additional JAR entries to be output. The
+     *         request must be fulfilled before
+     *         {@link #outputZipSections(DataSource, DataSource, DataSource)} is invoked.
+     *
+     * @throws InvalidKeyException if a signature could not be generated because a signing key is
+     *         not suitable for generating the signature
+     * @throws SignatureException if an error occurred while generating the JAR signature
+     * @throws IllegalStateException if there are unfulfilled requests, such as to inspect some JAR
+     *         entries, or if the engine is closed
+     */
+    OutputJarSignatureRequest outputJarEntries() throws InvalidKeyException, SignatureException;
+
+    /**
+     * Indicates to this engine that the ZIP sections comprising the output APK have been output.
+     *
+     * <p>The provided data sources are guaranteed to not be used by the engine after this method
+     * terminates.
+     *
+     * @param zipEntries the section of ZIP archive containing Local File Header records and data of
+     *        the ZIP entries. In a well-formed archive, this section starts at the start of the
+     *        archive and extends all the way to the ZIP Central Directory.
+     * @param zipCentralDirectory ZIP Central Directory section
+     * @param zipEocd ZIP End of Central Directory (EoCD) record
+     *
+     * @return request to add an APK Signing Block to the output or {@code null} if the output must
+     *         not contain an APK Signing Block. The request must be fulfilled before
+     *         {@link #outputDone()} is invoked.
+     *
+     * @throws IOException if an I/O error occurs while reading the provided ZIP sections
+     * @throws InvalidKeyException if a signature could not be generated because a signing key is
+     *         not suitable for generating the signature
+     * @throws SignatureException if an error occurred while generating the APK's signature
+     * @throws IllegalStateException if there are unfulfilled requests, such as to inspect some JAR
+     *         entries or to output JAR signature, or if the engine is closed
+     */
+    OutputApkSigningBlockRequest outputZipSections(
+            DataSource zipEntries,
+            DataSource zipCentralDirectory,
+            DataSource zipEocd) throws IOException, InvalidKeyException, SignatureException;
+
+    /**
+     * Indicates to this engine that the signed APK was output.
+     *
+     * <p>This does not change the output APK. The method helps the client confirm that the current
+     * output is signed.
+     *
+     * @throws IllegalStateException if there are unfulfilled requests, such as to inspect some JAR
+     *         entries or to output signatures, or if the engine is closed
+     */
+    void outputDone() throws IllegalStateException;
+
+    /**
+     * Indicates to this engine that it will no longer be used. Invoking this on an already closed
+     * engine is OK.
+     *
+     * <p>This does not change the output APK. For example, if the output APK is not yet fully
+     * signed, it will remain so after this method terminates.
+     */
+    @Override
+    void close();
+
+    /**
+     * Instructions about how to handle an input APK's JAR entry.
+     *
+     * <p>The instructions indicate whether to output the entry (see {@link #getOutputPolicy()}) and
+     * may contain a request to inspect the entry (see {@link #getInspectJarEntryRequest()}), in
+     * which case the request must be fulfilled before {@link ApkSignerEngine#outputJarEntries()} is
+     * invoked.
+     */
+    public static class InputJarEntryInstructions {
+        private final OutputPolicy mOutputPolicy;
+        private final InspectJarEntryRequest mInspectJarEntryRequest;
+
+        /**
+         * Constructs a new {@code InputJarEntryInstructions} instance with the provided entry
+         * output policy and without a request to inspect the entry.
+         */
+        public InputJarEntryInstructions(OutputPolicy outputPolicy) {
+            this(outputPolicy, null);
+        }
+
+        /**
+         * Constructs a new {@code InputJarEntryInstructions} instance with the provided entry
+         * output mode and with the provided request to inspect the entry.
+         *
+         * @param inspectJarEntryRequest request to inspect the entry or {@code null} if there's no
+         *        need to inspect the entry.
+         */
+        public InputJarEntryInstructions(
+                OutputPolicy outputPolicy,
+                InspectJarEntryRequest inspectJarEntryRequest) {
+            mOutputPolicy = outputPolicy;
+            mInspectJarEntryRequest = inspectJarEntryRequest;
+        }
+
+        /**
+         * Returns the output policy for this entry.
+         */
+        public OutputPolicy getOutputPolicy() {
+            return mOutputPolicy;
+        }
+
+        /**
+         * Returns the request to inspect the JAR entry or {@code null} if there is no need to
+         * inspect the entry.
+         */
+        public InspectJarEntryRequest getInspectJarEntryRequest() {
+            return mInspectJarEntryRequest;
+        }
+
+        /**
+         * Output policy for an input APK's JAR entry.
+         */
+        public static enum OutputPolicy {
+            /** Entry must not be output. */
+            SKIP,
+
+            /** Entry should be output. */
+            OUTPUT,
+
+            /** Entry will be output by the engine. The client can thus ignore this input entry. */
+            OUTPUT_BY_ENGINE,
+        }
+    }
+
+    /**
+     * Request to inspect the specified JAR entry.
+     *
+     * <p>The entry's uncompressed data must be provided to the data sink returned by
+     * {@link #getDataSink()}. Once the entry's data has been provided to the sink, {@link #done()}
+     * must be invoked.
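+     *
+     * <p>For example (a sketch; {@code copyTo} stands in for whatever mechanism the client uses
+     * to feed bytes into a {@link DataSink} and {@code uncompressedEntryData} is the entry's
+     * uncompressed data obtained by the client):
+     * <pre>{@code
+     * copyTo(request.getDataSink(), uncompressedEntryData);
+     * request.done();
+     * }</pre>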
+     */
+    interface InspectJarEntryRequest {
+
+        /**
+         * Returns the data sink into which the entry's uncompressed data should be sent.
+         */
+        DataSink getDataSink();
+
+        /**
+         * Indicates that the entry's data has been provided in full.
+         */
+        void done();
+
+        /**
+         * Returns the name of the JAR entry.
+         */
+        String getEntryName();
+    }
+
+    /**
+     * Request to add JAR signature (aka v1 signature) to the output APK.
+     *
+     * <p>Entries listed in {@link #getAdditionalJarEntries()} must be added to the output APK after
+     * which {@link #done()} must be invoked.
+     */
+    interface OutputJarSignatureRequest {
+
+        /**
+         * Returns JAR entries that must be added to the output APK.
+         */
+        List<JarEntry> getAdditionalJarEntries();
+
+        /**
+         * Indicates that the JAR entries contained in this request were added to the output APK.
+         */
+        void done();
+
+        /**
+         * JAR entry.
+         */
+        public static class JarEntry {
+            private final String mName;
+            private final byte[] mData;
+
+            /**
+             * Constructs a new {@code JarEntry} with the provided name and data.
+             *
+             * @param data uncompressed data of the entry. Changes to this array will not be
+             *        reflected in {@link #getData()}.
+             */
+            public JarEntry(String name, byte[] data) {
+                mName = name;
+                mData = data.clone();
+            }
+
+            /**
+             * Returns the name of this JAR entry.
+             */
+            public String getName() {
+                return mName;
+            }
+
+            /**
+             * Returns the uncompressed data of this JAR entry.
+             */
+            public byte[] getData() {
+                return mData.clone();
+            }
+        }
+    }
+
+    /**
+     * Request to add the specified APK Signing Block to the output APK. APK Signature Scheme v2
+     * signature(s) of the APK are contained in this block.
+     *
+     * <p>The APK Signing Block returned by {@link #getApkSigningBlock()} must be placed into the
+     * output APK such that the block is immediately before the ZIP Central Directory, the offset of
+     * ZIP Central Directory in the ZIP End of Central Directory record must be adjusted
+     * accordingly, and then {@link #done()} must be invoked.
+     *
+     * <p>If the output contains an APK Signing Block, that block must be replaced by the block
+     * contained in this request.
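+     *
+     * <p>A sketch of fulfilling this request ({@code zipWriter} and its methods are hypothetical
+     * stand-ins for the client's own ZIP writing code and are not part of this API):
+     * <pre>{@code
+     * byte[] block = request.getApkSigningBlock();
+     * zipWriter.insertBeforeCentralDirectory(block); // shifts the Central Directory
+     * zipWriter.updateEocdCentralDirectoryOffset();  // reflect the new Central Directory offset
+     * request.done();
+     * }</pre>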
+     */
+    interface OutputApkSigningBlockRequest {
+
+        /**
+         * Returns the APK Signing Block.
+         */
+        byte[] getApkSigningBlock();
+
+        /**
+         * Indicates that the APK Signing Block was output as requested.
+         */
+        void done();
+    }
+}
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/DefaultApkSignerEngine.java b/tools/apksigner/core/src/com/android/apksigner/core/DefaultApkSignerEngine.java
new file mode 100644
index 0000000..30d4011
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/DefaultApkSignerEngine.java
@@ -0,0 +1,870 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.apksigner.core;
+
+import com.android.apksigner.core.internal.apk.v1.DigestAlgorithm;
+import com.android.apksigner.core.internal.apk.v1.V1SchemeSigner;
+import com.android.apksigner.core.internal.apk.v2.MessageDigestSink;
+import com.android.apksigner.core.internal.apk.v2.V2SchemeSigner;
+import com.android.apksigner.core.internal.util.ByteArrayOutputStreamSink;
+import com.android.apksigner.core.internal.util.Pair;
+import com.android.apksigner.core.util.DataSink;
+import com.android.apksigner.core.util.DataSource;
+
+import java.io.IOException;
+import java.security.InvalidKeyException;
+import java.security.MessageDigest;
+import java.security.PrivateKey;
+import java.security.PublicKey;
+import java.security.SignatureException;
+import java.security.cert.CertificateEncodingException;
+import java.security.cert.X509Certificate;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Default implementation of {@link ApkSignerEngine}.
+ *
+ * <p>Use {@link Builder} to obtain instances of this engine.
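+ *
+ * <p>For example, a minimal sketch of building an engine with a single signer. How the
+ * {@code privateKey} and {@code certificate} are loaded, the signer name {@code "CERT"}, and the
+ * {@code minSdkVersion} of 21 are illustrative choices made by the caller:
+ *
+ * <pre>{@code
+ * DefaultApkSignerEngine.SignerConfig signerConfig =
+ *         new DefaultApkSignerEngine.SignerConfig.Builder(
+ *                 "CERT", privateKey, Collections.singletonList(certificate))
+ *                 .build();
+ * ApkSignerEngine engine =
+ *         new DefaultApkSignerEngine.Builder(Collections.singletonList(signerConfig), 21)
+ *                 .setV1SigningEnabled(true)
+ *                 .setV2SigningEnabled(true)
+ *                 .build();
+ * }</pre>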
+ */
+public class DefaultApkSignerEngine implements ApkSignerEngine {
+
+    // IMPLEMENTATION NOTE: This engine generates a signed APK as follows:
+    // 1. The engine asks its client to output input JAR entries which are not part of the JAR
+    //    signature.
+    // 2. If JAR signing (v1 signing) is enabled, the engine inspects the output JAR entries to
+    //    compute their digests, to be placed into output META-INF/MANIFEST.MF. It also inspects
+    //    the contents of input and output META-INF/MANIFEST.MF to borrow the main section of the
+    //    file. It does not care about individual (i.e., JAR entry-specific) sections. It then
+    //    emits the v1 signature (a set of JAR entries) and asks the client to output them.
+    // 3. If APK Signature Scheme v2 (v2 signing) is enabled, the engine emits an APK Signing Block
+    //    from outputZipSections() and asks its client to insert this block into the output.
+
+    private final boolean mV1SigningEnabled;
+    private final boolean mV2SigningEnabled;
+    private final boolean mOtherSignersSignaturesPreserved;
+    private final List<V1SchemeSigner.SignerConfig> mV1SignerConfigs;
+    private final DigestAlgorithm mV1ContentDigestAlgorithm;
+    private final List<V2SchemeSigner.SignerConfig> mV2SignerConfigs;
+
+    private boolean mClosed;
+
+    private boolean mV1SignaturePending;
+
+    /**
+     * Names of JAR entries which this engine is expected to output as part of v1 signing.
+     */
+    private final Set<String> mSignatureExpectedOutputJarEntryNames;
+
+    /** Requests for digests of output JAR entries. */
+    private final Map<String, GetJarEntryDataDigestRequest> mOutputJarEntryDigestRequests =
+            new HashMap<>();
+
+    /** Digests of output JAR entries. */
+    private final Map<String, byte[]> mOutputJarEntryDigests = new HashMap<>();
+
+    /** Data of JAR entries emitted by this engine as v1 signature. */
+    private final Map<String, byte[]> mEmittedSignatureJarEntryData = new HashMap<>();
+
+    /** Requests for data of output JAR entries which comprise the v1 signature. */
+    private final Map<String, GetJarEntryDataRequest> mOutputSignatureJarEntryDataRequests =
+            new HashMap<>();
+    /**
+     * Request to obtain the data of MANIFEST.MF or {@code null} if the request hasn't been issued.
+     */
+    private GetJarEntryDataRequest mInputJarManifestEntryDataRequest;
+
+    /**
+     * Request to output the emitted v1 signature or {@code null} if the request hasn't been issued.
+     */
+    private OutputJarSignatureRequestImpl mAddV1SignatureRequest;
+
+    private boolean mV2SignaturePending;
+
+    /**
+     * Request to output the emitted v2 signature or {@code null} if the request hasn't been issued.
+     */
+    private OutputApkSigningBlockRequestImpl mAddV2SignatureRequest;
+
+    private DefaultApkSignerEngine(
+            List<SignerConfig> signerConfigs,
+            int minSdkVersion,
+            boolean v1SigningEnabled,
+            boolean v2SigningEnabled,
+            boolean otherSignersSignaturesPreserved) throws InvalidKeyException {
+        if (signerConfigs.isEmpty()) {
+            throw new IllegalArgumentException("At least one signer config must be provided");
+        }
+        if (otherSignersSignaturesPreserved) {
+            throw new UnsupportedOperationException(
+                    "Preserving other signer's signatures is not yet implemented");
+        }
+
+        mV1SigningEnabled = v1SigningEnabled;
+        mV2SigningEnabled = v2SigningEnabled;
+        mOtherSignersSignaturesPreserved = otherSignersSignaturesPreserved;
+        mV1SignerConfigs =
+                (v1SigningEnabled)
+                        ? new ArrayList<>(signerConfigs.size()) : Collections.emptyList();
+        mV2SignerConfigs =
+                (v2SigningEnabled)
+                        ? new ArrayList<>(signerConfigs.size()) : Collections.emptyList();
+        mV1ContentDigestAlgorithm =
+                (v1SigningEnabled)
+                        ? V1SchemeSigner.getSuggestedContentDigestAlgorithm(minSdkVersion) : null;
+        for (SignerConfig signerConfig : signerConfigs) {
+            List<X509Certificate> certificates = signerConfig.getCertificates();
+            PublicKey publicKey = certificates.get(0).getPublicKey();
+
+            if (v1SigningEnabled) {
+                DigestAlgorithm v1SignatureDigestAlgorithm =
+                        V1SchemeSigner.getSuggestedSignatureDigestAlgorithm(
+                                publicKey, minSdkVersion);
+                V1SchemeSigner.SignerConfig v1SignerConfig = new V1SchemeSigner.SignerConfig();
+                v1SignerConfig.name = signerConfig.getName();
+                v1SignerConfig.privateKey = signerConfig.getPrivateKey();
+                v1SignerConfig.certificates = certificates;
+                v1SignerConfig.contentDigestAlgorithm = mV1ContentDigestAlgorithm;
+                v1SignerConfig.signatureDigestAlgorithm = v1SignatureDigestAlgorithm;
+                mV1SignerConfigs.add(v1SignerConfig);
+            }
+
+            if (v2SigningEnabled) {
+                V2SchemeSigner.SignerConfig v2SignerConfig = new V2SchemeSigner.SignerConfig();
+                v2SignerConfig.privateKey = signerConfig.getPrivateKey();
+                v2SignerConfig.certificates = certificates;
+                v2SignerConfig.signatureAlgorithms =
+                        V2SchemeSigner.getSuggestedSignatureAlgorithms(publicKey, minSdkVersion);
+                mV2SignerConfigs.add(v2SignerConfig);
+            }
+        }
+        mSignatureExpectedOutputJarEntryNames =
+                (v1SigningEnabled)
+                        ? V1SchemeSigner.getOutputEntryNames(mV1SignerConfigs)
+                        : Collections.emptySet();
+    }
+
+    @Override
+    public void inputApkSigningBlock(DataSource apkSigningBlock) {
+        checkNotClosed();
+
+        if ((apkSigningBlock == null) || (apkSigningBlock.size() == 0)) {
+            return;
+        }
+
+        if (mOtherSignersSignaturesPreserved) {
+            // TODO: Preserve blocks other than APK Signature Scheme v2 blocks of signers configured
+            // in this engine.
+            return;
+        }
+        // TODO: Preserve blocks other than APK Signature Scheme v2 blocks.
+    }
+
+    @Override
+    public InputJarEntryInstructions inputJarEntry(String entryName) {
+        checkNotClosed();
+
+        InputJarEntryInstructions.OutputPolicy outputPolicy =
+                getInputJarEntryOutputPolicy(entryName);
+        switch (outputPolicy) {
+            case SKIP:
+                return new InputJarEntryInstructions(InputJarEntryInstructions.OutputPolicy.SKIP);
+            case OUTPUT:
+                return new InputJarEntryInstructions(InputJarEntryInstructions.OutputPolicy.OUTPUT);
+            case OUTPUT_BY_ENGINE:
+                if (V1SchemeSigner.MANIFEST_ENTRY_NAME.equals(entryName)) {
+                    // We copy the main section of the JAR manifest from input to output. Thus, this
+                    // invalidates v1 signature and we need to see the entry's data.
+                    mInputJarManifestEntryDataRequest = new GetJarEntryDataRequest(entryName);
+                    return new InputJarEntryInstructions(
+                            InputJarEntryInstructions.OutputPolicy.OUTPUT_BY_ENGINE,
+                            mInputJarManifestEntryDataRequest);
+                }
+                return new InputJarEntryInstructions(
+                        InputJarEntryInstructions.OutputPolicy.OUTPUT_BY_ENGINE);
+            default:
+                throw new RuntimeException("Unsupported output policy: " + outputPolicy);
+        }
+    }
+
+    @Override
+    public InspectJarEntryRequest outputJarEntry(String entryName) {
+        checkNotClosed();
+        invalidateV2Signature();
+        if (!mV1SigningEnabled) {
+            // No need to inspect JAR entries when v1 signing is not enabled.
+            return null;
+        }
+        // v1 signing is enabled
+
+        if (V1SchemeSigner.isJarEntryDigestNeededInManifest(entryName)) {
+            // This entry is covered by v1 signature. We thus need to inspect the entry's data to
+            // compute its digest(s) for v1 signature.
+
+            // TODO: Handle the case where other signer's v1 signatures are present and need to be
+            // preserved. In that scenario we can't modify MANIFEST.MF and add/remove JAR entries
+            // covered by v1 signature.
+            invalidateV1Signature();
+            GetJarEntryDataDigestRequest dataDigestRequest =
+                    new GetJarEntryDataDigestRequest(
+                            entryName,
+                            V1SchemeSigner.getMessageDigestInstance(mV1ContentDigestAlgorithm));
+            mOutputJarEntryDigestRequests.put(entryName, dataDigestRequest);
+            mOutputJarEntryDigests.remove(entryName);
+            return dataDigestRequest;
+        }
+
+        if (mSignatureExpectedOutputJarEntryNames.contains(entryName)) {
+            // This entry is part of v1 signature generated by this engine. We need to check whether
+            // the entry's data is as output by the engine.
+            invalidateV1Signature();
+            GetJarEntryDataRequest dataRequest;
+            if (V1SchemeSigner.MANIFEST_ENTRY_NAME.equals(entryName)) {
+                dataRequest = new GetJarEntryDataRequest(entryName);
+                mInputJarManifestEntryDataRequest = dataRequest;
+            } else {
+                // If this entry is part of v1 signature which has been emitted by this engine,
+                // check whether the output entry's data matches what the engine emitted.
+                dataRequest =
+                        (mEmittedSignatureJarEntryData.containsKey(entryName))
+                                ? new GetJarEntryDataRequest(entryName) : null;
+            }
+
+            if (dataRequest != null) {
+                mOutputSignatureJarEntryDataRequests.put(entryName, dataRequest);
+            }
+            return dataRequest;
+        }
+
+        // This entry is not covered by v1 signature and isn't part of v1 signature.
+        return null;
+    }
+
+    @Override
+    public InputJarEntryInstructions.OutputPolicy inputJarEntryRemoved(String entryName) {
+        checkNotClosed();
+        return getInputJarEntryOutputPolicy(entryName);
+    }
+
+    @Override
+    public void outputJarEntryRemoved(String entryName) {
+        checkNotClosed();
+        invalidateV2Signature();
+        if (!mV1SigningEnabled) {
+            return;
+        }
+
+        if (V1SchemeSigner.isJarEntryDigestNeededInManifest(entryName)) {
+            // This entry is covered by v1 signature.
+            invalidateV1Signature();
+            mOutputJarEntryDigests.remove(entryName);
+            mOutputJarEntryDigestRequests.remove(entryName);
+            mOutputSignatureJarEntryDataRequests.remove(entryName);
+            return;
+        }
+
+        if (mSignatureExpectedOutputJarEntryNames.contains(entryName)) {
+            // This entry is part of the v1 signature generated by this engine.
+            invalidateV1Signature();
+            return;
+        }
+    }
+
+    @Override
+    public OutputJarSignatureRequest outputJarEntries()
+            throws InvalidKeyException, SignatureException {
+        checkNotClosed();
+
+        if (!mV1SignaturePending) {
+            return null;
+        }
+
+        if ((mInputJarManifestEntryDataRequest != null)
+                && (!mInputJarManifestEntryDataRequest.isDone())) {
+            throw new IllegalStateException(
+                    "Still waiting to inspect input APK's "
+                            + mInputJarManifestEntryDataRequest.getEntryName());
+        }
+
+        for (GetJarEntryDataDigestRequest digestRequest
+                : mOutputJarEntryDigestRequests.values()) {
+            String entryName = digestRequest.getEntryName();
+            if (!digestRequest.isDone()) {
+                throw new IllegalStateException(
+                        "Still waiting to inspect output APK's " + entryName);
+            }
+            mOutputJarEntryDigests.put(entryName, digestRequest.getDigest());
+        }
+        mOutputJarEntryDigestRequests.clear();
+
+        for (GetJarEntryDataRequest dataRequest : mOutputSignatureJarEntryDataRequests.values()) {
+            if (!dataRequest.isDone()) {
+                throw new IllegalStateException(
+                        "Still waiting to inspect output APK's " + dataRequest.getEntryName());
+            }
+        }
+
+        List<Integer> apkSigningSchemeIds =
+                (mV2SigningEnabled) ? Collections.singletonList(2) : Collections.emptyList();
+        byte[] inputJarManifest =
+                (mInputJarManifestEntryDataRequest != null)
+                    ? mInputJarManifestEntryDataRequest.getData() : null;
+
+        // Check whether the most recently used signature (if present) is still fine.
+        List<Pair<String, byte[]>> signatureZipEntries;
+        if ((mAddV1SignatureRequest == null) || (!mAddV1SignatureRequest.isDone())) {
+            try {
+                signatureZipEntries =
+                        V1SchemeSigner.sign(
+                                mV1SignerConfigs,
+                                mV1ContentDigestAlgorithm,
+                                mOutputJarEntryDigests,
+                                apkSigningSchemeIds,
+                                inputJarManifest);
+            } catch (CertificateEncodingException e) {
+                throw new SignatureException("Failed to generate v1 signature", e);
+            }
+        } else {
+            V1SchemeSigner.OutputManifestFile newManifest =
+                    V1SchemeSigner.generateManifestFile(
+                            mV1ContentDigestAlgorithm, mOutputJarEntryDigests, inputJarManifest);
+            byte[] emittedSignatureManifest =
+                    mEmittedSignatureJarEntryData.get(V1SchemeSigner.MANIFEST_ENTRY_NAME);
+            if (!Arrays.equals(newManifest.contents, emittedSignatureManifest)) {
+                // Emitted v1 signature is no longer valid.
+                try {
+                    signatureZipEntries =
+                            V1SchemeSigner.signManifest(
+                                    mV1SignerConfigs,
+                                    mV1ContentDigestAlgorithm,
+                                    apkSigningSchemeIds,
+                                    newManifest);
+                } catch (CertificateEncodingException e) {
+                    throw new SignatureException("Failed to generate v1 signature", e);
+                }
+            } else {
+                // Emitted v1 signature is still valid. Check whether the signature is there in the
+                // output.
+                signatureZipEntries = new ArrayList<>();
+                for (Map.Entry<String, byte[]> expectedOutputEntry
+                        : mEmittedSignatureJarEntryData.entrySet()) {
+                    String entryName = expectedOutputEntry.getKey();
+                    byte[] expectedData = expectedOutputEntry.getValue();
+                    GetJarEntryDataRequest actualDataRequest =
+                            mOutputSignatureJarEntryDataRequests.get(entryName);
+                    if (actualDataRequest == null) {
+                        // This signature entry hasn't been output.
+                        signatureZipEntries.add(Pair.of(entryName, expectedData));
+                        continue;
+                    }
+                    byte[] actualData = actualDataRequest.getData();
+                    if (!Arrays.equals(expectedData, actualData)) {
+                        signatureZipEntries.add(Pair.of(entryName, expectedData));
+                    }
+                }
+                if (signatureZipEntries.isEmpty()) {
+                    // v1 signature in the output is valid
+                    return null;
+                }
+                // v1 signature in the output is not valid.
+            }
+        }
+
+        if (signatureZipEntries.isEmpty()) {
+            // v1 signature in the output is valid
+            mV1SignaturePending = false;
+            return null;
+        }
+
+        List<OutputJarSignatureRequest.JarEntry> sigEntries =
+                new ArrayList<>(signatureZipEntries.size());
+        for (Pair<String, byte[]> entry : signatureZipEntries) {
+            String entryName = entry.getFirst();
+            byte[] entryData = entry.getSecond();
+            sigEntries.add(new OutputJarSignatureRequest.JarEntry(entryName, entryData));
+            mEmittedSignatureJarEntryData.put(entryName, entryData);
+        }
+        mAddV1SignatureRequest = new OutputJarSignatureRequestImpl(sigEntries);
+        return mAddV1SignatureRequest;
+    }
+
+    @Override
+    public OutputApkSigningBlockRequest outputZipSections(
+            DataSource zipEntries,
+            DataSource zipCentralDirectory,
+            DataSource zipEocd) throws IOException, InvalidKeyException, SignatureException {
+        checkNotClosed();
+        checkV1SigningDoneIfEnabled();
+        if (!mV2SigningEnabled) {
+            return null;
+        }
+        invalidateV2Signature();
+
+        byte[] apkSigningBlock =
+                V2SchemeSigner.generateApkSigningBlock(
+                        zipEntries, zipCentralDirectory, zipEocd, mV2SignerConfigs);
+
+        mAddV2SignatureRequest = new OutputApkSigningBlockRequestImpl(apkSigningBlock);
+        return mAddV2SignatureRequest;
+    }
+
+    @Override
+    public void outputDone() {
+        checkNotClosed();
+        checkV1SigningDoneIfEnabled();
+        checkV2SigningDoneIfEnabled();
+    }
+
+    @Override
+    public void close() {
+        mClosed = true;
+
+        mAddV1SignatureRequest = null;
+        mInputJarManifestEntryDataRequest = null;
+        mOutputJarEntryDigestRequests.clear();
+        mOutputJarEntryDigests.clear();
+        mEmittedSignatureJarEntryData.clear();
+        mOutputSignatureJarEntryDataRequests.clear();
+
+        mAddV2SignatureRequest = null;
+    }
+
+    private void invalidateV1Signature() {
+        if (mV1SigningEnabled) {
+            mV1SignaturePending = true;
+        }
+        invalidateV2Signature();
+    }
+
+    private void invalidateV2Signature() {
+        if (mV2SigningEnabled) {
+            mV2SignaturePending = true;
+            mAddV2SignatureRequest = null;
+        }
+    }
+
+    private void checkNotClosed() {
+        if (mClosed) {
+            throw new IllegalStateException("Engine closed");
+        }
+    }
+
+    private void checkV1SigningDoneIfEnabled() {
+        if (!mV1SignaturePending) {
+            return;
+        }
+
+        if (mAddV1SignatureRequest == null) {
+            throw new IllegalStateException(
+                    "v1 signature (JAR signature) not yet generated. Skipped outputJarEntries()?");
+        }
+        if (!mAddV1SignatureRequest.isDone()) {
+            throw new IllegalStateException(
+                    "v1 signature (JAR signature) addition requested by outputJarEntries() hasn't"
+                            + " been fulfilled");
+        }
+        for (Map.Entry<String, byte[]> expectedOutputEntry
+                : mEmittedSignatureJarEntryData.entrySet()) {
+            String entryName = expectedOutputEntry.getKey();
+            byte[] expectedData = expectedOutputEntry.getValue();
+            GetJarEntryDataRequest actualDataRequest =
+                    mOutputSignatureJarEntryDataRequests.get(entryName);
+            if (actualDataRequest == null) {
+                throw new IllegalStateException(
+                        "APK entry " + entryName + " not yet output despite this having been"
+                                + " requested");
+            } else if (!actualDataRequest.isDone()) {
+                throw new IllegalStateException(
+                        "Still waiting to inspect output APK's " + entryName);
+            }
+            byte[] actualData = actualDataRequest.getData();
+            if (!Arrays.equals(expectedData, actualData)) {
+                throw new IllegalStateException(
+                        "Output APK entry " + entryName + " data differs from what was requested");
+            }
+        }
+        mV1SignaturePending = false;
+    }
+
+    private void checkV2SigningDoneIfEnabled() {
+        if (!mV2SignaturePending) {
+            return;
+        }
+        if (mAddV2SignatureRequest == null) {
+            throw new IllegalStateException(
+                    "v2 signature (APK Signature Scheme v2 signature) not yet generated."
+                            + " Skipped outputZipSections()?");
+        }
+        if (!mAddV2SignatureRequest.isDone()) {
+            throw new IllegalStateException(
+                    "v2 signature (APK Signature Scheme v2 signature) addition requested by"
+                            + " outputZipSections() hasn't been fulfilled yet");
+        }
+        mAddV2SignatureRequest = null;
+        mV2SignaturePending = false;
+    }
+
+    /**
+     * Returns the output policy for the provided input JAR entry.
+     */
+    private InputJarEntryInstructions.OutputPolicy getInputJarEntryOutputPolicy(String entryName) {
+        if (mSignatureExpectedOutputJarEntryNames.contains(entryName)) {
+            return InputJarEntryInstructions.OutputPolicy.OUTPUT_BY_ENGINE;
+        }
+        if ((mOtherSignersSignaturesPreserved)
+                || (V1SchemeSigner.isJarEntryDigestNeededInManifest(entryName))) {
+            return InputJarEntryInstructions.OutputPolicy.OUTPUT;
+        }
+        return InputJarEntryInstructions.OutputPolicy.SKIP;
+    }
+
+    private static class OutputJarSignatureRequestImpl implements OutputJarSignatureRequest {
+        private final List<JarEntry> mAdditionalJarEntries;
+        private volatile boolean mDone;
+
+        private OutputJarSignatureRequestImpl(List<JarEntry> additionalZipEntries) {
+            mAdditionalJarEntries =
+                    Collections.unmodifiableList(new ArrayList<>(additionalZipEntries));
+        }
+
+        @Override
+        public List<JarEntry> getAdditionalJarEntries() {
+            return mAdditionalJarEntries;
+        }
+
+        @Override
+        public void done() {
+            mDone = true;
+        }
+
+        private boolean isDone() {
+            return mDone;
+        }
+    }
+
+    private static class OutputApkSigningBlockRequestImpl implements OutputApkSigningBlockRequest {
+        private final byte[] mApkSigningBlock;
+        private volatile boolean mDone;
+
+        private OutputApkSigningBlockRequestImpl(byte[] apkSigningBlock) {
+            mApkSigningBlock = apkSigningBlock.clone();
+        }
+
+        @Override
+        public byte[] getApkSigningBlock() {
+            return mApkSigningBlock.clone();
+        }
+
+        @Override
+        public void done() {
+            mDone = true;
+        }
+
+        private boolean isDone() {
+            return mDone;
+        }
+    }
+
+    /**
+     * JAR entry inspection request which obtains the entry's uncompressed data.
+     */
+    private static class GetJarEntryDataRequest implements InspectJarEntryRequest {
+        private final String mEntryName;
+        private final Object mLock = new Object();
+        private final ByteArrayOutputStreamSink mBuf = new ByteArrayOutputStreamSink();
+
+        private boolean mDone;
+
+        private GetJarEntryDataRequest(String entryName) {
+            mEntryName = entryName;
+        }
+
+        @Override
+        public String getEntryName() {
+            return mEntryName;
+        }
+
+        @Override
+        public DataSink getDataSink() {
+            synchronized (mLock) {
+                checkNotDone();
+                return mBuf;
+            }
+        }
+
+        @Override
+        public void done() {
+            synchronized (mLock) {
+                if (mDone) {
+                    return;
+                }
+                mDone = true;
+            }
+        }
+
+        private boolean isDone() {
+            synchronized (mLock) {
+                return mDone;
+            }
+        }
+
+        private void checkNotDone() throws IllegalStateException {
+            synchronized (mLock) {
+                if (mDone) {
+                    throw new IllegalStateException("Already done");
+                }
+            }
+        }
+
+        private byte[] getData() {
+            synchronized (mLock) {
+                if (!mDone) {
+                    throw new IllegalStateException("Not yet done");
+                }
+                return mBuf.getData();
+            }
+        }
+    }
+
+    /**
+     * JAR entry inspection request which obtains the digest of the entry's uncompressed data.
+     */
+    private static class GetJarEntryDataDigestRequest implements InspectJarEntryRequest {
+        private final String mEntryName;
+        private final MessageDigest mMessageDigest;
+        private final DataSink mDataSink;
+        private final Object mLock = new Object();
+
+        private boolean mDone;
+        private byte[] mDigest;
+
+        private GetJarEntryDataDigestRequest(String entryName, MessageDigest digest) {
+            mEntryName = entryName;
+            mMessageDigest = digest;
+            mDataSink = new MessageDigestSink(new MessageDigest[] {mMessageDigest});
+        }
+
+        @Override
+        public String getEntryName() {
+            return mEntryName;
+        }
+
+        @Override
+        public DataSink getDataSink() {
+            synchronized (mLock) {
+                checkNotDone();
+                return mDataSink;
+            }
+        }
+
+        @Override
+        public void done() {
+            synchronized (mLock) {
+                if (mDone) {
+                    return;
+                }
+                mDone = true;
+                mDigest = mMessageDigest.digest();
+            }
+        }
+
+        private boolean isDone() {
+            synchronized (mLock) {
+                return mDone;
+            }
+        }
+
+        private void checkNotDone() throws IllegalStateException {
+            synchronized (mLock) {
+                if (mDone) {
+                    throw new IllegalStateException("Already done");
+                }
+            }
+        }
+
+        private byte[] getDigest() {
+            synchronized (mLock) {
+                if (!mDone) {
+                    throw new IllegalStateException("Not yet done");
+                }
+                return mDigest.clone();
+            }
+        }
+    }
+
+    /**
+     * Configuration of a signer.
+     *
+     * <p>Use {@link Builder} to obtain configuration instances.
+     */
+    public static class SignerConfig {
+        private final String mName;
+        private final PrivateKey mPrivateKey;
+        private final List<X509Certificate> mCertificates;
+
+        private SignerConfig(
+                String name,
+                PrivateKey privateKey,
+                List<X509Certificate> certificates) {
+            mName = name;
+            mPrivateKey = privateKey;
+            mCertificates = Collections.unmodifiableList(new ArrayList<>(certificates));
+        }
+
+        /**
+         * Returns the name of this signer.
+         */
+        public String getName() {
+            return mName;
+        }
+
+        /**
+         * Returns the signing key of this signer.
+         */
+        public PrivateKey getPrivateKey() {
+            return mPrivateKey;
+        }
+
+        /**
+         * Returns the certificate(s) of this signer. The first certificate's public key corresponds
+         * to this signer's private key.
+         */
+        public List<X509Certificate> getCertificates() {
+            return mCertificates;
+        }
+
+        /**
+         * Builder of {@link SignerConfig} instances.
+         */
+        public static class Builder {
+            private final String mName;
+            private final PrivateKey mPrivateKey;
+            private final List<X509Certificate> mCertificates;
+
+            /**
+             * Constructs a new {@code Builder}.
+             *
+             * @param name signer's name. The name is reflected in the name of files comprising the
+             *        JAR signature of the APK.
+             * @param privateKey signing key
+             * @param certificates list of one or more X.509 certificates. The subject public key of
+             *        the first certificate must correspond to the {@code privateKey}.
+             */
+            public Builder(
+                    String name,
+                    PrivateKey privateKey,
+                    List<X509Certificate> certificates) {
+                mName = name;
+                mPrivateKey = privateKey;
+                mCertificates = new ArrayList<>(certificates);
+            }
+
+            /**
+             * Returns a new {@code SignerConfig} instance configured based on the configuration of
+             * this builder.
+             */
+            public SignerConfig build() {
+                return new SignerConfig(
+                        mName,
+                        mPrivateKey,
+                        mCertificates);
+            }
+        }
+    }
+
+    /**
+     * Builder of {@link DefaultApkSignerEngine} instances.
+     */
+    public static class Builder {
+        private final List<SignerConfig> mSignerConfigs;
+        private final int mMinSdkVersion;
+
+        private boolean mV1SigningEnabled = true;
+        private boolean mV2SigningEnabled = true;
+        private boolean mOtherSignersSignaturesPreserved;
+
+        /**
+         * Constructs a new {@code Builder}.
+         *
+         * @param signerConfigs information about signers with which the APK will be signed. At
+         *        least one signer configuration must be provided.
+         * @param minSdkVersion API Level of the oldest Android platform on which the APK is
+         *        supposed to be installed. See {@code minSdkVersion} attribute in the APK's
+         *        {@code AndroidManifest.xml}. The higher the version, the stronger signing features
+         *        will be enabled.
+         */
+        public Builder(
+                List<SignerConfig> signerConfigs,
+                int minSdkVersion) {
+            if (signerConfigs.isEmpty()) {
+                throw new IllegalArgumentException("At least one signer config must be provided");
+            }
+            mSignerConfigs = new ArrayList<>(signerConfigs);
+            mMinSdkVersion = minSdkVersion;
+        }
+
+        /**
+         * Returns a new {@code DefaultApkSignerEngine} instance configured based on the
+         * configuration of this builder.
+         */
+        public DefaultApkSignerEngine build() throws InvalidKeyException {
+            return new DefaultApkSignerEngine(
+                    mSignerConfigs,
+                    mMinSdkVersion,
+                    mV1SigningEnabled,
+                    mV2SigningEnabled,
+                    mOtherSignersSignaturesPreserved);
+        }
+
+        /**
+         * Sets whether the APK should be signed using JAR signing (aka v1 signature scheme).
+         *
+         * <p>By default, the APK will be signed using this scheme.
+         */
+        public Builder setV1SigningEnabled(boolean enabled) {
+            mV1SigningEnabled = enabled;
+            return this;
+        }
+
+        /**
+         * Sets whether the APK should be signed using APK Signature Scheme v2 (aka v2 signature
+         * scheme).
+         *
+         * <p>By default, the APK will be signed using this scheme.
+         */
+        public Builder setV2SigningEnabled(boolean enabled) {
+            mV2SigningEnabled = enabled;
+            return this;
+        }
+
+        /**
+         * Sets whether signatures produced by signers other than the ones configured in this engine
+         * should be copied from the input APK to the output APK.
+         *
+         * <p>By default, signatures of other signers are omitted from the output APK.
+         */
+        public Builder setOtherSignersSignaturesPreserved(boolean preserved) {
+            mOtherSignersSignaturesPreserved = preserved;
+            return this;
+        }
+    }
+}
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/internal/apk/v1/DigestAlgorithm.java b/tools/apksigner/core/src/com/android/apksigner/core/internal/apk/v1/DigestAlgorithm.java
new file mode 100644
index 0000000..71e698b
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/internal/apk/v1/DigestAlgorithm.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.apksigner.core.internal.apk.v1;
+
+/**
+ * Digest algorithm used with JAR signing (aka v1 signing scheme).
+ */
+public enum DigestAlgorithm {
+    /** SHA-1 */
+    SHA1("SHA-1"),
+
+    /** SHA2-256 */
+    SHA256("SHA-256");
+
+    private final String mJcaMessageDigestAlgorithm;
+
+    private DigestAlgorithm(String jcaMessageDigestAlgorithm) {
+        mJcaMessageDigestAlgorithm = jcaMessageDigestAlgorithm;
+    }
+
+    /**
+     * Returns the {@link java.security.MessageDigest} algorithm represented by this digest
+     * algorithm.
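+     *
+     * <p>For example (where {@code digestAlgorithm} is any value of this enum):
+     * <pre>{@code
+     * MessageDigest md =
+     *         MessageDigest.getInstance(digestAlgorithm.getJcaMessageDigestAlgorithm());
+     * }</pre>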
+     */
+    String getJcaMessageDigestAlgorithm() {
+        return mJcaMessageDigestAlgorithm;
+    }
+}
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/internal/apk/v1/V1SchemeSigner.java b/tools/apksigner/core/src/com/android/apksigner/core/internal/apk/v1/V1SchemeSigner.java
new file mode 100644
index 0000000..b99cdec
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/internal/apk/v1/V1SchemeSigner.java
@@ -0,0 +1,526 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.apksigner.core.internal.apk.v1;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.security.InvalidKeyException;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.security.PrivateKey;
+import java.security.PublicKey;
+import java.security.SignatureException;
+import java.security.cert.CertificateEncodingException;
+import java.security.cert.X509Certificate;
+import java.util.ArrayList;
+import java.util.Base64;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.jar.Attributes;
+import java.util.jar.Manifest;
+
+import org.bouncycastle.asn1.ASN1InputStream;
+import org.bouncycastle.asn1.DEROutputStream;
+import org.bouncycastle.cert.jcajce.JcaCertStore;
+import org.bouncycastle.cms.CMSException;
+import org.bouncycastle.cms.CMSProcessableByteArray;
+import org.bouncycastle.cms.CMSSignedData;
+import org.bouncycastle.cms.CMSSignedDataGenerator;
+import org.bouncycastle.cms.jcajce.JcaSignerInfoGeneratorBuilder;
+import org.bouncycastle.operator.ContentSigner;
+import org.bouncycastle.operator.OperatorCreationException;
+import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;
+import org.bouncycastle.operator.jcajce.JcaDigestCalculatorProviderBuilder;
+
+import com.android.apksigner.core.internal.jar.ManifestWriter;
+import com.android.apksigner.core.internal.jar.SignatureFileWriter;
+import com.android.apksigner.core.internal.util.Pair;
+
+/**
+ * APK signer which uses JAR signing (aka v1 signing scheme).
+ *
+ * @see <a href="https://docs.oracle.com/javase/8/docs/technotes/guides/jar/jar.html#Signed_JAR_File">Signed JAR File</a>
+ */
+public abstract class V1SchemeSigner {
+
+    public static final String MANIFEST_ENTRY_NAME = "META-INF/MANIFEST.MF";
+
+    private static final Attributes.Name ATTRIBUTE_NAME_CREATED_BY =
+            new Attributes.Name("Created-By");
+    private static final String ATTRIBUTE_DEFALT_VALUE_CREATED_BY = "1.0 (Android apksigner)";
+    private static final String ATTRIBUTE_VALUE_MANIFEST_VERSION = "1.0";
+    private static final String ATTRIBUTE_VALUE_SIGNATURE_VERSION = "1.0";
+
+    private static final Attributes.Name SF_ATTRIBUTE_NAME_ANDROID_APK_SIGNED_NAME =
+            new Attributes.Name("X-Android-APK-Signed");
+
+    /**
+     * Signer configuration.
+     */
+    public static class SignerConfig {
+        /** Name. */
+        public String name;
+
+        /** Private key. */
+        public PrivateKey privateKey;
+
+        /**
+         * Certificates, with the first certificate containing the public key corresponding to
+         * {@link #privateKey}.
+         */
+        public List<X509Certificate> certificates;
+
+        /**
+         * Digest algorithm used for the signature.
+         */
+        public DigestAlgorithm signatureDigestAlgorithm;
+
+        /**
+         * Digest algorithm used for digests of JAR entries and MANIFEST.MF.
+         */
+        public DigestAlgorithm contentDigestAlgorithm;
+    }
+
+    /** Hidden constructor to prevent instantiation. */
+    private V1SchemeSigner() {}
+
+    /**
+     * Gets the JAR signing digest algorithm to be used for signing an APK using the provided key.
+     *
+     * @param minSdkVersion minimum API Level of the platform on which the APK may be installed (see
+     *        AndroidManifest.xml minSdkVersion attribute)
+     *
+     * @throws InvalidKeyException if the provided key is not suitable for signing APKs using
+     *         JAR signing (aka v1 signature scheme)
+     */
+    public static DigestAlgorithm getSuggestedSignatureDigestAlgorithm(
+            PublicKey signingKey, int minSdkVersion) throws InvalidKeyException {
+        String keyAlgorithm = signingKey.getAlgorithm();
+        if ("RSA".equalsIgnoreCase(keyAlgorithm)) {
+            // Prior to API Level 18, only SHA-1 can be used with RSA.
+            if (minSdkVersion < 18) {
+                return DigestAlgorithm.SHA1;
+            }
+            return DigestAlgorithm.SHA256;
+        } else if ("DSA".equalsIgnoreCase(keyAlgorithm)) {
+            // Prior to API Level 21, only SHA-1 can be used with DSA
+            if (minSdkVersion < 21) {
+                return DigestAlgorithm.SHA1;
+            } else {
+                return DigestAlgorithm.SHA256;
+            }
+        } else if ("EC".equalsIgnoreCase(keyAlgorithm)) {
+            if (minSdkVersion < 18) {
+                throw new InvalidKeyException(
+                        "ECDSA signatures only supported for minSdkVersion 18 and higher");
+            }
+            // Prior to API Level 21, only SHA-1 can be used with ECDSA
+            if (minSdkVersion < 21) {
+                return DigestAlgorithm.SHA1;
+            } else {
+                return DigestAlgorithm.SHA256;
+            }
+        } else {
+            throw new InvalidKeyException("Unsupported key algorithm: " + keyAlgorithm);
+        }
+    }
+
+    /**
+     * Returns the JAR signing digest algorithm to be used for JAR entry digests.
+     *
+     * @param minSdkVersion minimum API Level of the platform on which the APK may be installed (see
+     *        AndroidManifest.xml minSdkVersion attribute)
+     */
+    public static DigestAlgorithm getSuggestedContentDigestAlgorithm(int minSdkVersion) {
+        return (minSdkVersion >= 18) ? DigestAlgorithm.SHA256 : DigestAlgorithm.SHA1;
+    }
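A quick illustration of the selection logic above, with the key assumed to be loaded elsewhere (publicKey and printSuggestedDigests are placeholders):

    // Illustrative sketch: suggested v1 digest algorithms for a key at a given minSdkVersion.
    // Requires: java.security.PublicKey, java.security.InvalidKeyException.
    static void printSuggestedDigests(PublicKey publicKey, int minSdkVersion)
            throws InvalidKeyException {
        DigestAlgorithm signatureDigest =
                V1SchemeSigner.getSuggestedSignatureDigestAlgorithm(publicKey, minSdkVersion);
        DigestAlgorithm contentDigest =
                V1SchemeSigner.getSuggestedContentDigestAlgorithm(minSdkVersion);
        // For an RSA key, minSdkVersion 17 yields SHA1 for both; 18 and above yields SHA256.
        System.out.println(signatureDigest + " / " + contentDigest);
    }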
+
+    /**
+     * Returns a new {@link MessageDigest} instance corresponding to the provided digest algorithm.
+     */
+    public static MessageDigest getMessageDigestInstance(DigestAlgorithm digestAlgorithm) {
+        String jcaAlgorithm = digestAlgorithm.getJcaMessageDigestAlgorithm();
+        try {
+            return MessageDigest.getInstance(jcaAlgorithm);
+        } catch (NoSuchAlgorithmException e) {
+            throw new RuntimeException("Failed to obtain " + jcaAlgorithm + " MessageDigest", e);
+        }
+    }
+
+    /**
+     * Returns {@code true} if the provided JAR entry must be mentioned in the signed JAR archive's
+     * manifest.
+     */
+    public static boolean isJarEntryDigestNeededInManifest(String entryName) {
+        // See https://docs.oracle.com/javase/8/docs/technotes/guides/jar/jar.html#Signed_JAR_File
+
+        // Entries outside of META-INF must be listed in the manifest.
+        if (!entryName.startsWith("META-INF/")) {
+            return true;
+        }
+        // Entries in subdirectories of META-INF must be listed in the manifest.
+        if (entryName.indexOf('/', "META-INF/".length()) != -1) {
+            return true;
+        }
+
+        // Ignored file names (case-insensitive) in META-INF directory:
+        //   MANIFEST.MF
+        //   *.SF
+        //   *.RSA
+        //   *.DSA
+        //   *.EC
+        //   SIG-*
+        String fileNameLowerCase =
+                entryName.substring("META-INF/".length()).toLowerCase(Locale.US);
+        if (("manifest.mf".equals(fileNameLowerCase))
+                || (fileNameLowerCase.endsWith(".sf"))
+                || (fileNameLowerCase.endsWith(".rsa"))
+                || (fileNameLowerCase.endsWith(".dsa"))
+                || (fileNameLowerCase.endsWith(".ec"))
+                || (fileNameLowerCase.startsWith("sig-"))) {
+            return false;
+        }
+        return true;
+    }
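A few representative inputs make the filtering above concrete (the results follow directly from the checks in the method):

    V1SchemeSigner.isJarEntryDigestNeededInManifest("classes.dex");            // true
    V1SchemeSigner.isJarEntryDigestNeededInManifest("META-INF/services/x");    // true (subdirectory)
    V1SchemeSigner.isJarEntryDigestNeededInManifest("META-INF/MANIFEST.MF");   // false
    V1SchemeSigner.isJarEntryDigestNeededInManifest("META-INF/CERT.SF");       // false
    V1SchemeSigner.isJarEntryDigestNeededInManifest("META-INF/CERT.RSA");      // false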
+
+    /**
+     * Signs the provided APK using JAR signing (aka v1 signature scheme) and returns the list of
+     * JAR entries which need to be added to the APK as part of the signature.
+     *
+     * @param signerConfigs signer configurations, one for each signer. At least one signer config
+     *        must be provided.
+     *
+     * @throws InvalidKeyException if a signing key is not suitable for this signature scheme or
+     *         cannot be used in general
+     * @throws SignatureException if an error occurs when computing digests or generating
+     *         signatures
+     */
+    public static List<Pair<String, byte[]>> sign(
+            List<SignerConfig> signerConfigs,
+            DigestAlgorithm jarEntryDigestAlgorithm,
+            Map<String, byte[]> jarEntryDigests,
+            List<Integer> apkSigningSchemeIds,
+            byte[] sourceManifestBytes)
+                    throws InvalidKeyException, CertificateEncodingException, SignatureException {
+        if (signerConfigs.isEmpty()) {
+            throw new IllegalArgumentException("At least one signer config must be provided");
+        }
+        OutputManifestFile manifest =
+                generateManifestFile(jarEntryDigestAlgorithm, jarEntryDigests, sourceManifestBytes);
+
+        return signManifest(signerConfigs, jarEntryDigestAlgorithm, apkSigningSchemeIds, manifest);
+    }
+
+    /**
+     * Signs the provided APK using JAR signing (aka v1 signature scheme) and returns the list of
+     * JAR entries which need to be added to the APK as part of the signature.
+     *
+     * @param signerConfigs signer configurations, one for each signer. At least one signer config
+     *        must be provided.
+     *
+     * @throws InvalidKeyException if a signing key is not suitable for this signature scheme or
+     *         cannot be used in general
+     * @throws SignatureException if an error occurs when computing digests or generating
+     *         signatures
+     */
+    public static List<Pair<String, byte[]>> signManifest(
+            List<SignerConfig> signerConfigs,
+            DigestAlgorithm digestAlgorithm,
+            List<Integer> apkSigningSchemeIds,
+            OutputManifestFile manifest)
+                    throws InvalidKeyException, CertificateEncodingException, SignatureException {
+        if (signerConfigs.isEmpty()) {
+            throw new IllegalArgumentException("At least one signer config must be provided");
+        }
+
+        // For each signer output .SF and .(RSA|DSA|EC) file, then output MANIFEST.MF.
+        List<Pair<String, byte[]>> signatureJarEntries =
+                new ArrayList<>(2 * signerConfigs.size() + 1);
+        byte[] sfBytes =
+                generateSignatureFile(apkSigningSchemeIds, digestAlgorithm, manifest);
+        for (SignerConfig signerConfig : signerConfigs) {
+            String signerName = signerConfig.name;
+            byte[] signatureBlock;
+            try {
+                signatureBlock = generateSignatureBlock(signerConfig, sfBytes);
+            } catch (InvalidKeyException e) {
+                throw new InvalidKeyException(
+                        "Failed to sign using signer \"" + signerName + "\"", e);
+            } catch (CertificateEncodingException e) {
+                throw new CertificateEncodingException(
+                        "Failed to sign using signer \"" + signerName + "\"", e);
+            } catch (SignatureException e) {
+                throw new SignatureException(
+                        "Failed to sign using signer \"" + signerName + "\"", e);
+            }
+            signatureJarEntries.add(Pair.of("META-INF/" + signerName + ".SF", sfBytes));
+            PublicKey publicKey = signerConfig.certificates.get(0).getPublicKey();
+            String signatureBlockFileName =
+                    "META-INF/" + signerName + "."
+                            + publicKey.getAlgorithm().toUpperCase(Locale.US);
+            signatureJarEntries.add(
+                    Pair.of(signatureBlockFileName, signatureBlock));
+        }
+        signatureJarEntries.add(Pair.of(MANIFEST_ENTRY_NAME, manifest.contents));
+        return signatureJarEntries;
+    }
+
+    /**
+     * Returns the names of JAR entries which this signer will produce as part of the v1 signature.
+     */
+    public static Set<String> getOutputEntryNames(List<SignerConfig> signerConfigs) {
+        Set<String> result = new HashSet<>(2 * signerConfigs.size() + 1);
+        for (SignerConfig signerConfig : signerConfigs) {
+            String signerName = signerConfig.name;
+            result.add("META-INF/" + signerName + ".SF");
+            PublicKey publicKey = signerConfig.certificates.get(0).getPublicKey();
+            String signatureBlockFileName =
+                    "META-INF/" + signerName + "."
+                            + publicKey.getAlgorithm().toUpperCase(Locale.US);
+            result.add(signatureBlockFileName);
+        }
+        result.add(MANIFEST_ENTRY_NAME);
+        return result;
+    }
+
+    /**
+     * Generates and returns the {@code META-INF/MANIFEST.MF} file based on the provided (optional)
+     * input {@code MANIFEST.MF} and digests of JAR entries covered by the manifest.
+     */
+    public static OutputManifestFile generateManifestFile(
+            DigestAlgorithm jarEntryDigestAlgorithm,
+            Map<String, byte[]> jarEntryDigests,
+            byte[] sourceManifestBytes) {
+        Manifest sourceManifest = null;
+        if (sourceManifestBytes != null) {
+            try {
+                sourceManifest = new Manifest(new ByteArrayInputStream(sourceManifestBytes));
+            } catch (IOException e) {
+                throw new IllegalArgumentException("Failed to parse source MANIFEST.MF", e);
+            }
+        }
+        ByteArrayOutputStream manifestOut = new ByteArrayOutputStream();
+        Attributes mainAttrs = new Attributes();
+        // Copy the main section from the source manifest (if provided). Otherwise use defaults.
+        if (sourceManifest != null) {
+            mainAttrs.putAll(sourceManifest.getMainAttributes());
+        } else {
+            mainAttrs.put(Attributes.Name.MANIFEST_VERSION, ATTRIBUTE_VALUE_MANIFEST_VERSION);
+            mainAttrs.put(ATTRIBUTE_NAME_CREATED_BY, ATTRIBUTE_DEFAULT_VALUE_CREATED_BY);
+        }
+
+        try {
+            ManifestWriter.writeMainSection(manifestOut, mainAttrs);
+        } catch (IOException e) {
+            throw new RuntimeException("Failed to write in-memory MANIFEST.MF", e);
+        }
+
+        List<String> sortedEntryNames = new ArrayList<>(jarEntryDigests.keySet());
+        Collections.sort(sortedEntryNames);
+        SortedMap<String, byte[]> individualSectionsContents = new TreeMap<>();
+        String entryDigestAttributeName = getEntryDigestAttributeName(jarEntryDigestAlgorithm);
+        for (String entryName : sortedEntryNames) {
+            byte[] entryDigest = jarEntryDigests.get(entryName);
+            Attributes entryAttrs = new Attributes();
+            entryAttrs.putValue(
+                    entryDigestAttributeName,
+                    Base64.getEncoder().encodeToString(entryDigest));
+            ByteArrayOutputStream sectionOut = new ByteArrayOutputStream();
+            byte[] sectionBytes;
+            try {
+                ManifestWriter.writeIndividualSection(sectionOut, entryName, entryAttrs);
+                sectionBytes = sectionOut.toByteArray();
+                manifestOut.write(sectionBytes);
+            } catch (IOException e) {
+                throw new RuntimeException("Failed to write in-memory MANIFEST.MF", e);
+            }
+            individualSectionsContents.put(entryName, sectionBytes);
+        }
+
+        OutputManifestFile result = new OutputManifestFile();
+        result.contents = manifestOut.toByteArray();
+        result.mainSectionAttributes = mainAttrs;
+        result.individualSectionsContents = individualSectionsContents;
+        return result;
+    }
+
+    public static class OutputManifestFile {
+        public byte[] contents;
+        public SortedMap<String, byte[]> individualSectionsContents;
+        public Attributes mainSectionAttributes;
+    }
+
+    private static byte[] generateSignatureFile(
+            List<Integer> apkSignatureSchemeIds,
+            DigestAlgorithm manifestDigestAlgorithm,
+            OutputManifestFile manifest) {
+        Manifest sf = new Manifest();
+        Attributes mainAttrs = sf.getMainAttributes();
+        mainAttrs.put(Attributes.Name.SIGNATURE_VERSION, ATTRIBUTE_VALUE_SIGNATURE_VERSION);
+        mainAttrs.put(ATTRIBUTE_NAME_CREATED_BY, ATTRIBUTE_DEFAULT_VALUE_CREATED_BY);
+        if (!apkSignatureSchemeIds.isEmpty()) {
+            // Add APK Signature Scheme v2 (and newer) signature stripping protection.
+            // This attribute indicates that this APK is supposed to have been signed using one or
+            // more APK-specific signature schemes in addition to the standard JAR signature scheme
+            // used by this code. APK signature verifier should reject the APK if it does not
+            // contain a signature for the signature scheme the verifier prefers out of this set.
+            StringBuilder attrValue = new StringBuilder();
+            for (int id : apkSignatureSchemeIds) {
+                if (attrValue.length() > 0) {
+                    attrValue.append(", ");
+                }
+                attrValue.append(String.valueOf(id));
+            }
+            mainAttrs.put(
+                    SF_ATTRIBUTE_NAME_ANDROID_APK_SIGNED_NAME,
+                    attrValue.toString());
+        }
+
+        // Add main attribute containing the digest of MANIFEST.MF.
+        MessageDigest md = getMessageDigestInstance(manifestDigestAlgorithm);
+        mainAttrs.putValue(
+                getManifestDigestAttributeName(manifestDigestAlgorithm),
+                Base64.getEncoder().encodeToString(md.digest(manifest.contents)));
+        ByteArrayOutputStream out = new ByteArrayOutputStream();
+        try {
+            SignatureFileWriter.writeMainSection(out, mainAttrs);
+        } catch (IOException e) {
+            throw new RuntimeException("Failed to write in-memory .SF file", e);
+        }
+        String entryDigestAttributeName = getEntryDigestAttributeName(manifestDigestAlgorithm);
+        for (Map.Entry<String, byte[]> manifestSection
+                : manifest.individualSectionsContents.entrySet()) {
+            String sectionName = manifestSection.getKey();
+            byte[] sectionContents = manifestSection.getValue();
+            byte[] sectionDigest = md.digest(sectionContents);
+            Attributes attrs = new Attributes();
+            attrs.putValue(
+                    entryDigestAttributeName,
+                    Base64.getEncoder().encodeToString(sectionDigest));
+
+            try {
+                SignatureFileWriter.writeIndividualSection(out, sectionName, attrs);
+            } catch (IOException e) {
+                throw new RuntimeException("Failed to write in-memory .SF file", e);
+            }
+        }
+
+        // A bug in the java.util.jar implementation of Android platforms up to version 1.6 will
+        // cause a spurious IOException to be thrown if the length of the signature file is a
+        // multiple of 1024 bytes. As a workaround, add an extra CRLF in this case.
+        if ((out.size() > 0) && ((out.size() % 1024) == 0)) {
+            try {
+                SignatureFileWriter.writeSectionDelimiter(out);
+            } catch (IOException e) {
+                throw new RuntimeException("Failed to write to ByteArrayOutputStream", e);
+            }
+        }
+
+        return out.toByteArray();
+    }
+
+    private static byte[] generateSignatureBlock(
+            SignerConfig signerConfig, byte[] signatureFileBytes)
+                    throws InvalidKeyException, CertificateEncodingException, SignatureException {
+        JcaCertStore certs = new JcaCertStore(signerConfig.certificates);
+        X509Certificate signerCert = signerConfig.certificates.get(0);
+        String jcaSignatureAlgorithm =
+                getJcaSignatureAlgorithm(
+                        signerCert.getPublicKey(), signerConfig.signatureDigestAlgorithm);
+        try {
+            ContentSigner signer =
+                    new JcaContentSignerBuilder(jcaSignatureAlgorithm)
+                    .build(signerConfig.privateKey);
+            CMSSignedDataGenerator gen = new CMSSignedDataGenerator();
+            gen.addSignerInfoGenerator(
+                    new JcaSignerInfoGeneratorBuilder(
+                            new JcaDigestCalculatorProviderBuilder().build())
+                    .setDirectSignature(true)
+                    .build(signer, signerCert));
+            gen.addCertificates(certs);
+
+            CMSSignedData sigData =
+                    gen.generate(new CMSProcessableByteArray(signatureFileBytes), false);
+
+            ByteArrayOutputStream out = new ByteArrayOutputStream();
+            try (ASN1InputStream asn1 = new ASN1InputStream(sigData.getEncoded())) {
+                DEROutputStream dos = new DEROutputStream(out);
+                dos.writeObject(asn1.readObject());
+            }
+            return out.toByteArray();
+        } catch (OperatorCreationException | CMSException | IOException e) {
+            throw new SignatureException("Failed to generate signature", e);
+        }
+    }
+
+    private static String getEntryDigestAttributeName(DigestAlgorithm digestAlgorithm) {
+        switch (digestAlgorithm) {
+            case SHA1:
+                return "SHA1-Digest";
+            case SHA256:
+                return "SHA-256-Digest";
+            default:
+                throw new IllegalArgumentException(
+                        "Unexpected content digest algorithm: " + digestAlgorithm);
+        }
+    }
+
+    private static String getManifestDigestAttributeName(DigestAlgorithm digestAlgorithm) {
+        switch (digestAlgorithm) {
+            case SHA1:
+                return "SHA1-Digest-Manifest";
+            case SHA256:
+                return "SHA-256-Digest-Manifest";
+            default:
+                throw new IllegalArgumentException(
+                        "Unexpected content digest algorithm: " + digestAlgorithm);
+        }
+    }
+
+    private static String getJcaSignatureAlgorithm(
+            PublicKey publicKey, DigestAlgorithm digestAlgorithm) throws InvalidKeyException {
+        String keyAlgorithm = publicKey.getAlgorithm();
+        String digestPrefixForSigAlg;
+        switch (digestAlgorithm) {
+            case SHA1:
+                digestPrefixForSigAlg = "SHA1";
+                break;
+            case SHA256:
+                digestPrefixForSigAlg = "SHA256";
+                break;
+            default:
+                throw new IllegalArgumentException(
+                        "Unexpected digest algorithm: " + digestAlgorithm);
+        }
+        if ("RSA".equalsIgnoreCase(keyAlgorithm)) {
+            return digestPrefixForSigAlg + "withRSA";
+        } else if ("DSA".equalsIgnoreCase(keyAlgorithm)) {
+            return digestPrefixForSigAlg + "withDSA";
+        } else if ("EC".equalsIgnoreCase(keyAlgorithm)) {
+            return digestPrefixForSigAlg + "withECDSA";
+        } else {
+            throw new InvalidKeyException("Unsupported key algorithm: " + keyAlgorithm);
+        }
+    }
+}
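Putting the pieces of V1SchemeSigner together, a rough end-to-end sketch; key and certificate loading, and writing the returned entries into the output ZIP, are elided, and the names below (signV1, apkEntries, and so on) are placeholders:

    // Rough sketch: produce the v1 signature entries for an APK's uncompressed entries.
    // Requires java.security.*, java.util.*, and the Pair class imported by V1SchemeSigner.
    static List<Pair<String, byte[]>> signV1(
            PrivateKey privateKey,
            X509Certificate certificate,
            Map<String, byte[]> apkEntries,   // entry name -> uncompressed contents
            int minSdkVersion) throws Exception {
        DigestAlgorithm contentDigestAlg =
                V1SchemeSigner.getSuggestedContentDigestAlgorithm(minSdkVersion);
        MessageDigest md = V1SchemeSigner.getMessageDigestInstance(contentDigestAlg);

        // Digest every entry that the manifest has to cover.
        Map<String, byte[]> entryDigests = new HashMap<>();
        for (Map.Entry<String, byte[]> entry : apkEntries.entrySet()) {
            if (V1SchemeSigner.isJarEntryDigestNeededInManifest(entry.getKey())) {
                entryDigests.put(entry.getKey(), md.digest(entry.getValue()));
            }
        }

        V1SchemeSigner.SignerConfig signer = new V1SchemeSigner.SignerConfig();
        signer.name = "CERT";
        signer.privateKey = privateKey;
        signer.certificates = Collections.singletonList(certificate);
        signer.signatureDigestAlgorithm = V1SchemeSigner.getSuggestedSignatureDigestAlgorithm(
                certificate.getPublicKey(), minSdkVersion);
        signer.contentDigestAlgorithm = contentDigestAlg;

        // "2" advertises an accompanying APK Signature Scheme v2 signature in the .SF file.
        return V1SchemeSigner.sign(
                Collections.singletonList(signer),
                contentDigestAlg,
                entryDigests,
                Collections.singletonList(2),
                null /* no source MANIFEST.MF */);
    }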
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/internal/apk/v2/ContentDigestAlgorithm.java b/tools/apksigner/core/src/com/android/apksigner/core/internal/apk/v2/ContentDigestAlgorithm.java
new file mode 100644
index 0000000..cb0f84a
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/internal/apk/v2/ContentDigestAlgorithm.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.apksigner.core.internal.apk.v2;
+
+/**
+ * APK Signature Scheme v2 content digest algorithm.
+ */
+enum ContentDigestAlgorithm {
+    /** SHA2-256 over 1 MB chunks. */
+    CHUNKED_SHA256("SHA-256", 256 / 8),
+
+    /** SHA2-512 over 1 MB chunks. */
+    CHUNKED_SHA512("SHA-512", 512 / 8);
+
+    private final String mJcaMessageDigestAlgorithm;
+    private final int mChunkDigestOutputSizeBytes;
+
+    private ContentDigestAlgorithm(
+            String jcaMessageDigestAlgorithm, int chunkDigestOutputSizeBytes) {
+        mJcaMessageDigestAlgorithm = jcaMessageDigestAlgorithm;
+        mChunkDigestOutputSizeBytes = chunkDigestOutputSizeBytes;
+    }
+
+    /**
+     * Returns the {@link java.security.MessageDigest} algorithm used for computing digests of
+     * chunks by this content digest algorithm.
+     */
+    String getJcaMessageDigestAlgorithm() {
+        return mJcaMessageDigestAlgorithm;
+    }
+
+    /**
+     * Returns the size (in bytes) of the digest of a chunk of content.
+     */
+    int getChunkDigestOutputSizeBytes() {
+        return mChunkDigestOutputSizeBytes;
+    }
+}
\ No newline at end of file
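The digest size carried by each constant is simply the output size of the corresponding JCA digest; a tiny same-package sanity check (illustrative only):

    // Illustrative check: declared chunk digest sizes match the JCA digest lengths.
    // Requires: import java.security.MessageDigest; throws NoSuchAlgorithmException.
    for (ContentDigestAlgorithm alg : ContentDigestAlgorithm.values()) {
        MessageDigest md = MessageDigest.getInstance(alg.getJcaMessageDigestAlgorithm());
        // CHUNKED_SHA256 -> "SHA-256", 32 bytes; CHUNKED_SHA512 -> "SHA-512", 64 bytes.
        assert md.getDigestLength() == alg.getChunkDigestOutputSizeBytes();
    }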
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/internal/apk/v2/MessageDigestSink.java b/tools/apksigner/core/src/com/android/apksigner/core/internal/apk/v2/MessageDigestSink.java
new file mode 100644
index 0000000..9ef04bf
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/internal/apk/v2/MessageDigestSink.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.apksigner.core.internal.apk.v2;
+
+import com.android.apksigner.core.util.DataSink;
+
+import java.nio.ByteBuffer;
+import java.security.MessageDigest;
+
+/**
+ * Data sink which feeds all received data into the associated {@link MessageDigest} instances. Each
+ * {@code MessageDigest} instance receives the same data.
+ */
+public class MessageDigestSink implements DataSink {
+
+    private final MessageDigest[] mMessageDigests;
+
+    public MessageDigestSink(MessageDigest[] digests) {
+        mMessageDigests = digests;
+    }
+
+    @Override
+    public void consume(byte[] buf, int offset, int length) {
+        for (MessageDigest md : mMessageDigests) {
+            md.update(buf, offset, length);
+        }
+    }
+
+    @Override
+    public void consume(ByteBuffer buf) {
+        int originalPosition = buf.position();
+        for (MessageDigest md : mMessageDigests) {
+            // Reset the position back to the original because the previous iteration's
+            // MessageDigest.update set the buffer's position to the buffer's limit.
+            buf.position(originalPosition);
+            md.update(buf);
+        }
+    }
+}
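Typical use is to fan one pass over the data out to several digests at once; a short sketch:

    // Sketch: one pass over the data updates both digests.
    // Requires: java.security.MessageDigest, java.nio.ByteBuffer.
    MessageDigest sha256 = MessageDigest.getInstance("SHA-256");
    MessageDigest sha512 = MessageDigest.getInstance("SHA-512");
    MessageDigestSink sink = new MessageDigestSink(new MessageDigest[] {sha256, sha512});
    byte[] data = {1, 2, 3, 4};
    sink.consume(data, 0, data.length);
    sink.consume(ByteBuffer.wrap(data));      // both digests have now seen 8 bytes
    byte[] d256 = sha256.digest();
    byte[] d512 = sha512.digest();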
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/internal/apk/v2/SignatureAlgorithm.java b/tools/apksigner/core/src/com/android/apksigner/core/internal/apk/v2/SignatureAlgorithm.java
new file mode 100644
index 0000000..3c7b5f0
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/internal/apk/v2/SignatureAlgorithm.java
@@ -0,0 +1,142 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.apksigner.core.internal.apk.v2;
+
+import com.android.apksigner.core.internal.util.Pair;
+
+import java.security.spec.AlgorithmParameterSpec;
+import java.security.spec.MGF1ParameterSpec;
+import java.security.spec.PSSParameterSpec;
+
+/**
+ * APK Signature Scheme v2 signature algorithm.
+ */
+public enum SignatureAlgorithm {
+    /**
+     * RSASSA-PSS with SHA2-256 digest, SHA2-256 MGF1, 32 bytes of salt, trailer: 0xbc, content
+     * digested using SHA2-256 in 1 MB chunks.
+     */
+    RSA_PSS_WITH_SHA256(
+            0x0101,
+            ContentDigestAlgorithm.CHUNKED_SHA256,
+            "RSA",
+            Pair.of("SHA256withRSA/PSS",
+                    new PSSParameterSpec(
+                            "SHA-256", "MGF1", MGF1ParameterSpec.SHA256, 256 / 8, 1))),
+
+    /**
+     * RSASSA-PSS with SHA2-512 digest, SHA2-512 MGF1, 64 bytes of salt, trailer: 0xbc, content
+     * digested using SHA2-512 in 1 MB chunks.
+     */
+    RSA_PSS_WITH_SHA512(
+            0x0102,
+            ContentDigestAlgorithm.CHUNKED_SHA512,
+            "RSA",
+            Pair.of(
+                    "SHA512withRSA/PSS",
+                    new PSSParameterSpec(
+                            "SHA-512", "MGF1", MGF1ParameterSpec.SHA512, 512 / 8, 1))),
+
+    /** RSASSA-PKCS1-v1_5 with SHA2-256 digest, content digested using SHA2-256 in 1 MB chunks. */
+    RSA_PKCS1_V1_5_WITH_SHA256(
+            0x0103,
+            ContentDigestAlgorithm.CHUNKED_SHA256,
+            "RSA",
+            Pair.of("SHA256withRSA", null)),
+
+    /** RSASSA-PKCS1-v1_5 with SHA2-512 digest, content digested using SHA2-512 in 1 MB chunks. */
+    RSA_PKCS1_V1_5_WITH_SHA512(
+            0x0104,
+            ContentDigestAlgorithm.CHUNKED_SHA512,
+            "RSA",
+            Pair.of("SHA512withRSA", null)),
+
+    /** ECDSA with SHA2-256 digest, content digested using SHA2-256 in 1 MB chunks. */
+    ECDSA_WITH_SHA256(
+            0x0201,
+            ContentDigestAlgorithm.CHUNKED_SHA256,
+            "EC",
+            Pair.of("SHA256withECDSA", null)),
+
+    /** ECDSA with SHA2-512 digest, content digested using SHA2-512 in 1 MB chunks. */
+    ECDSA_WITH_SHA512(
+            0x0202,
+            ContentDigestAlgorithm.CHUNKED_SHA512,
+            "EC",
+            Pair.of("SHA512withECDSA", null)),
+
+    /** DSA with SHA2-256 digest, content digested using SHA2-256 in 1 MB chunks. */
+    DSA_WITH_SHA256(
+            0x0301,
+            ContentDigestAlgorithm.CHUNKED_SHA256,
+            "DSA",
+            Pair.of("SHA256withDSA", null));
+
+    private final int mId;
+    private final String mJcaKeyAlgorithm;
+    private final ContentDigestAlgorithm mContentDigestAlgorithm;
+    private final Pair<String, ? extends AlgorithmParameterSpec> mJcaSignatureAlgAndParams;
+
+    private SignatureAlgorithm(int id,
+            ContentDigestAlgorithm contentDigestAlgorithm,
+            String jcaKeyAlgorithm,
+            Pair<String, ? extends AlgorithmParameterSpec> jcaSignatureAlgAndParams) {
+        mId = id;
+        mContentDigestAlgorithm = contentDigestAlgorithm;
+        mJcaKeyAlgorithm = jcaKeyAlgorithm;
+        mJcaSignatureAlgAndParams = jcaSignatureAlgAndParams;
+    }
+
+    /**
+     * Returns the ID of this signature algorithm as used in APK Signature Scheme v2 wire format.
+     */
+    int getId() {
+        return mId;
+    }
+
+    /**
+     * Returns the content digest algorithm associated with this signature algorithm.
+     */
+    ContentDigestAlgorithm getContentDigestAlgorithm() {
+        return mContentDigestAlgorithm;
+    }
+
+    /**
+     * Returns the JCA {@link java.security.Key} algorithm used by this signature scheme.
+     */
+    String getJcaKeyAlgorithm() {
+        return mJcaKeyAlgorithm;
+    }
+
+    /**
+     * Returns the {@link java.security.Signature} algorithm and the {@link AlgorithmParameterSpec}
+     * (or null if not needed) to parameterize the {@code Signature}.
+     */
+    Pair<String, ? extends AlgorithmParameterSpec> getJcaSignatureAlgorithmAndParams() {
+        return mJcaSignatureAlgAndParams;
+    }
+
+    static SignatureAlgorithm findById(int id) {
+        for (SignatureAlgorithm alg : SignatureAlgorithm.values()) {
+            if (alg.getId() == id) {
+                return alg;
+            }
+        }
+
+        return null;
+    }
+}
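The JCA name plus optional parameter spec is everything needed to drive java.security.Signature; a same-package sketch (the getters are package-private, signWith is a placeholder name, and the PSS algorithm names require a provider that supports them):

    // Sketch: sign data with the JCA algorithm and parameters carried by a SignatureAlgorithm.
    // Requires: java.security.PrivateKey, java.security.Signature, AlgorithmParameterSpec, Pair.
    static byte[] signWith(SignatureAlgorithm algorithm, PrivateKey privateKey, byte[] data)
            throws Exception {
        Pair<String, ? extends AlgorithmParameterSpec> algAndParams =
                algorithm.getJcaSignatureAlgorithmAndParams();
        Signature signature = Signature.getInstance(algAndParams.getFirst());
        signature.initSign(privateKey);
        if (algAndParams.getSecond() != null) {
            // Only the RSASSA-PSS variants carry a PSSParameterSpec; the rest pass null.
            signature.setParameter(algAndParams.getSecond());
        }
        signature.update(data);
        return signature.sign();
    }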
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/internal/apk/v2/V2SchemeSigner.java b/tools/apksigner/core/src/com/android/apksigner/core/internal/apk/v2/V2SchemeSigner.java
new file mode 100644
index 0000000..e185346
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/internal/apk/v2/V2SchemeSigner.java
@@ -0,0 +1,614 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.apksigner.core.internal.apk.v2;
+
+import com.android.apksigner.core.internal.util.ByteBufferSink;
+import com.android.apksigner.core.internal.util.Pair;
+import com.android.apksigner.core.internal.zip.ZipUtils;
+import com.android.apksigner.core.util.DataSource;
+import com.android.apksigner.core.util.DataSources;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.security.DigestException;
+import java.security.InvalidAlgorithmParameterException;
+import java.security.InvalidKeyException;
+import java.security.KeyFactory;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.security.PrivateKey;
+import java.security.PublicKey;
+import java.security.Signature;
+import java.security.SignatureException;
+import java.security.cert.CertificateEncodingException;
+import java.security.cert.X509Certificate;
+import java.security.interfaces.ECKey;
+import java.security.interfaces.RSAKey;
+import java.security.spec.AlgorithmParameterSpec;
+import java.security.spec.InvalidKeySpecException;
+import java.security.spec.X509EncodedKeySpec;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * APK Signature Scheme v2 signer.
+ *
+ * <p>APK Signature Scheme v2 is a whole-file signature scheme which aims to protect every single
+ * bit of the APK, as opposed to the JAR Signature Scheme which protects only the names and
+ * uncompressed contents of ZIP entries.
+ *
+ * <p>TODO: Link to APK Signature Scheme v2 documentation once it's available.
+ */
+public abstract class V2SchemeSigner {
+    /*
+     * The two main goals of APK Signature Scheme v2 are:
+     * 1. Detect any unauthorized modifications to the APK. This is achieved by making the signature
+     *    cover every byte of the APK being signed.
+     * 2. Enable much faster signature and integrity verification. This is achieved by requiring
+     *    only a minimal amount of APK parsing before the signature is verified, thus completely
+     *    bypassing ZIP entry decompression and by making integrity verification parallelizable by
+     *    employing a hash tree.
+     *
+     * The generated signature block is wrapped into an APK Signing Block and inserted into the
+     * original APK immediately before the start of ZIP Central Directory. This is to ensure that
+     * JAR and ZIP parsers continue to work on the signed APK. The APK Signing Block is designed for
+     * extensibility. For example, a future signature scheme could insert its signatures there as
+     * well. The contract of the APK Signing Block is that all contents outside of the block must be
+     * protected by signatures inside the block.
+     */
+
+    private static final int CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES = 1024 * 1024;
+
+    private static final byte[] APK_SIGNING_BLOCK_MAGIC =
+          new byte[] {
+              0x41, 0x50, 0x4b, 0x20, 0x53, 0x69, 0x67, 0x20,
+              0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x20, 0x34, 0x32,
+          };
+    private static final int APK_SIGNATURE_SCHEME_V2_BLOCK_ID = 0x7109871a;
+
+    /**
+     * Signer configuration.
+     */
+    public static class SignerConfig {
+        /** Private key. */
+        public PrivateKey privateKey;
+
+        /**
+         * Certificates, with the first certificate containing the public key corresponding to
+         * {@link #privateKey}.
+         */
+        public List<X509Certificate> certificates;
+
+        /**
+         * List of signature algorithms with which to sign.
+         */
+        public List<SignatureAlgorithm> signatureAlgorithms;
+    }
+
+    /** Hidden constructor to prevent instantiation. */
+    private V2SchemeSigner() {}
+
+    /**
+     * Gets the APK Signature Scheme v2 signature algorithms to be used for signing an APK using the
+     * provided key.
+     *
+     * @param minSdkVersion minimum API Level of the platform on which the APK may be installed (see
+     *        AndroidManifest.xml minSdkVersion attribute).
+     *
+     * @throws InvalidKeyException if the provided key is not suitable for signing APKs using
+     *         APK Signature Scheme v2
+     */
+    public static List<SignatureAlgorithm> getSuggestedSignatureAlgorithms(
+            PublicKey signingKey, int minSdkVersion) throws InvalidKeyException {
+        String keyAlgorithm = signingKey.getAlgorithm();
+        if ("RSA".equalsIgnoreCase(keyAlgorithm)) {
+            // Use RSASSA-PKCS1-v1_5 signature scheme instead of RSASSA-PSS to guarantee
+            // deterministic signatures which make life easier for OTA updates (fewer files
+            // changed when deterministic signature schemes are used).
+
+            // Pick a digest which is no weaker than the key.
+            int modulusLengthBits = ((RSAKey) signingKey).getModulus().bitLength();
+            if (modulusLengthBits <= 3072) {
+                // 3072-bit RSA is roughly 128-bit strong, meaning SHA-256 is a good fit.
+                return Collections.singletonList(SignatureAlgorithm.RSA_PKCS1_V1_5_WITH_SHA256);
+            } else {
+                // Keys longer than 3072 bit need to be paired with a stronger digest to avoid the
+                // digest being the weak link. SHA-512 is the next strongest supported digest.
+                return Collections.singletonList(SignatureAlgorithm.RSA_PKCS1_V1_5_WITH_SHA512);
+            }
+        } else if ("DSA".equalsIgnoreCase(keyAlgorithm)) {
+            // DSA is supported only with SHA-256.
+            return Collections.singletonList(SignatureAlgorithm.DSA_WITH_SHA256);
+        } else if ("EC".equalsIgnoreCase(keyAlgorithm)) {
+            // Pick a digest which is no weaker than the key.
+            int keySizeBits = ((ECKey) signingKey).getParams().getOrder().bitLength();
+            if (keySizeBits <= 256) {
+                // 256-bit Elliptic Curve is roughly 128-bit strong, meaning SHA-256 is a good fit.
+                return Collections.singletonList(SignatureAlgorithm.ECDSA_WITH_SHA256);
+            } else {
+                // Keys longer than 256 bit need to be paired with a stronger digest to avoid the
+                // digest being the weak link. SHA-512 is the next strongest supported digest.
+                return Collections.singletonList(SignatureAlgorithm.ECDSA_WITH_SHA512);
+            }
+        } else {
+            throw new InvalidKeyException("Unsupported key algorithm: " + keyAlgorithm);
+        }
+    }
+
+    /**
+     * Signs the provided APK using APK Signature Scheme v2 and returns the APK Signing Block
+     * containing the signature.
+     *
+     * @param signerConfigs signer configurations, one for each signer. At least one signer config
+     *        must be provided.
+     *
+     * @throws IOException if an I/O error occurs
+     * @throws InvalidKeyException if a signing key is not suitable for this signature scheme or
+     *         cannot be used in general
+     * @throws SignatureException if an error occurs when computing digests or generating
+     *         signatures
+     */
+    public static byte[] generateApkSigningBlock(
+            DataSource beforeCentralDir,
+            DataSource centralDir,
+            DataSource eocd,
+            List<SignerConfig> signerConfigs)
+                        throws IOException, InvalidKeyException, SignatureException {
+        if (signerConfigs.isEmpty()) {
+            throw new IllegalArgumentException(
+                    "No signer configs provided. At least one is required");
+        }
+
+        // Figure out which digest(s) to use for APK contents.
+        Set<ContentDigestAlgorithm> contentDigestAlgorithms = new HashSet<>(1);
+        for (SignerConfig signerConfig : signerConfigs) {
+            for (SignatureAlgorithm signatureAlgorithm : signerConfig.signatureAlgorithms) {
+                contentDigestAlgorithms.add(signatureAlgorithm.getContentDigestAlgorithm());
+            }
+        }
+
+        // Ensure that, when digesting, ZIP End of Central Directory record's Central Directory
+        // offset field is treated as pointing to the offset at which the APK Signing Block will
+        // start.
+        long centralDirOffsetForDigesting = beforeCentralDir.size();
+        ByteBuffer eocdBuf = copyToByteBuffer(eocd);
+        eocdBuf.order(ByteOrder.LITTLE_ENDIAN);
+        ZipUtils.setZipEocdCentralDirectoryOffset(eocdBuf, centralDirOffsetForDigesting);
+
+        // Compute digests of APK contents.
+        Map<ContentDigestAlgorithm, byte[]> contentDigests; // digest algorithm ID -> digest
+        try {
+            contentDigests =
+                    computeContentDigests(
+                            contentDigestAlgorithms,
+                            new DataSource[] {
+                                    beforeCentralDir,
+                                    centralDir,
+                                    DataSources.asDataSource(eocdBuf)});
+        } catch (IOException e) {
+            throw new IOException("Failed to read APK being signed", e);
+        } catch (DigestException e) {
+            throw new SignatureException("Failed to compute digests of APK", e);
+        }
+
+        // Sign the digests and wrap the signatures and signer info into an APK Signing Block.
+        return generateApkSigningBlock(signerConfigs, contentDigests);
+    }
+
+    private static Map<ContentDigestAlgorithm, byte[]> computeContentDigests(
+            Set<ContentDigestAlgorithm> digestAlgorithms,
+            DataSource[] contents) throws IOException, DigestException {
+        // For each digest algorithm the result is computed as follows:
+        // 1. Each segment of contents is split into consecutive chunks of 1 MB in size.
+        //    The final chunk will be shorter iff the length of the segment is not a multiple of 1 MB.
+        //    No chunks are produced for empty (zero length) segments.
+        // 2. The digest of each chunk is computed over the concatenation of byte 0xa5, the chunk's
+        //    length in bytes (uint32 little-endian) and the chunk's contents.
+        // 3. The output digest is computed over the concatenation of the byte 0x5a, the number of
+        //    chunks (uint32 little-endian) and the concatenation of digests of chunks of all
+        //    segments in-order.
+
+        long chunkCountLong = 0;
+        for (DataSource input : contents) {
+            chunkCountLong +=
+                    getChunkCount(input.size(), CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES);
+        }
+        if (chunkCountLong > Integer.MAX_VALUE) {
+            throw new DigestException("Input too long: " + chunkCountLong + " chunks");
+        }
+        int chunkCount = (int) chunkCountLong;
+
+        ContentDigestAlgorithm[] digestAlgorithmsArray =
+                digestAlgorithms.toArray(new ContentDigestAlgorithm[digestAlgorithms.size()]);
+        MessageDigest[] mds = new MessageDigest[digestAlgorithmsArray.length];
+        byte[][] digestsOfChunks = new byte[digestAlgorithmsArray.length][];
+        int[] digestOutputSizes = new int[digestAlgorithmsArray.length];
+        for (int i = 0; i < digestAlgorithmsArray.length; i++) {
+            ContentDigestAlgorithm digestAlgorithm = digestAlgorithmsArray[i];
+            int digestOutputSizeBytes = digestAlgorithm.getChunkDigestOutputSizeBytes();
+            digestOutputSizes[i] = digestOutputSizeBytes;
+            byte[] concatenationOfChunkCountAndChunkDigests =
+                    new byte[5 + chunkCount * digestOutputSizeBytes];
+            concatenationOfChunkCountAndChunkDigests[0] = 0x5a;
+            setUnsignedInt32LittleEndian(
+                    chunkCount, concatenationOfChunkCountAndChunkDigests, 1);
+            digestsOfChunks[i] = concatenationOfChunkCountAndChunkDigests;
+            String jcaAlgorithm = digestAlgorithm.getJcaMessageDigestAlgorithm();
+            try {
+                mds[i] = MessageDigest.getInstance(jcaAlgorithm);
+            } catch (NoSuchAlgorithmException e) {
+                throw new RuntimeException(jcaAlgorithm + " MessageDigest not supported", e);
+            }
+        }
+
+        MessageDigestSink mdSink = new MessageDigestSink(mds);
+        byte[] chunkContentPrefix = new byte[5];
+        chunkContentPrefix[0] = (byte) 0xa5;
+        int chunkIndex = 0;
+        // Optimization opportunity: digests of chunks can be computed in parallel. However,
+        // determining the number of computations to be performed in parallel is non-trivial. This
+        // depends on a wide range of factors, such as data source type (e.g., in-memory or fetched
+        // from file), CPU/memory/disk cache bandwidth and latency, interconnect architecture of CPU
+        // cores, load on the system from other threads of execution and other processes, size of
+        // input.
+        // For now, we compute these digests sequentially and thus have the luxury of improving
+        // performance by writing the digest of each chunk into a pre-allocated buffer at exactly
+        // the right position. This avoids unnecessary allocations, copying, and enables the final
+        // digest to be more efficient because it's presented with all of its input in one go.
+        for (DataSource input : contents) {
+            long inputOffset = 0;
+            long inputRemaining = input.size();
+            while (inputRemaining > 0) {
+                int chunkSize =
+                        (int) Math.min(inputRemaining, CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES);
+                setUnsignedInt32LittleEndian(chunkSize, chunkContentPrefix, 1);
+                for (int i = 0; i < mds.length; i++) {
+                    mds[i].update(chunkContentPrefix);
+                }
+                try {
+                    input.feed(inputOffset, chunkSize, mdSink);
+                } catch (IOException e) {
+                    throw new IOException("Failed to read chunk #" + chunkIndex, e);
+                }
+                for (int i = 0; i < digestAlgorithmsArray.length; i++) {
+                    MessageDigest md = mds[i];
+                    byte[] concatenationOfChunkCountAndChunkDigests = digestsOfChunks[i];
+                    int expectedDigestSizeBytes = digestOutputSizes[i];
+                    int actualDigestSizeBytes =
+                            md.digest(
+                                    concatenationOfChunkCountAndChunkDigests,
+                                    5 + chunkIndex * expectedDigestSizeBytes,
+                                    expectedDigestSizeBytes);
+                    if (actualDigestSizeBytes != expectedDigestSizeBytes) {
+                        throw new RuntimeException(
+                                "Unexpected output size of " + md.getAlgorithm()
+                                        + " digest: " + actualDigestSizeBytes);
+                    }
+                }
+                inputOffset += chunkSize;
+                inputRemaining -= chunkSize;
+                chunkIndex++;
+            }
+        }
+
+        Map<ContentDigestAlgorithm, byte[]> result = new HashMap<>(digestAlgorithmsArray.length);
+        for (int i = 0; i < digestAlgorithmsArray.length; i++) {
+            ContentDigestAlgorithm digestAlgorithm = digestAlgorithmsArray[i];
+            byte[] concatenationOfChunkCountAndChunkDigests = digestsOfChunks[i];
+            MessageDigest md = mds[i];
+            byte[] digest = md.digest(concatenationOfChunkCountAndChunkDigests);
+            result.put(digestAlgorithm, digest);
+        }
+        return result;
+    }
+
+    private static final long getChunkCount(long inputSize, int chunkSize) {
+        return (inputSize + chunkSize - 1) / chunkSize;
+    }
+
+    private static void setUnsignedInt32LittleEndian(int value, byte[] result, int offset) {
+        result[offset] = (byte) (value & 0xff);
+        result[offset + 1] = (byte) ((value >> 8) & 0xff);
+        result[offset + 2] = (byte) ((value >> 16) & 0xff);
+        result[offset + 3] = (byte) ((value >> 24) & 0xff);
+    }
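The chunked digest described in computeContentDigests boils down to the following standalone sketch for a single in-memory array and a single algorithm; the method above additionally streams from DataSources and computes several algorithms in one pass:

    // Standalone sketch of the v2 chunked digest (single byte[], single algorithm).
    // digestSizeBytes must match the algorithm's output size, e.g. ("SHA-256", 32).
    // Requires: java.nio.ByteBuffer, java.nio.ByteOrder, java.security.MessageDigest.
    static byte[] chunkedDigest(byte[] content, String jcaDigest, int digestSizeBytes)
            throws Exception {
        final int chunkSize = 1024 * 1024;
        int chunkCount = (content.length + chunkSize - 1) / chunkSize;
        MessageDigest md = MessageDigest.getInstance(jcaDigest);
        ByteBuffer top = ByteBuffer.allocate(5 + chunkCount * digestSizeBytes)
                .order(ByteOrder.LITTLE_ENDIAN);
        top.put((byte) 0x5a).putInt(chunkCount);          // 0x5a, then chunk count (uint32 LE)
        for (int i = 0; i < chunkCount; i++) {
            int offset = i * chunkSize;
            int length = Math.min(chunkSize, content.length - offset);
            ByteBuffer prefix = ByteBuffer.allocate(5).order(ByteOrder.LITTLE_ENDIAN);
            prefix.put((byte) 0xa5).putInt(length);       // 0xa5, then chunk length (uint32 LE)
            md.update(prefix.array());
            md.update(content, offset, length);
            top.put(md.digest());                         // digest() also resets md
        }
        return MessageDigest.getInstance(jcaDigest).digest(top.array());
    }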
+
+    private static byte[] generateApkSigningBlock(
+            List<SignerConfig> signerConfigs,
+            Map<ContentDigestAlgorithm, byte[]> contentDigests)
+                    throws InvalidKeyException, SignatureException {
+        byte[] apkSignatureSchemeV2Block =
+                generateApkSignatureSchemeV2Block(signerConfigs, contentDigests);
+        return generateApkSigningBlock(apkSignatureSchemeV2Block);
+    }
+
+    private static byte[] generateApkSigningBlock(byte[] apkSignatureSchemeV2Block) {
+        // FORMAT:
+        // uint64:  size (excluding this field)
+        // repeated ID-value pairs:
+        //     uint64:           size (excluding this field)
+        //     uint32:           ID
+        //     (size - 4) bytes: value
+        // uint64:  size (same as the one above)
+        // uint128: magic
+
+        int resultSize =
+                8 // size
+                + 8 + 4 + apkSignatureSchemeV2Block.length // v2Block as ID-value pair
+                + 8 // size
+                + 16 // magic
+                ;
+        ByteBuffer result = ByteBuffer.allocate(resultSize);
+        result.order(ByteOrder.LITTLE_ENDIAN);
+        long blockSizeFieldValue = resultSize - 8;
+        result.putLong(blockSizeFieldValue);
+
+        long pairSizeFieldValue = 4 + apkSignatureSchemeV2Block.length;
+        result.putLong(pairSizeFieldValue);
+        result.putInt(APK_SIGNATURE_SCHEME_V2_BLOCK_ID);
+        result.put(apkSignatureSchemeV2Block);
+
+        result.putLong(blockSizeFieldValue);
+        result.put(APK_SIGNING_BLOCK_MAGIC);
+
+        return result.array();
+    }
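For orientation, the reverse of the layout in the FORMAT comment above; a parsing sketch that assumes the whole block is already in memory and smaller than 2 GB (the v2 signature lives under ID 0x7109871a):

    // Sketch: read back the ID-value pairs of an APK Signing Block laid out as above.
    // Requires: java.nio.ByteBuffer, java.nio.ByteOrder, java.util.HashMap, java.util.Map.
    static Map<Integer, byte[]> parseSigningBlock(byte[] block) {
        ByteBuffer buf = ByteBuffer.wrap(block).order(ByteOrder.LITTLE_ENDIAN);
        long sizeExcludingFirstField = buf.getLong();           // uint64: size
        if (sizeExcludingFirstField != block.length - 8) {
            throw new IllegalArgumentException("size field does not match block length");
        }
        Map<Integer, byte[]> pairs = new HashMap<>();
        int pairsEnd = block.length - 8 - 16;                   // trailing size + magic
        while (buf.position() < pairsEnd) {
            long pairSize = buf.getLong();                      // uint64: ID + value size
            int id = buf.getInt();                              // uint32: ID
            byte[] value = new byte[(int) (pairSize - 4)];
            buf.get(value);
            pairs.put(id, value);
        }
        // The remaining 24 bytes are the repeated size and the 16-byte "APK Sig Block 42" magic.
        return pairs;
    }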
+
+    private static byte[] generateApkSignatureSchemeV2Block(
+            List<SignerConfig> signerConfigs,
+            Map<ContentDigestAlgorithm, byte[]> contentDigests)
+                    throws InvalidKeyException, SignatureException {
+        // FORMAT:
+        // * length-prefixed sequence of length-prefixed signer blocks.
+
+        List<byte[]> signerBlocks = new ArrayList<>(signerConfigs.size());
+        int signerNumber = 0;
+        for (SignerConfig signerConfig : signerConfigs) {
+            signerNumber++;
+            byte[] signerBlock;
+            try {
+                signerBlock = generateSignerBlock(signerConfig, contentDigests);
+            } catch (InvalidKeyException e) {
+                throw new InvalidKeyException("Signer #" + signerNumber + " failed", e);
+            } catch (SignatureException e) {
+                throw new SignatureException("Signer #" + signerNumber + " failed", e);
+            }
+            signerBlocks.add(signerBlock);
+        }
+
+        return encodeAsSequenceOfLengthPrefixedElements(
+                new byte[][] {
+                    encodeAsSequenceOfLengthPrefixedElements(signerBlocks),
+                });
+    }
+
+    private static byte[] generateSignerBlock(
+            SignerConfig signerConfig,
+            Map<ContentDigestAlgorithm, byte[]> contentDigests)
+                    throws InvalidKeyException, SignatureException {
+        if (signerConfig.certificates.isEmpty()) {
+            throw new SignatureException("No certificates configured for signer");
+        }
+        PublicKey publicKey = signerConfig.certificates.get(0).getPublicKey();
+
+        byte[] encodedPublicKey = encodePublicKey(publicKey);
+
+        V2SignatureSchemeBlock.SignedData signedData = new V2SignatureSchemeBlock.SignedData();
+        try {
+            signedData.certificates = encodeCertificates(signerConfig.certificates);
+        } catch (CertificateEncodingException e) {
+            throw new SignatureException("Failed to encode certificates", e);
+        }
+
+        List<Pair<Integer, byte[]>> digests =
+                new ArrayList<>(signerConfig.signatureAlgorithms.size());
+        for (SignatureAlgorithm signatureAlgorithm : signerConfig.signatureAlgorithms) {
+            ContentDigestAlgorithm contentDigestAlgorithm =
+                    signatureAlgorithm.getContentDigestAlgorithm();
+            byte[] contentDigest = contentDigests.get(contentDigestAlgorithm);
+            if (contentDigest == null) {
+                throw new RuntimeException(
+                        contentDigestAlgorithm + " content digest for " + signatureAlgorithm
+                                + " not computed");
+            }
+            digests.add(Pair.of(signatureAlgorithm.getId(), contentDigest));
+        }
+        signedData.digests = digests;
+
+        V2SignatureSchemeBlock.Signer signer = new V2SignatureSchemeBlock.Signer();
+        // FORMAT:
+        // * length-prefixed sequence of length-prefixed digests:
+        //   * uint32: signature algorithm ID
+        //   * length-prefixed bytes: digest of contents
+        // * length-prefixed sequence of certificates:
+        //   * length-prefixed bytes: X.509 certificate (ASN.1 DER encoded).
+        // * length-prefixed sequence of length-prefixed additional attributes:
+        //   * uint32: ID
+        //   * (length - 4) bytes: value
+        signer.signedData = encodeAsSequenceOfLengthPrefixedElements(new byte[][] {
+            encodeAsSequenceOfLengthPrefixedPairsOfIntAndLengthPrefixedBytes(signedData.digests),
+            encodeAsSequenceOfLengthPrefixedElements(signedData.certificates),
+            // additional attributes
+            new byte[0],
+        });
+        signer.publicKey = encodedPublicKey;
+        signer.signatures = new ArrayList<>(signerConfig.signatureAlgorithms.size());
+        for (SignatureAlgorithm signatureAlgorithm : signerConfig.signatureAlgorithms) {
+            Pair<String, ? extends AlgorithmParameterSpec> sigAlgAndParams =
+                    signatureAlgorithm.getJcaSignatureAlgorithmAndParams();
+            String jcaSignatureAlgorithm = sigAlgAndParams.getFirst();
+            AlgorithmParameterSpec jcaSignatureAlgorithmParams = sigAlgAndParams.getSecond();
+            byte[] signatureBytes;
+            try {
+                Signature signature = Signature.getInstance(jcaSignatureAlgorithm);
+                signature.initSign(signerConfig.privateKey);
+                if (jcaSignatureAlgorithmParams != null) {
+                    signature.setParameter(jcaSignatureAlgorithmParams);
+                }
+                signature.update(signer.signedData);
+                signatureBytes = signature.sign();
+            } catch (InvalidKeyException e) {
+                throw new InvalidKeyException("Failed to sign using " + jcaSignatureAlgorithm, e);
+            } catch (NoSuchAlgorithmException | InvalidAlgorithmParameterException
+                    | SignatureException e) {
+                throw new SignatureException("Failed to sign using " + jcaSignatureAlgorithm, e);
+            }
+
+            try {
+                Signature signature = Signature.getInstance(jcaSignatureAlgorithm);
+                signature.initVerify(publicKey);
+                if (jcaSignatureAlgorithmParams != null) {
+                    signature.setParameter(jcaSignatureAlgorithmParams);
+                }
+                signature.update(signer.signedData);
+                if (!signature.verify(signatureBytes)) {
+                    throw new SignatureException("Signature did not verify");
+                }
+            } catch (InvalidKeyException e) {
+                throw new InvalidKeyException("Failed to verify generated " + jcaSignatureAlgorithm
+                        + " signature using public key from certificate", e);
+            } catch (NoSuchAlgorithmException | InvalidAlgorithmParameterException
+                    | SignatureException e) {
+                throw new SignatureException("Failed to verify generated " + jcaSignatureAlgorithm
+                        + " signature using public key from certificate", e);
+            }
+
+            signer.signatures.add(Pair.of(signatureAlgorithm.getId(), signatureBytes));
+        }
+
+        // FORMAT:
+        // * length-prefixed signed data
+        // * length-prefixed sequence of length-prefixed signatures:
+        //   * uint32: signature algorithm ID
+        //   * length-prefixed bytes: signature of signed data
+        // * length-prefixed bytes: public key (X.509 SubjectPublicKeyInfo, ASN.1 DER encoded)
+        return encodeAsSequenceOfLengthPrefixedElements(
+                new byte[][] {
+                    signer.signedData,
+                    encodeAsSequenceOfLengthPrefixedPairsOfIntAndLengthPrefixedBytes(
+                            signer.signatures),
+                    signer.publicKey,
+                });
+    }
+
+    private static final class V2SignatureSchemeBlock {
+        private static final class Signer {
+            public byte[] signedData;
+            public List<Pair<Integer, byte[]>> signatures;
+            public byte[] publicKey;
+        }
+
+        private static final class SignedData {
+            public List<Pair<Integer, byte[]>> digests;
+            public List<byte[]> certificates;
+        }
+    }
+
+    private static byte[] encodePublicKey(PublicKey publicKey) throws InvalidKeyException {
+        byte[] encodedPublicKey = null;
+        if ("X.509".equals(publicKey.getFormat())) {
+            encodedPublicKey = publicKey.getEncoded();
+        }
+        if (encodedPublicKey == null) {
+            try {
+                encodedPublicKey =
+                        KeyFactory.getInstance(publicKey.getAlgorithm())
+                                .getKeySpec(publicKey, X509EncodedKeySpec.class)
+                                .getEncoded();
+            } catch (NoSuchAlgorithmException e) {
+                throw new InvalidKeyException(
+                        "Failed to obtain X.509 encoded form of public key " + publicKey
+                                + " of class " + publicKey.getClass().getName(),
+                        e);
+            } catch (InvalidKeySpecException e) {
+                throw new InvalidKeyException(
+                        "Failed to obtain X.509 encoded form of public key " + publicKey
+                                + " of class " + publicKey.getClass().getName(),
+                        e);
+            }
+        }
+        if ((encodedPublicKey == null) || (encodedPublicKey.length == 0)) {
+            throw new InvalidKeyException(
+                    "Failed to obtain X.509 encoded form of public key " + publicKey
+                            + " of class " + publicKey.getClass().getName());
+        }
+        return encodedPublicKey;
+    }
+
+    private static List<byte[]> encodeCertificates(List<X509Certificate> certificates)
+            throws CertificateEncodingException {
+        List<byte[]> result = new ArrayList<>(certificates.size());
+        for (X509Certificate certificate : certificates) {
+            result.add(certificate.getEncoded());
+        }
+        return result;
+    }
+
+    private static byte[] encodeAsSequenceOfLengthPrefixedElements(List<byte[]> sequence) {
+        return encodeAsSequenceOfLengthPrefixedElements(
+                sequence.toArray(new byte[sequence.size()][]));
+    }
+
+    private static byte[] encodeAsSequenceOfLengthPrefixedElements(byte[][] sequence) {
+        int payloadSize = 0;
+        for (byte[] element : sequence) {
+            payloadSize += 4 + element.length;
+        }
+        ByteBuffer result = ByteBuffer.allocate(payloadSize);
+        result.order(ByteOrder.LITTLE_ENDIAN);
+        for (byte[] element : sequence) {
+            result.putInt(element.length);
+            result.put(element);
+        }
+        return result.array();
+    }
+
+    private static byte[] encodeAsSequenceOfLengthPrefixedPairsOfIntAndLengthPrefixedBytes(
+            List<Pair<Integer, byte[]>> sequence) {
+        int resultSize = 0;
+        for (Pair<Integer, byte[]> element : sequence) {
+            resultSize += 12 + element.getSecond().length;
+        }
+        ByteBuffer result = ByteBuffer.allocate(resultSize);
+        result.order(ByteOrder.LITTLE_ENDIAN);
+        for (Pair<Integer, byte[]> element : sequence) {
+            byte[] second = element.getSecond();
+            result.putInt(8 + second.length);
+            result.putInt(element.getFirst());
+            result.putInt(second.length);
+            result.put(second);
+        }
+        return result.array();
+    }
+
+    private static ByteBuffer copyToByteBuffer(DataSource dataSource) throws IOException {
+        long dataSourceSize = dataSource.size();
+        if (dataSourceSize > Integer.MAX_VALUE) {
+            throw new IllegalArgumentException("Data source too large: " + dataSourceSize);
+        }
+        ByteBuffer result = ByteBuffer.allocate((int) dataSourceSize);
+        dataSource.feed(0, result.remaining(), new ByteBufferSink(result));
+        result.position(0);
+        return result;
+    }
+}
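
For illustration, the length-prefixed layout built by
encodeAsSequenceOfLengthPrefixedElements and
encodeAsSequenceOfLengthPrefixedPairsOfIntAndLengthPrefixedBytes above can be
sketched in Python; this is a minimal sketch, and the payloads and algorithm ID
below are made up:

# Minimal sketch of the little-endian, length-prefixed encoding used by the
# v2 signer block above; not part of the patch.
import struct

def encode_length_prefixed(elements):
    # Each element is prefixed with its length as a little-endian uint32.
    return b''.join(struct.pack('<I', len(e)) + e for e in elements)

def encode_id_value_pairs(pairs):
    # Each pair becomes: uint32 pair length (8 + value length),
    # uint32 algorithm ID, uint32 value length, value bytes.
    out = b''
    for alg_id, value in pairs:
        out += struct.pack('<III', 8 + len(value), alg_id, len(value)) + value
    return out

signed_data = b'example signed data'
signatures = [(0x0103, b'example signature bytes')]
public_key = b'example SubjectPublicKeyInfo'
signer_block = encode_length_prefixed(
    [signed_data, encode_id_value_pairs(signatures), public_key])
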
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/internal/jar/ManifestWriter.java b/tools/apksigner/core/src/com/android/apksigner/core/internal/jar/ManifestWriter.java
new file mode 100644
index 0000000..449953a
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/internal/jar/ManifestWriter.java
@@ -0,0 +1,124 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.apksigner.core.internal.jar;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.jar.Attributes;
+
+/**
+ * Producer of {@code META-INF/MANIFEST.MF} file.
+ */
+public abstract class ManifestWriter {
+
+    private static final byte[] CRLF = new byte[] {'\r', '\n'};
+    private static final int MAX_LINE_LENGTH = 70;
+
+    private ManifestWriter() {}
+
+    public static void writeMainSection(OutputStream out, Attributes attributes)
+            throws IOException {
+
+        // Main section must start with the Manifest-Version attribute.
+        // See https://docs.oracle.com/javase/8/docs/technotes/guides/jar/jar.html#Signed_JAR_File.
+        String manifestVersion = attributes.getValue(Attributes.Name.MANIFEST_VERSION);
+        if (manifestVersion == null) {
+            throw new IllegalArgumentException(
+                    "Mandatory " + Attributes.Name.MANIFEST_VERSION + " attribute missing");
+        }
+        writeAttribute(out, Attributes.Name.MANIFEST_VERSION, manifestVersion);
+
+        if (attributes.size() > 1) {
+            SortedMap<String, String> namedAttributes = getAttributesSortedByName(attributes);
+            namedAttributes.remove(Attributes.Name.MANIFEST_VERSION.toString());
+            writeAttributes(out, namedAttributes);
+        }
+        writeSectionDelimiter(out);
+    }
+
+    public static void writeIndividualSection(OutputStream out, String name, Attributes attributes)
+            throws IOException {
+        writeAttribute(out, "Name", name);
+
+        if (!attributes.isEmpty()) {
+            writeAttributes(out, getAttributesSortedByName(attributes));
+        }
+        writeSectionDelimiter(out);
+    }
+
+    static void writeSectionDelimiter(OutputStream out) throws IOException {
+        out.write(CRLF);
+    }
+
+    static void writeAttribute(OutputStream  out, Attributes.Name name, String value)
+            throws IOException {
+        writeAttribute(out, name.toString(), value);
+    }
+
+    private static void writeAttribute(OutputStream  out, String name, String value)
+            throws IOException {
+        writeLine(out, name + ": " + value);
+    }
+
+    private static void writeLine(OutputStream  out, String line) throws IOException {
+        byte[] lineBytes = line.getBytes("UTF-8");
+        int offset = 0;
+        int remaining = lineBytes.length;
+        boolean firstLine = true;
+        while (remaining > 0) {
+            int chunkLength;
+            if (firstLine) {
+                // First line
+                chunkLength = Math.min(remaining, MAX_LINE_LENGTH);
+            } else {
+                // Continuation line
+                out.write(CRLF);
+                out.write(' ');
+                chunkLength = Math.min(remaining, MAX_LINE_LENGTH - 1);
+            }
+            out.write(lineBytes, offset, chunkLength);
+            offset += chunkLength;
+            remaining -= chunkLength;
+            firstLine = false;
+        }
+        out.write(CRLF);
+    }
+
+    static SortedMap<String, String> getAttributesSortedByName(Attributes attributes) {
+        Set<Map.Entry<Object, Object>> attributesEntries = attributes.entrySet();
+        SortedMap<String, String> namedAttributes = new TreeMap<String, String>();
+        for (Map.Entry<Object, Object> attribute : attributesEntries) {
+            String attrName = attribute.getKey().toString();
+            String attrValue = attribute.getValue().toString();
+            namedAttributes.put(attrName, attrValue);
+        }
+        return namedAttributes;
+    }
+
+    static void writeAttributes(
+            OutputStream out, SortedMap<String, String> attributesSortedByName) throws IOException {
+        for (Map.Entry<String, String> attribute : attributesSortedByName.entrySet()) {
+            String attrName = attribute.getKey();
+            String attrValue = attribute.getValue();
+            writeAttribute(out, attrName, attrValue);
+        }
+    }
+}
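
For illustration, the 70-byte line limit and continuation-line rule implemented
by writeLine above can be sketched in Python; this is a minimal sketch, and the
helper name and example value are made up:

# Minimal sketch of manifest line wrapping: lines are capped at 70 bytes and
# continuation lines are emitted as CRLF followed by a single space.
def wrap_manifest_line(line, max_len=70):
    data = line.encode('utf-8')
    out = b''
    first = True
    while data:
        if not first:
            out += b'\r\n '
        limit = max_len if first else max_len - 1
        out += data[:limit]
        data = data[limit:]
        first = False
    return out + b'\r\n'

# A long digest attribute is split across continuation lines.
wrapped = wrap_manifest_line('SHA-256-Digest: ' + 'A' * 100)
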
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/internal/jar/SignatureFileWriter.java b/tools/apksigner/core/src/com/android/apksigner/core/internal/jar/SignatureFileWriter.java
new file mode 100644
index 0000000..9cd25f3
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/internal/jar/SignatureFileWriter.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.apksigner.core.internal.jar;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.SortedMap;
+import java.util.jar.Attributes;
+
+/**
+ * Producer of JAR signature file ({@code *.SF}).
+ */
+public abstract class SignatureFileWriter {
+    private SignatureFileWriter() {}
+
+    public static void writeMainSection(OutputStream out, Attributes attributes)
+            throws IOException {
+
+        // Main section must start with the Signature-Version attribute.
+        // See https://docs.oracle.com/javase/8/docs/technotes/guides/jar/jar.html#Signed_JAR_File.
+        String signatureVersion = attributes.getValue(Attributes.Name.SIGNATURE_VERSION);
+        if (signatureVersion == null) {
+            throw new IllegalArgumentException(
+                    "Mandatory " + Attributes.Name.SIGNATURE_VERSION + " attribute missing");
+        }
+        ManifestWriter.writeAttribute(out, Attributes.Name.SIGNATURE_VERSION, signatureVersion);
+
+        if (attributes.size() > 1) {
+            SortedMap<String, String> namedAttributes =
+                    ManifestWriter.getAttributesSortedByName(attributes);
+            namedAttributes.remove(Attributes.Name.SIGNATURE_VERSION.toString());
+            ManifestWriter.writeAttributes(out, namedAttributes);
+        }
+        writeSectionDelimiter(out);
+    }
+
+    public static void writeIndividualSection(OutputStream out, String name, Attributes attributes)
+            throws IOException {
+        ManifestWriter.writeIndividualSection(out, name, attributes);
+    }
+
+    public static void writeSectionDelimiter(OutputStream out) throws IOException {
+        ManifestWriter.writeSectionDelimiter(out);
+    }
+}
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/internal/util/ByteArrayOutputStreamSink.java b/tools/apksigner/core/src/com/android/apksigner/core/internal/util/ByteArrayOutputStreamSink.java
new file mode 100644
index 0000000..ca79df7
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/internal/util/ByteArrayOutputStreamSink.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.apksigner.core.internal.util;
+
+import com.android.apksigner.core.util.DataSink;
+
+import java.io.ByteArrayOutputStream;
+import java.nio.ByteBuffer;
+
+/**
+ * Data sink which stores all input data into an internal {@link ByteArrayOutputStream}, thus
+ * accepting an arbitrary amount of data.
+ */
+public class ByteArrayOutputStreamSink implements DataSink {
+
+    private final ByteArrayOutputStream mBuf = new ByteArrayOutputStream();
+
+    @Override
+    public void consume(byte[] buf, int offset, int length) {
+        mBuf.write(buf, offset, length);
+    }
+
+    @Override
+    public void consume(ByteBuffer buf) {
+        if (!buf.hasRemaining()) {
+            return;
+        }
+
+        if (buf.hasArray()) {
+            mBuf.write(
+                    buf.array(),
+                    buf.arrayOffset() + buf.position(),
+                    buf.remaining());
+            buf.position(buf.limit());
+        } else {
+            byte[] tmp = new byte[buf.remaining()];
+            buf.get(tmp);
+            mBuf.write(tmp, 0, tmp.length);
+        }
+    }
+
+    /**
+     * Returns the data received so far.
+     */
+    public byte[] getData() {
+        return mBuf.toByteArray();
+    }
+}
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/internal/util/ByteBufferDataSource.java b/tools/apksigner/core/src/com/android/apksigner/core/internal/util/ByteBufferDataSource.java
new file mode 100644
index 0000000..76f4fda
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/internal/util/ByteBufferDataSource.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.apksigner.core.internal.util;
+
+import com.android.apksigner.core.util.DataSink;
+import com.android.apksigner.core.util.DataSource;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/**
+ * {@link DataSource} backed by a {@link ByteBuffer}.
+ */
+public class ByteBufferDataSource implements DataSource {
+
+    private final ByteBuffer mBuffer;
+    private final long mSize;
+
+    /**
+     * Constructs a new {@code ByteBufferDataSource} based on the data contained in the provided
+     * buffer between the buffer's position and limit.
+     */
+    public ByteBufferDataSource(ByteBuffer buffer) {
+        mBuffer = buffer.slice();
+        mSize = buffer.remaining();
+    }
+
+    @Override
+    public long size() {
+        return mSize;
+    }
+
+    @Override
+    public void feed(long offset, int size, DataSink sink) throws IOException {
+        if (offset < 0) {
+            throw new IllegalArgumentException("offset: " + offset);
+        }
+        if (size < 0) {
+            throw new IllegalArgumentException("size: " + size);
+        }
+        if (offset > mSize) {
+            throw new IllegalArgumentException(
+                    "offset (" + offset + ") > source size (" + mSize + ")");
+        }
+        long endOffset = offset + size;
+        if (endOffset < offset) {
+            throw new IllegalArgumentException(
+                    "offset (" + offset + ") + size (" + size + ") overflow");
+        }
+        if (endOffset > mSize) {
+            throw new IllegalArgumentException(
+                    "offset (" + offset + ") + size (" + size + ") > source size (" + mSize  +")");
+        }
+
+        int chunkPosition = (int) offset; // safe to downcast because mSize <= Integer.MAX_VALUE
+        int chunkLimit = (int) endOffset; // safe to downcast because mSize <= Integer.MAX_VALUE
+        ByteBuffer chunk;
+        // Creating a slice of ByteBuffer modifies the state of the source ByteBuffer (position
+        // and limit fields, to be more specific). We thus use synchronization around these
+        // state-changing operations to make instances of this class thread-safe.
+        synchronized (mBuffer) {
+            // ByteBuffer.limit(int) and .position(int) check that the position <= limit
+            // invariant is not broken. Thus, the only way to safely change position and limit
+            // without caring about their current values is to first set position to 0 or set the
+            // limit to capacity.
+            mBuffer.position(0);
+
+            mBuffer.limit(chunkLimit);
+            mBuffer.position(chunkPosition);
+            chunk = mBuffer.slice();
+        }
+
+        sink.consume(chunk);
+    }
+}
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/internal/util/ByteBufferSink.java b/tools/apksigner/core/src/com/android/apksigner/core/internal/util/ByteBufferSink.java
new file mode 100644
index 0000000..8c57905
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/internal/util/ByteBufferSink.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.apksigner.core.internal.util;
+
+import com.android.apksigner.core.util.DataSink;
+
+import java.io.IOException;
+import java.nio.BufferOverflowException;
+import java.nio.ByteBuffer;
+
+/**
+ * Data sink which stores all received data into the associated {@link ByteBuffer}.
+ */
+public class ByteBufferSink implements DataSink {
+
+    private final ByteBuffer mBuffer;
+
+    public ByteBufferSink(ByteBuffer buffer) {
+        mBuffer = buffer;
+    }
+
+    @Override
+    public void consume(byte[] buf, int offset, int length) throws IOException {
+        try {
+            mBuffer.put(buf, offset, length);
+        } catch (BufferOverflowException e) {
+            throw new IOException(
+                    "Insufficient space in output buffer for " + length + " bytes", e);
+        }
+    }
+
+    @Override
+    public void consume(ByteBuffer buf) throws IOException {
+        int length = buf.remaining();
+        try {
+            mBuffer.put(buf);
+        } catch (BufferOverflowException e) {
+            throw new IOException(
+                    "Insufficient space in output buffer for " + length + " bytes", e);
+        }
+    }
+}
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/internal/util/Pair.java b/tools/apksigner/core/src/com/android/apksigner/core/internal/util/Pair.java
new file mode 100644
index 0000000..d59af41
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/internal/util/Pair.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.apksigner.core.internal.util;
+
+/**
+ * Pair of two elements.
+ */
+public final class Pair<A, B> {
+    private final A mFirst;
+    private final B mSecond;
+
+    private Pair(A first, B second) {
+        mFirst = first;
+        mSecond = second;
+    }
+
+    public static <A, B> Pair<A, B> of(A first, B second) {
+        return new Pair<A, B>(first, second);
+    }
+
+    public A getFirst() {
+        return mFirst;
+    }
+
+    public B getSecond() {
+        return mSecond;
+    }
+
+    @Override
+    public int hashCode() {
+        final int prime = 31;
+        int result = 1;
+        result = prime * result + ((mFirst == null) ? 0 : mFirst.hashCode());
+        result = prime * result + ((mSecond == null) ? 0 : mSecond.hashCode());
+        return result;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+        if (obj == null) {
+            return false;
+        }
+        if (getClass() != obj.getClass()) {
+            return false;
+        }
+        @SuppressWarnings("rawtypes")
+        Pair other = (Pair) obj;
+        if (mFirst == null) {
+            if (other.mFirst != null) {
+                return false;
+            }
+        } else if (!mFirst.equals(other.mFirst)) {
+            return false;
+        }
+        if (mSecond == null) {
+            if (other.mSecond != null) {
+                return false;
+            }
+        } else if (!mSecond.equals(other.mSecond)) {
+            return false;
+        }
+        return true;
+    }
+}
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/internal/zip/ZipUtils.java b/tools/apksigner/core/src/com/android/apksigner/core/internal/zip/ZipUtils.java
new file mode 100644
index 0000000..7b47e50
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/internal/zip/ZipUtils.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.apksigner.core.internal.zip;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * Assorted ZIP format helpers.
+ *
+ * <p>NOTE: Most helper methods operating on {@code ByteBuffer} instances expect that the byte
+ * order of these buffers is little-endian.
+ */
+public abstract class ZipUtils {
+    private ZipUtils() {}
+
+    private static final int ZIP_EOCD_CENTRAL_DIR_OFFSET_FIELD_OFFSET = 16;
+
+    /**
+     * Sets the offset of the start of the ZIP Central Directory in the archive.
+     *
+     * <p>NOTE: Byte order of {@code zipEndOfCentralDirectory} must be little-endian.
+     */
+    public static void setZipEocdCentralDirectoryOffset(
+            ByteBuffer zipEndOfCentralDirectory, long offset) {
+        assertByteOrderLittleEndian(zipEndOfCentralDirectory);
+        setUnsignedInt32(
+                zipEndOfCentralDirectory,
+                zipEndOfCentralDirectory.position() + ZIP_EOCD_CENTRAL_DIR_OFFSET_FIELD_OFFSET,
+                offset);
+    }
+
+    private static void assertByteOrderLittleEndian(ByteBuffer buffer) {
+        if (buffer.order() != ByteOrder.LITTLE_ENDIAN) {
+            throw new IllegalArgumentException("ByteBuffer byte order must be little endian");
+        }
+    }
+
+    private static void setUnsignedInt32(ByteBuffer buffer, int offset, long value) {
+        if ((value < 0) || (value > 0xffffffffL)) {
+            throw new IllegalArgumentException("uint32 value of out range: " + value);
+        }
+        buffer.putInt(buffer.position() + offset, (int) value);
+    }
+}
\ No newline at end of file
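
For illustration, the EOCD patching done by setZipEocdCentralDirectoryOffset
above can be sketched in Python; the "offset of start of central directory"
field sits 16 bytes into the End of Central Directory record and is a
little-endian uint32. This is a minimal sketch and the function name is made
up:

# Minimal sketch; not part of the patch.
import struct

EOCD_CENTRAL_DIR_OFFSET_FIELD_OFFSET = 16

def set_eocd_central_dir_offset(eocd, new_offset):
    # eocd is the raw End of Central Directory record as bytes.
    if not 0 <= new_offset <= 0xffffffff:
        raise ValueError('uint32 value out of range: %d' % new_offset)
    return (eocd[:EOCD_CENTRAL_DIR_OFFSET_FIELD_OFFSET]
            + struct.pack('<I', new_offset)
            + eocd[EOCD_CENTRAL_DIR_OFFSET_FIELD_OFFSET + 4:])
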
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/util/DataSink.java b/tools/apksigner/core/src/com/android/apksigner/core/util/DataSink.java
new file mode 100644
index 0000000..35a61fc
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/util/DataSink.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.apksigner.core.util;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/**
+ * Consumer of input data which may be provided in one go or in chunks.
+ */
+public interface DataSink {
+
+    /**
+     * Consumes the provided chunk of data.
+     *
+     * <p>This data sink guarantees to not hold references to the provided buffer after this method
+     * terminates.
+     */
+    void consume(byte[] buf, int offset, int length) throws IOException;
+
+    /**
+     * Consumes all remaining data in the provided buffer and advances the buffer's position
+     * to the buffer's limit.
+     *
+     * <p>This data sink guarantees to not hold references to the provided buffer after this method
+     * terminates.
+     */
+    void consume(ByteBuffer buf) throws IOException;
+}
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/util/DataSource.java b/tools/apksigner/core/src/com/android/apksigner/core/util/DataSource.java
new file mode 100644
index 0000000..04560cb
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/util/DataSource.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.apksigner.core.util;
+
+import java.io.IOException;
+
+/**
+ * Abstract representation of a source of data.
+ *
+ * <p>This abstraction serves three purposes:
+ * <ul>
+ * <li>Transparent handling of different types of sources, such as {@code byte[]},
+ *     {@link java.nio.ByteBuffer}, {@link java.io.RandomAccessFile}, memory-mapped file.</li>
+ * <li>Support sources larger than 2 GB. If all sources were smaller than 2 GB, {@code ByteBuffer}
+ *     may have worked as the unifying abstraction.</li>
+ * <li>Support sources which do not fit into logical memory as a contiguous region.</li>
+ * </ul>
+ */
+public interface DataSource {
+
+    /**
+     * Returns the amount of data (in bytes) contained in this data source.
+     */
+    long size();
+
+    /**
+     * Feeds the specified chunk from this data source into the provided sink.
+     *
+     * @param offset index (in bytes) at which the chunk starts inside data source
+     * @param size size (in bytes) of the chunk
+     */
+    void feed(long offset, int size, DataSink sink) throws IOException;
+}
diff --git a/tools/apksigner/core/src/com/android/apksigner/core/util/DataSources.java b/tools/apksigner/core/src/com/android/apksigner/core/util/DataSources.java
new file mode 100644
index 0000000..978afae
--- /dev/null
+++ b/tools/apksigner/core/src/com/android/apksigner/core/util/DataSources.java
@@ -0,0 +1,23 @@
+package com.android.apksigner.core.util;
+
+import com.android.apksigner.core.internal.util.ByteBufferDataSource;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Utility methods for working with {@link DataSource} abstraction.
+ */
+public abstract class DataSources {
+    private DataSources() {}
+
+    /**
+     * Returns a {@link DataSource} backed by the provided {@link ByteBuffer}. The data source
+     * represents the data contained between the position and limit of the buffer.
+     */
+    public static DataSource asDataSource(ByteBuffer buffer) {
+        if (buffer == null) {
+            throw new NullPointerException();
+        }
+        return new ByteBufferDataSource(buffer);
+    }
+}
diff --git a/tools/brillo-clang-format b/tools/brillo-clang-format
new file mode 100644
index 0000000..a69d9d2
--- /dev/null
+++ b/tools/brillo-clang-format
@@ -0,0 +1,37 @@
+#
+# Copyright (C) 2016 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+### DO NOT COPY THIS FILE TO YOUR PROJECT. ###
+
+#
+# This is the .clang-format file used by all Brillo projects, conforming to the
+# style guide defined by Brillo. To use this file create a *relative* symlink in
+# your project pointing to this file, as this repository is expected to be
+# present in all manifests.
+#
+# See go/brillo-c++-style for details about the style guide.
+#
+
+BasedOnStyle: Google
+AllowShortFunctionsOnASingleLine: Inline
+AllowShortIfStatementsOnASingleLine: false
+AllowShortLoopsOnASingleLine: false
+BinPackArguments: false
+BinPackParameters: false
+CommentPragmas: NOLINT:.*
+DerivePointerAlignment: false
+PointerAlignment: Left
+TabWidth: 2
diff --git a/tools/fs_config/Android.mk b/tools/fs_config/Android.mk
index 34a3522..fb4a0c4 100644
--- a/tools/fs_config/Android.mk
+++ b/tools/fs_config/Android.mk
@@ -23,20 +23,38 @@
 
 include $(BUILD_HOST_EXECUTABLE)
 
-# To Build the custom target binary for the host to generate the fs_config
-# override files. The executable is hard coded to include the
-# $(TARGET_ANDROID_FILESYSTEM_CONFIG_H) file if it exists.
-# Expectations:
-#    device/<vendor>/<device>/android_filesystem_config.h
-#        fills in struct fs_path_config android_device_dirs[] and
-#                 struct fs_path_config android_device_files[]
-#    device/<vendor>/<device>/device.mk
-#        PRODUCT_PACKAGES += fs_config_dirs fs_config_files
-
-# If not specified, check if default one to be found
+# One can override the default android_filesystem_config.h file in one of two ways:
+#
+# 1. The old way:
+#   Build the custom target binary for the host to generate the fs_config
+#   override files. The executable is hard coded to include the
+#   $(TARGET_ANDROID_FILESYSTEM_CONFIG_H) file if it exists.
+#   Expectations:
+#      device/<vendor>/<device>/android_filesystem_config.h
+#          fills in struct fs_path_config android_device_dirs[] and
+#                   struct fs_path_config android_device_files[]
+#      device/<vendor>/<device>/device.mk
+#          PRODUCT_PACKAGES += fs_config_dirs fs_config_files
+#   If not specified, the build checks whether a default one can be found.
+#
+# 2. The new way:
+#   Set TARGET_FS_CONFIG_GEN to a list of intermediate format files
+#   for generating the android_filesystem_config.h file.
+#
+# More information can be found in the README
 ANDROID_FS_CONFIG_H := android_filesystem_config.h
 
 ifneq ($(TARGET_ANDROID_FILESYSTEM_CONFIG_H),)
+ifneq ($(TARGET_FS_CONFIG_GEN),)
+$(error Cannot set TARGET_ANDROID_FILESYSTEM_CONFIG_H and TARGET_FS_CONFIG_GEN simultaneously)
+endif
+
+# One and only one file can be specified.
+ifneq ($(words $(TARGET_ANDROID_FILESYSTEM_CONFIG_H)),1)
+$(error Multiple fs_config files specified, \
+ see "$(TARGET_ANDROID_FILESYSTEM_CONFIG_H)".)
+endif
+
 ifeq ($(filter %/$(ANDROID_FS_CONFIG_H),$(TARGET_ANDROID_FILESYSTEM_CONFIG_H)),)
 $(error TARGET_ANDROID_FILESYSTEM_CONFIG_H file name must be $(ANDROID_FS_CONFIG_H), \
  see "$(notdir $(TARGET_ANDROID_FILESYSTEM_CONFIG_H))".)
@@ -44,20 +62,43 @@
 
 my_fs_config_h := $(TARGET_ANDROID_FILESYSTEM_CONFIG_H)
 else ifneq ($(wildcard $(TARGET_DEVICE_DIR)/$(ANDROID_FS_CONFIG_H)),)
+
+ifneq ($(TARGET_FS_CONFIG_GEN),)
+$(error Cannot provide $(TARGET_DEVICE_DIR)/$(ANDROID_FS_CONFIG_H) and set TARGET_FS_CONFIG_GEN simultaneously)
+endif
 my_fs_config_h := $(TARGET_DEVICE_DIR)/$(ANDROID_FS_CONFIG_H)
+
 else
 my_fs_config_h := $(LOCAL_PATH)/default/$(ANDROID_FS_CONFIG_H)
 endif
 
+##################################
 include $(CLEAR_VARS)
 LOCAL_SRC_FILES := fs_config_generate.c
 LOCAL_MODULE := fs_config_generate_$(TARGET_DEVICE)
+LOCAL_MODULE_CLASS := EXECUTABLES
 LOCAL_SHARED_LIBRARIES := libcutils
 LOCAL_CFLAGS := -Werror -Wno-error=\#warnings
+
+ifneq ($(TARGET_FS_CONFIG_GEN),)
+gen := $(local-generated-sources-dir)/$(ANDROID_FS_CONFIG_H)
+$(gen): PRIVATE_LOCAL_PATH := $(LOCAL_PATH)
+$(gen): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN)
+$(gen): PRIVATE_CUSTOM_TOOL = $(PRIVATE_LOCAL_PATH)/fs_config_generator.py $(PRIVATE_TARGET_FS_CONFIG_GEN) > $@
+$(gen): $(TARGET_FS_CONFIG_GEN) $(LOCAL_PATH)/fs_config_generator.py
+	$(transform-generated-source)
+
+LOCAL_GENERATED_SOURCES := $(gen)
+my_fs_config_h := $(gen)
+gen :=
+endif
+
 LOCAL_C_INCLUDES := $(dir $(my_fs_config_h))
+
 include $(BUILD_HOST_EXECUTABLE)
 fs_config_generate_bin := $(LOCAL_INSTALLED_MODULE)
 
+##################################
 # Generate the system/etc/fs_config_dirs binary file for the target
 # Add fs_config_dirs to PRODUCT_PACKAGES in the device make file to enable
 include $(CLEAR_VARS)
@@ -69,6 +110,7 @@
 	@mkdir -p $(dir $@)
 	$< -D -o $@
 
+##################################
 # Generate the system/etc/fs_config_files binary file for the target
 # Add fs_config_files to PRODUCT_PACKAGES in the device make file to enable
 include $(CLEAR_VARS)
diff --git a/tools/fs_config/README b/tools/fs_config/README
new file mode 100644
index 0000000..7eca4a3
--- /dev/null
+++ b/tools/fs_config/README
@@ -0,0 +1,119 @@
+ _____  _____  _____  _____  __  __  _____
+/  _  \/   __\/  _  \|  _  \/  \/  \/   __\
+|  _  <|   __||  _  ||  |  ||  \/  ||   __|
+\__|\_/\_____/\__|__/|_____/\__ \__/\_____/
+
+
+Generating the android_filesystem_config.h
+
+To generate the android_filesystem_config.h file, one can choose from
+one of two methods. The first method is to declare
+TARGET_ANDROID_FILESYSTEM_CONFIG_H in the device BoardConfig.mk file. This
+variable can only hold one item, and it is used directly as the
+android_filesystem_config.h header when building
+fs_config_generate_$(TARGET_DEVICE), the executable used to generate the
+fs_config_files and fs_config_dirs targets.
+
+The limitation of this approach is that the variable can only be set once;
+if the device has a make hierarchy, each device needs its own file and cannot
+share from a common source, or that common source needs to include everything
+from both devices.
+
+The other way is to set TARGET_FS_CONFIG_GEN, which can be a list of
+intermediate fs configuration files. It is a build error under either of
+these conditions:
+ * Specify TARGET_FS_CONFIG_GEN and TARGET_ANDROID_FILESYSTEM_CONFIG_H
+ * Specify TARGET_FS_CONFIG_GEN and provide
+   $(TARGET_DEVICE_DIR)/android_filesystem_config.h
+
+The parsing of the config file follows the Python ConfigParser specification,
+with the sections and fields as defined below. There are two types of sections,
+and both require all options to be specified. The first section type is
+the "caps" section.
+
+The "caps" section follows the following syntax:
+
+[path]
+mode: Octal file mode
+user: AID_<user>
+group: AID_<group>
+caps: cap*
+
+Where:
+
+[path]
+  The filesystem path to configure. A path ending in / is considered a dir,
+  else it is a file.
+
+mode:
+  A valid octal file mode of at least 3 digits. If only 3 digits are given,
+  the mode is prefixed with a 0; otherwise it is used as is.
+
+user:
+  The exact C define for a valid AID. Note that custom AIDs can be defined in the
+  AID section documented below.
+
+group:
+  The exact C define for a valid AID. Note that custom AIDs can be defined in the
+  AID section documented below.
+
+caps:
+  The name as declared in
+  system/core/include/private/android_filesystem_capability.h without the
+  leading CAP_. Mixed case is allowed. Caps can also be given as raw values:
+   * binary (0b0101)
+   * octal (0455)
+   * int (42)
+   * hex (0xFF)
+  For multiple caps, just separate by whitespace.
+
+It is an error to specify multiple sections with the same [path], per the ini
+specification enforced by Python's ConfigParser.
+
+
+The next section type is the "AID" section, for specifying OEM specific AIDS.
+
+An AID section has the following syntax:
+
+[AID_<name>]
+value: <number>
+
+Where:
+
+[AID_<name>]
+  The <name> may contain any characters valid in a C #define identifier.
+
+value:
+  A valid C style number string. Hex, octal, binary and decimal are supported. See "caps"
+  above for more details on number formatting.
+
+It is an error to specify multiple sections with the same [AID_<name>], per the ini
+specification enforced by Python's ConfigParser. It is also an error to specify
+multiple sections with the same value option, or to specify a value that is
+outside of the inclusive OEM ranges:
+ * AID_OEM_RESERVED_START(2900) - AID_OEM_RESERVED_END(2999)
+ * AID_OEM_RESERVED_2_START(5000) - AID_OEM_RESERVED_2_END(5999)
+
+as defined by system/core/include/private/android_filesystem_config.h.
+
+Ordering within the TARGET_FS_CONFIG_GEN files is not relevant. The paths for files are sorted
+as follows within their respective array definition:
+ * fully specified paths before prefix matches
+ ** i.e. foo before f*
+ * lexicographically smaller before larger
+ ** i.e. boo before foo
+
+Given these paths:
+
+paths=['ac', 'a', 'acd', 'an', 'a*', 'aa', 'ac*']
+
+The sort order would be:
+paths=['a', 'aa', 'ac', 'acd', 'an', 'ac*', 'a*']
+
+Thus the fs_config tools will match fully specified paths before attempting a prefix match, and
+will use the longest matching prefix.
+
+The declared AIDs are sorted in ascending numerical order based on the option "value". The string
+representation of the value is preserved. Both choices were made for maximum readability of the
+generated file. The source file of each entry is recorded as a comment in the generated header
+file.
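
For illustration only, a hypothetical TARGET_FS_CONFIG_GEN intermediate file
combining both section types might look like this (names and values are made
up):

[AID_VENDOR_FOO]
value: 2900

[vendor/bin/foo_service]
mode: 0755
user: AID_VENDOR_FOO
group: AID_SYSTEM
caps: NET_BIND_SERVICE

[vendor/data/foo/]
mode: 0770
user: AID_VENDOR_FOO
group: AID_VENDOR_FOO
caps: 0

Here vendor/bin/foo_service is a file entry, vendor/data/foo/ is a dir entry
(trailing /), and AID_VENDOR_FOO falls inside the first OEM reserved range.
Such a file would be listed in the device's TARGET_FS_CONFIG_GEN variable.
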
diff --git a/tools/fs_config/fs_config_generator.py b/tools/fs_config/fs_config_generator.py
new file mode 100755
index 0000000..482c2bc
--- /dev/null
+++ b/tools/fs_config/fs_config_generator.py
@@ -0,0 +1,279 @@
+#!/usr/bin/env python
+
+import ConfigParser
+import re
+import sys
+
+
+GENERATED = '''
+/*
+ * THIS IS AN AUTOGENERATED FILE! DO NOT MODIFY
+ */
+'''
+
+INCLUDE = '#include <private/android_filesystem_config.h>'
+
+DEFINE_NO_DIRS = '#define NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS\n'
+DEFINE_NO_FILES = '#define NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_FILES\n'
+
+DEFAULT_WARNING = '#warning No device-supplied android_filesystem_config.h, using empty default.'
+
+NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS_ENTRY = '{ 00000, AID_ROOT,      AID_ROOT,      0, "system/etc/fs_config_dirs" },'
+NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_FILES_ENTRY = '{ 00000, AID_ROOT,      AID_ROOT,      0, "system/etc/fs_config_files" },'
+
+IFDEF_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS = '#ifdef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS'
+ENDIF = '#endif'
+
+OPEN_FILE_STRUCT = 'static const struct fs_path_config android_device_files[] = {'
+OPEN_DIR_STRUCT = 'static const struct fs_path_config android_device_dirs[] = {'
+CLOSE_FILE_STRUCT = '};'
+
+GENERIC_DEFINE = "#define %s\t%s"
+
+FILE_COMMENT = '// Defined in file: \"%s\"'
+
+# from system/core/include/private/android_filesystem_config.h
+AID_OEM_RESERVED_RANGES = [
+    (2900, 2999),
+    (5000, 5999),
+]
+
+
+AID_MATCH = re.compile('AID_[a-zA-Z]+')
+
+def handle_aid(file_name, section_name, config, aids, seen_aids):
+    value = config.get(section_name, 'value')
+
+    errmsg = '%s for: \"' + section_name + '" file: \"' + file_name + '\"'
+
+    if not value:
+        raise Exception(errmsg % 'Found specified but unset "value"')
+
+    v = convert_int(value)
+    if v is None:
+        raise Exception(errmsg % ('Invalid "value", not a number, got: \"%s\"' % value))
+
+    # Values must be within OEM range
+    if not any(lower <= v <= upper for (lower, upper) in AID_OEM_RESERVED_RANGES):
+        s = '"value" not in valid range %s, got: %s'
+        s = s % (str(AID_OEM_RESERVED_RANGES), value)
+        raise Exception(errmsg % s)
+
+    # use the normalized int value in the dict and detect
+    # duplicate definitions of the same value
+    v = str(v)
+    if v in seen_aids[1]:
+        # map of value to aid name
+        a = seen_aids[1][v]
+
+        # aid name to file
+        f = seen_aids[0][a]
+
+        s = 'Duplicate AID value "%s" found on AID: "%s".' % (value, seen_aids[1][v])
+        s += ' Previous found in file: "%s."' % f
+        raise Exception(errmsg % s)
+
+    seen_aids[1][v] = section_name
+
+    # Append a tuple of (file_name, AID_*, normalized value, original value string).
+    # We keep the str version of value so we can print that out in the
+    # generated header so investigating parties can identify parts.
+    # We store the base10 value for sorting, so everything is ascending
+    # later.
+    aids.append((file_name, section_name, v, value))
+
+def convert_int(num):
+
+    try:
+        if num.startswith('0x'):
+            return int(num, 16)
+        elif num.startswith('0b'):
+            return int(num, 2)
+        elif num.startswith('0'):
+            return int(num, 8)
+        else:
+            return int(num, 10)
+    except ValueError:
+        pass
+    return None
+
+def handle_path(file_name, section_name, config, files, dirs):
+
+    mode = config.get(section_name, 'mode')
+    user = config.get(section_name, 'user')
+    group = config.get(section_name, 'group')
+    caps = config.get(section_name, 'caps')
+
+    errmsg = 'Found specified but unset option: \"%s" in file: \"' + file_name + '\"'
+
+    if not mode:
+        raise Exception(errmsg % 'mode')
+
+    if not user:
+        raise Exception(errmsg % 'user')
+
+    if not group:
+        raise Exception(errmsg % 'group')
+
+    if not caps:
+        raise Exception(errmsg % 'caps')
+
+    caps = caps.split()
+
+    tmp = []
+    for x in caps:
+        if convert_int(x) is not None:
+            tmp.append('(' + x + ')')
+        else:
+            tmp.append('(1ULL << CAP_' + x.upper() + ')')
+
+    caps = tmp
+
+    path = '"' + section_name + '"'
+
+    if len(mode) == 3:
+        mode = '0' + mode
+
+    try:
+        int(mode, 8)
+    except ValueError:
+        raise Exception('Mode must be octal characters, got: "' + mode + '"')
+
+    if len(mode) != 4:
+        raise Exception('Mode must be 3 or 4 characters, got: "' + mode + '"')
+
+    caps = '|'.join(caps)
+
+    x = [ mode, user, group, caps, section_name ]
+    if section_name[-1] == '/':
+        dirs.append((file_name, x))
+    else:
+        files.append((file_name, x))
+
+def handle_dup(name, file_name, section_name, seen):
+    if section_name in seen:
+        dups = '"' + seen[section_name] + '" and '
+        dups += file_name
+        raise Exception('Duplicate ' + name + ' "' + section_name + '" found in files: ' + dups)
+
+def parse(file_name, files, dirs, aids, seen_paths, seen_aids):
+
+    config = ConfigParser.ConfigParser()
+    config.read(file_name)
+
+    for s in config.sections():
+
+        if AID_MATCH.match(s) and config.has_option(s, 'value'):
+            handle_dup('AID', file_name, s, seen_aids[0])
+            seen_aids[0][s] = file_name
+            handle_aid(file_name, s, config, aids, seen_aids)
+        else:
+            handle_dup('path', file_name, s, seen_paths)
+            seen_paths[s] = file_name
+            handle_path(file_name, s, config, files, dirs)
+
+def generate(files, dirs, aids):
+    print GENERATED
+    print INCLUDE
+    print
+
+    are_dirs = len(dirs) > 0
+    are_files = len(files) > 0
+    are_aids = len(aids) > 0
+
+    if are_aids:
+        # sort on value of (file_name, name, value, strvalue)
+        aids.sort(key=lambda x: x[2])
+        for a in aids:
+            # use the preserved str value
+            print FILE_COMMENT % a[0]
+            print GENERIC_DEFINE % (a[1], a[3])
+
+        print
+
+    if not are_dirs:
+        print DEFINE_NO_DIRS
+
+    if not are_files:
+        print DEFINE_NO_FILES
+
+    if not are_files and not are_dirs and not are_aids:
+        print DEFAULT_WARNING
+        return
+
+    if are_files:
+        print OPEN_FILE_STRUCT
+        for tup in files:
+            f = tup[0]
+            c = tup[1]
+            c[4] = '"' + c[4] + '"'
+            c = '{ ' + '    ,'.join(c) + ' },'
+            print FILE_COMMENT % f
+            print '    ' + c
+
+        if not are_dirs:
+            print IFDEF_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS
+            print '    ' + NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS_ENTRY
+            print ENDIF
+        print CLOSE_FILE_STRUCT
+
+    if are_dirs:
+        print OPEN_DIR_STRUCT
+        for tup in dirs:
+            f = tup[0]
+            c = tup[1]
+            c[4] = '"' + c[4] + '"'
+            c = '{ ' + '    ,'.join(c) + ' },'
+            print FILE_COMMENT % f
+            print '    ' + c
+
+        print CLOSE_FILE_STRUCT
+
+def file_key(x):
+
+    # Wrapper class for custom prefix matching strings
+    class S(object):
+        def __init__(self, str):
+
+            self.orig = str
+            self.is_prefix = str[-1] == '*'
+            if self.is_prefix:
+                self.str = str[:-1]
+            else:
+                self.str = str
+
+        def __lt__(self, other):
+
+            # If both are prefix matches, the shorter
+            # string is 'bigger'
+            if self.is_prefix and other.is_prefix:
+                b = len(self.str) > len(other.str)
+            # If only self is a prefix match, self is 'bigger'
+            elif self.is_prefix:
+                b = False
+            # If only other is a prefix match, other is 'bigger'
+            elif other.is_prefix:
+                b = True
+            # Alphabetical
+            else:
+                b = self.str < other.str
+            return b
+
+    return S(x[4])
+
+def main():
+
+    files = []
+    dirs = []
+    aids = []
+    seen_paths = {}
+
+    # (name to file, value to aid)
+    seen_aids = ({}, {})
+
+    for x in sys.argv[1:]:
+        parse(x, files, dirs, aids, seen_paths, seen_aids)
+
+    files.sort(key=lambda x: file_key(x[1]))
+    generate(files, dirs, aids)
+
+if __name__ == '__main__':
+    main()
diff --git a/tools/ijar/classfile.cc b/tools/ijar/classfile.cc
index e0cf42e..d33e0db 100644
--- a/tools/ijar/classfile.cc
+++ b/tools/ijar/classfile.cc
@@ -123,7 +123,7 @@
 // See sec.4.4 of JVM spec.
 struct Constant {
 
-  Constant(u1 tag) :
+  explicit Constant(u1 tag) :
       slot_(0),
       tag_(tag) {}
 
@@ -180,7 +180,7 @@
 // See sec.4.4.1 of JVM spec.
 struct Constant_Class : Constant
 {
-  Constant_Class(u2 name_index) :
+  explicit Constant_Class(u2 name_index) :
       Constant(CONSTANT_Class),
       name_index_(name_index) {}
 
@@ -231,7 +231,7 @@
 // See sec.4.4.3 of JVM spec.
 struct Constant_String : Constant
 {
-  Constant_String(u2 string_index) :
+  explicit Constant_String(u2 string_index) :
       Constant(CONSTANT_String),
       string_index_(string_index) {}
 
@@ -360,7 +360,7 @@
 // See sec.4.4.9 of JVM spec.
 struct Constant_MethodType : Constant
 {
-  Constant_MethodType(u2 descriptor_index) :
+  explicit Constant_MethodType(u2 descriptor_index) :
       Constant(CONSTANT_MethodType),
       descriptor_index_(descriptor_index) {}
 
diff --git a/tools/ijar/zip.cc b/tools/ijar/zip.cc
index ca5f396..3aa06db 100644
--- a/tools/ijar/zip.cc
+++ b/tools/ijar/zip.cc
@@ -846,6 +846,7 @@
   memcpy(entry->file_name, filename, file_name_length_);
   entry->extra_field_length = 0;
   entry->extra_field = (const u1 *)"";
+  entry->crc32 = 0;
 
   // Output the ZIP local_file_header:
   put_u4le(q, LOCAL_FILE_HEADER_SIGNATURE);
diff --git a/tools/makeparallel/makeparallel.cpp b/tools/makeparallel/makeparallel.cpp
index 3c39846..c70fa9a 100644
--- a/tools/makeparallel/makeparallel.cpp
+++ b/tools/makeparallel/makeparallel.cpp
@@ -26,6 +26,7 @@
 #include <stdlib.h>
 #include <string.h>
 #include <unistd.h>
+#include <sys/resource.h>
 #include <sys/time.h>
 #include <sys/types.h>
 #include <sys/wait.h>
@@ -343,6 +344,15 @@
     // child
     unsetenv("MAKEFLAGS");
     unsetenv("MAKELEVEL");
+
+    // make 3.81 sets the stack ulimit to unlimited, which may cause problems
+    // for child processes
+    struct rlimit rlim{};
+    if (getrlimit(RLIMIT_STACK, &rlim) == 0 && rlim.rlim_cur == RLIM_INFINITY) {
+      rlim.rlim_cur = 8*1024*1024;
+      setrlimit(RLIMIT_STACK, &rlim);
+    }
+
     int ret = execvp(path, args.data());
     if (ret < 0) {
       error(errno, errno, "exec %s failed", path);
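
For illustration, the same stack-ulimit workaround applied above before exec
can be expressed with Python's resource module; this is a minimal sketch and
the function name is made up:

# Minimal sketch: lower an unlimited soft stack limit to 8 MiB.
import resource

def cap_stack_limit():
    soft, hard = resource.getrlimit(resource.RLIMIT_STACK)
    if soft == resource.RLIM_INFINITY:
        resource.setrlimit(resource.RLIMIT_STACK, (8 * 1024 * 1024, hard))
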
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 7cb9072..9e44263 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -31,7 +31,9 @@
 import datetime
 import errno
 import os
+import shlex
 import shutil
+import subprocess
 import tempfile
 import zipfile
 
@@ -48,12 +50,12 @@
 
 def AddSystem(output_zip, prefix="IMAGES/", recovery_img=None, boot_img=None):
   """Turn the contents of SYSTEM into a system image and store it in
-  output_zip."""
+  output_zip. Returns the name of the system image file."""
 
   prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "system.img")
   if os.path.exists(prebuilt_path):
     print "system.img already exists in %s, no need to rebuild..." % (prefix,)
-    return
+    return prebuilt_path
 
   def output_sink(fn, data):
     ofile = open(os.path.join(OPTIONS.input_tmp, "SYSTEM", fn), "w")
@@ -68,8 +70,23 @@
   block_list = common.MakeTempFile(prefix="system-blocklist-", suffix=".map")
   imgname = BuildSystem(OPTIONS.input_tmp, OPTIONS.info_dict,
                         block_list=block_list)
+
+  # If requested, calculate and add dm-verity integrity hashes and
+  # metadata to system.img.
+  if OPTIONS.info_dict.get("board_bvb_enable", None) == "true":
+    bvbtool = os.getenv('BVBTOOL') or "bvbtool"
+    cmd = [bvbtool, "add_image_hashes", "--image", imgname]
+    args = OPTIONS.info_dict.get("board_bvb_add_image_hashes_args", None)
+    if args and args.strip():
+      cmd.extend(shlex.split(args))
+    p = common.Run(cmd, stdout=subprocess.PIPE)
+    p.communicate()
+    assert p.returncode == 0, "bvbtool add_image_hashes of %s image failed" % (
+      os.path.basename(OPTIONS.input_tmp),)
+
   common.ZipWrite(output_zip, imgname, prefix + "system.img")
   common.ZipWrite(output_zip, block_list, prefix + "system.map")
+  return imgname
 
 
 def BuildSystem(input_dir, info_dict, block_list=None):
@@ -211,6 +228,35 @@
   shutil.rmtree(temp_dir)
 
 
+def AddPartitionTable(output_zip, prefix="IMAGES/"):
+  """Create a partition table image and store it in output_zip."""
+
+  _, img_file_name = tempfile.mkstemp()
+  _, bpt_file_name = tempfile.mkstemp()
+
+  # use BPTTOOL from environ, or "bpttool" if empty or not set.
+  bpttool = os.getenv("BPTTOOL") or "bpttool"
+  cmd = [bpttool, "make_table", "--output_json", bpt_file_name,
+         "--output_gpt", img_file_name]
+  input_files_str = OPTIONS.info_dict["board_bpt_input_files"]
+  input_files = input_files_str.split(" ")
+  for i in input_files:
+    cmd.extend(["--input", i])
+  disk_size = OPTIONS.info_dict.get("board_bpt_disk_size")
+  if disk_size:
+    cmd.extend(["--disk_size", disk_size])
+  args = OPTIONS.info_dict.get("board_bpt_make_table_args")
+  if args:
+    cmd.extend(shlex.split(args))
+
+  p = common.Run(cmd, stdout=subprocess.PIPE)
+  p.communicate()
+  assert p.returncode == 0, "bpttool make_table failed"
+
+  common.ZipWrite(output_zip, img_file_name, prefix + "partition-table.img")
+  common.ZipWrite(output_zip, bpt_file_name, prefix + "partition-table.bpt")
+
+
 def AddCache(output_zip, prefix="IMAGES/"):
   """Create an empty cache image and store it in output_zip."""
 
@@ -275,23 +321,40 @@
                                compression=zipfile.ZIP_DEFLATED)
 
   has_recovery = (OPTIONS.info_dict.get("no_recovery") != "true")
+  system_root_image = (OPTIONS.info_dict.get("system_root_image", None) == "true")
+  board_bvb_enable = (OPTIONS.info_dict.get("board_bvb_enable", None) == "true")
+
+  # Brillo Verified Boot is incompatible with certain
+  # configurations. Explicitly check for these.
+  if board_bvb_enable:
+    assert not has_recovery, "has_recovery incompatible with bvb"
+    assert not system_root_image, "system_root_image incompatible with bvb"
+    assert not OPTIONS.rebuild_recovery, "rebuild_recovery incompatible with bvb"
+    assert not has_vendor, "VENDOR images currently incompatible with bvb"
 
   def banner(s):
     print "\n\n++++ " + s + " ++++\n\n"
 
-  banner("boot")
   prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", "boot.img")
   boot_image = None
   if os.path.exists(prebuilt_path):
+    banner("boot")
     print "boot.img already exists in IMAGES/, no need to rebuild..."
     if OPTIONS.rebuild_recovery:
       boot_image = common.GetBootableImage(
           "IMAGES/boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
   else:
-    boot_image = common.GetBootableImage(
+    if board_bvb_enable:
+      # With Brillo Verified Boot, we need to build system.img before
+      # boot.img since the latter includes the dm-verity root hash and
+      # salt for the former.
+      pass
+    else:
+      banner("boot")
+      boot_image = common.GetBootableImage(
         "IMAGES/boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
-    if boot_image:
-      boot_image.AddToZip(output_zip)
+      if boot_image:
+        boot_image.AddToZip(output_zip)
 
   recovery_image = None
   if has_recovery:
@@ -310,7 +373,17 @@
         recovery_image.AddToZip(output_zip)
 
   banner("system")
-  AddSystem(output_zip, recovery_img=recovery_image, boot_img=boot_image)
+  system_img_path = AddSystem(
+    output_zip, recovery_img=recovery_image, boot_img=boot_image)
+  if OPTIONS.info_dict.get("board_bvb_enable", None) == "true":
+    # If we're using Brillo Verified Boot, we can now build boot.img
+    # given that we have system.img.
+    banner("boot")
+    boot_image = common.GetBootableImage(
+      "IMAGES/boot.img", "boot.img", OPTIONS.input_tmp, "BOOT",
+      system_img_path=system_img_path)
+    if boot_image:
+      boot_image.AddToZip(output_zip)
   if has_vendor:
     banner("vendor")
     AddVendor(output_zip)
@@ -318,9 +391,13 @@
   AddUserdata(output_zip)
   banner("cache")
   AddCache(output_zip)
+  if OPTIONS.info_dict.get("board_bpt_enable", None) == "true":
+    banner("partition-table")
+    AddPartitionTable(output_zip)
 
-  # For devices using A/B update, copy over images from RADIO/ to IMAGES/ and
-  # make sure we have all the needed images ready under IMAGES/.
+  # For devices using A/B update, copy over images from RADIO/ and/or
+  # VENDOR_IMAGES/ to IMAGES/ and make sure we have all the needed
+  # images ready under IMAGES/. All images should have '.img' as extension.
   ab_partitions = os.path.join(OPTIONS.input_tmp, "META", "ab_partitions.txt")
   if os.path.exists(ab_partitions):
     with open(ab_partitions, 'r') as f:
@@ -328,9 +405,17 @@
     for line in lines:
       img_name = line.strip() + ".img"
       img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
+      img_vendor_dir = os.path.join(
+        OPTIONS.input_tmp, "VENDOR_IMAGES")
       if os.path.exists(img_radio_path):
         common.ZipWrite(output_zip, img_radio_path,
                         os.path.join("IMAGES", img_name))
+      else:
+        for root, _, files in os.walk(img_vendor_dir):
+          if img_name in files:
+            common.ZipWrite(output_zip, os.path.join(root, img_name),
+              os.path.join("IMAGES", img_name))
+            break
 
       # Zip spec says: All slashes MUST be forward slashes.
       img_path = 'IMAGES/' + img_name
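
The hunk above changes where A/B partition images are sourced: RADIO/ is still preferred, with a recursive search of VENDOR_IMAGES/ as the fallback. A standalone sketch of that lookup order (hypothetical layout, zip writing omitted):

    import os

    def find_ab_image(input_tmp, img_name):
        """Return the path to copy into IMAGES/, preferring RADIO/ over VENDOR_IMAGES/."""
        radio_path = os.path.join(input_tmp, "RADIO", img_name)
        if os.path.exists(radio_path):
            return radio_path
        # Fall back to walking VENDOR_IMAGES/, mirroring the os.walk() loop above.
        for root, _, files in os.walk(os.path.join(input_tmp, "VENDOR_IMAGES")):
            if img_name in files:
                return os.path.join(root, img_name)
        return None

    # e.g. find_ab_image("/tmp/targetfiles-unzipped", "modem.img")
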
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index ce60667..0e1953c 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -15,9 +15,9 @@
 # limitations under the License.
 
 """
-Build image output_image_file from input_directory and properties_file.
+Build image output_image_file from input_directory, properties_file, and target_out_dir.
 
-Usage:  build_image input_directory properties_file output_image_file
+Usage:  build_image input_directory properties_file output_image_file target_out_dir
 
 """
 import os
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 60f44db..da2d8db 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -494,8 +494,114 @@
   return data
 
 
+def _BuildBvbBootableImage(sourcedir, fs_config_file, system_img_path,
+                           info_dict=None, has_ramdisk=False):
+  """Build a bootable image compatible with Brillo Verified Boot from the
+  specified sourcedir.
+
+  Take a kernel, cmdline, system image path, and optionally a ramdisk
+  directory from the input (in 'sourcedir'), and turn them into a boot
+  image.  Return the image data, or None if sourcedir does not appear
+  to contain files for building the requested image.
+  """
+
+  def make_ramdisk():
+    ramdisk_img = tempfile.NamedTemporaryFile()
+
+    if os.access(fs_config_file, os.F_OK):
+      cmd = ["mkbootfs", "-f", fs_config_file,
+             os.path.join(sourcedir, "RAMDISK")]
+    else:
+      cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")]
+    p1 = Run(cmd, stdout=subprocess.PIPE)
+    p2 = Run(["minigzip"], stdin=p1.stdout, stdout=ramdisk_img.file.fileno())
+
+    p2.wait()
+    p1.wait()
+    assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (sourcedir,)
+    assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (sourcedir,)
+
+    return ramdisk_img
+
+  if not os.access(os.path.join(sourcedir, "kernel"), os.F_OK):
+    return None
+
+  if has_ramdisk and not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK):
+    return None
+
+  if info_dict is None:
+    info_dict = OPTIONS.info_dict
+
+  img = tempfile.NamedTemporaryFile()
+
+  if has_ramdisk:
+    ramdisk_img = make_ramdisk()
+
+  # Use BVBTOOL from the environment, or "bvbtool" if empty or not set.
+  bvbtool = os.getenv('BVBTOOL') or "bvbtool"
+
+  # First, create boot.img.
+  cmd = [bvbtool, "make_boot_image"]
+
+  fn = os.path.join(sourcedir, "cmdline")
+  if os.access(fn, os.F_OK):
+    cmd.append("--kernel_cmdline")
+    cmd.append(open(fn).read().rstrip("\n"))
+
+  cmd.extend(["--kernel", os.path.join(sourcedir, "kernel")])
+
+  if has_ramdisk:
+    cmd.extend(["--initrd", ramdisk_img.name])
+
+  cmd.extend(["--rootfs_with_hashes", system_img_path])
+
+  args = info_dict.get("board_bvb_make_boot_image_args", None)
+  if args and args.strip():
+    cmd.extend(shlex.split(args))
+
+  rollback_index = info_dict.get("board_bvb_rollback_index", None)
+  if rollback_index and rollback_index.strip():
+    cmd.extend(["--rollback_index", rollback_index.strip()])
+
+  cmd.extend(["--output", img.name])
+
+  p = Run(cmd, stdout=subprocess.PIPE)
+  p.communicate()
+  assert p.returncode == 0, "bvbtool make_boot_image of %s image failed" % (
+      os.path.basename(sourcedir),)
+
+  # Then, sign boot.img.
+  cmd = [bvbtool, "sign_boot_image", "--image", img.name]
+
+  algorithm = info_dict.get("board_bvb_algorithm", None)
+  key_path = info_dict.get("board_bvb_key_path", None)
+  if algorithm and algorithm.strip() and key_path and key_path.strip():
+    cmd.extend(["--algorithm", algorithm, "--key", key_path])
+  else:
+    cmd.extend(["--algorithm", "SHA256_RSA4096"])
+    cmd.extend(["--key", "system/bvb/test/testkey_rsa4096.pem"])
+
+  args = info_dict.get("board_bvb_sign_boot_image_args", None)
+  if args and args.strip():
+    cmd.extend(shlex.split(args))
+
+  p = Run(cmd, stdout=subprocess.PIPE)
+  p.communicate()
+  assert p.returncode == 0, "bvbtool sign_boot_image of %s image failed" % (
+      os.path.basename(sourcedir),)
+
+  img.seek(os.SEEK_SET, 0)
+  data = img.read()
+
+  if has_ramdisk:
+    ramdisk_img.close()
+  img.close()
+
+  return data
+
+
 def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir,
-                     info_dict=None):
+                     info_dict=None, system_img_path=None):
   """Return a File object with the desired bootable image.
 
   Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name 'prebuilt_name',
@@ -525,9 +631,14 @@
                  info_dict.get("recovery_as_boot") == "true")
 
   fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
-  data = _BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
-                             os.path.join(unpack_dir, fs_config),
-                             info_dict, has_ramdisk)
+  if info_dict.get("board_bvb_enable", None) == "true":
+    data = _BuildBvbBootableImage(os.path.join(unpack_dir, tree_subdir),
+                                  os.path.join(unpack_dir, fs_config),
+                                  system_img_path, info_dict, has_ramdisk)
+  else:
+    data = _BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
+                               os.path.join(unpack_dir, fs_config),
+                               info_dict, has_ramdisk)
   if data:
     return File(name, data)
   return None
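
In outline, _BuildBvbBootableImage() drives bvbtool twice: make_boot_image first (kernel, optional initrd, cmdline and the already-built system.img for the rootfs hashes), then sign_boot_image, falling back to the SHA256_RSA4096 test key when no board algorithm/key pair is configured. A condensed sketch of the two command lines (tool name and flags as in the hunk above; paths hypothetical):

    import os

    def bvb_boot_image_cmds(kernel, system_img, out_img, info_dict, initrd=None):
        """Return the make and sign command lines used to produce boot.img."""
        bvbtool = os.getenv("BVBTOOL") or "bvbtool"

        make_cmd = [bvbtool, "make_boot_image", "--kernel", kernel,
                    "--rootfs_with_hashes", system_img, "--output", out_img]
        if initrd:
            make_cmd.extend(["--initrd", initrd])

        sign_cmd = [bvbtool, "sign_boot_image", "--image", out_img]
        algorithm = info_dict.get("board_bvb_algorithm")
        key_path = info_dict.get("board_bvb_key_path")
        if algorithm and key_path:
            sign_cmd.extend(["--algorithm", algorithm, "--key", key_path])
        else:
            # Default used above when the board does not configure a key.
            sign_cmd.extend(["--algorithm", "SHA256_RSA4096",
                             "--key", "system/bvb/test/testkey_rsa4096.pem"])
        return make_cmd, sign_cmd

    # e.g. bvb_boot_image_cmds("BOOT/kernel", "IMAGES/system.img", "/tmp/boot.img", {})
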
diff --git a/tools/releasetools/edify_generator.py b/tools/releasetools/edify_generator.py
index 57f8cda..ecdc167 100644
--- a/tools/releasetools/edify_generator.py
+++ b/tools/releasetools/edify_generator.py
@@ -118,6 +118,17 @@
                " or ".join(fp))
     self.script.append(cmd)
 
+  def AssertFingerprintOrThumbprint(self, fp, tp):
+    """Assert that the current recovery build fingerprint is fp, or thumbprint
+       is tp."""
+    cmd = ('getprop("ro.build.fingerprint") == "{fp}" ||\n'
+           '    getprop("ro.build.thumbprint") == "{tp}" ||\n'
+           '    abort("Package expects build fingerprint of {fp} or '
+           'thumbprint of {tp}; this device has a fingerprint of " '
+           '+ getprop("ro.build.fingerprint") + " and a thumbprint of " '
+           '+ getprop("ro.build.thumbprint") + ".");').format(fp=fp, tp=tp)
+    self.script.append(cmd)
+
   def AssertOlderBuild(self, timestamp, timestamp_text):
     """Assert that the build on the device is older (or the same as)
     the given timestamp."""
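
The new AssertFingerprintOrThumbprint() emits a single edify condition that accepts either identifier. A small sketch of the string it produces, using the same .format() construction with hypothetical fingerprint/thumbprint values:

    fp = "acme/rodan/rodan:7.0/NYC/1234:userdebug/test-keys"  # hypothetical fingerprint
    tp = "7.0/NYC/1234:userdebug/test-keys"                   # hypothetical thumbprint

    cmd = ('getprop("ro.build.fingerprint") == "{fp}" ||\n'
           '    getprop("ro.build.thumbprint") == "{tp}" ||\n'
           '    abort("Package expects build fingerprint of {fp} or '
           'thumbprint of {tp}; this device has a fingerprint of " '
           '+ getprop("ro.build.fingerprint") + " and a thumbprint of " '
           '+ getprop("ro.build.thumbprint") + ".");').format(fp=fp, tp=tp)
    print(cmd)
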
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 861c485..582412a 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -439,7 +439,7 @@
 
 def AppendAssertions(script, info_dict, oem_dict=None):
   oem_props = info_dict.get("oem_fingerprint_properties")
-  if oem_props is None or len(oem_props) == 0:
+  if not oem_props:
     device = GetBuildProp("ro.product.device", info_dict)
     script.AssertDevice(device)
   else:
@@ -528,10 +528,10 @@
   # in the target build.
   script = edify_generator.EdifyGenerator(3, OPTIONS.info_dict)
 
-  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
   recovery_mount_options = OPTIONS.info_dict.get("recovery_mount_options")
+  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
   oem_dict = None
-  if oem_props is not None and len(oem_props) > 0:
+  if oem_props:
     if OPTIONS.oem_source is None:
       raise common.ExternalError("OEM source required for this build")
     if not OPTIONS.oem_no_mount:
@@ -539,9 +539,9 @@
     oem_dict = common.LoadDictionaryFromLines(
         open(OPTIONS.oem_source).readlines())
 
+  target_fp = CalculateFingerprint(oem_props, oem_dict, OPTIONS.info_dict)
   metadata = {
-      "post-build": CalculateFingerprint(oem_props, oem_dict,
-                                         OPTIONS.info_dict),
+      "post-build": target_fp,
       "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
                                    OPTIONS.info_dict),
       "post-timestamp": GetBuildProp("ro.build.date.utc", OPTIONS.info_dict),
@@ -610,8 +610,7 @@
 """ % bcb_dev)
 
   # Dump fingerprints
-  script.Print("Target: %s" % CalculateFingerprint(
-      oem_props, oem_dict, OPTIONS.info_dict))
+  script.Print("Target: %s" % target_fp)
 
   device_specific.FullOTA_InstallBegin()
 
@@ -773,17 +772,18 @@
   target_version = OPTIONS.target_info_dict["recovery_api_version"]
 
   if source_version == 0:
-    print ("WARNING: generating edify script for a source that "
-           "can't install it.")
+    print("WARNING: generating edify script for a source that "
+          "can't install it.")
   script = edify_generator.EdifyGenerator(
       source_version, OPTIONS.target_info_dict,
       fstab=OPTIONS.source_info_dict["fstab"])
 
-  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
   recovery_mount_options = OPTIONS.source_info_dict.get(
       "recovery_mount_options")
+  source_oem_props = OPTIONS.source_info_dict.get("oem_fingerprint_properties")
+  target_oem_props = OPTIONS.target_info_dict.get("oem_fingerprint_properties")
   oem_dict = None
-  if oem_props is not None and len(oem_props) > 0:
+  if source_oem_props or target_oem_props:
     if OPTIONS.oem_source is None:
       raise common.ExternalError("OEM source required for this build")
     if not OPTIONS.oem_no_mount:
@@ -792,8 +792,8 @@
         open(OPTIONS.oem_source).readlines())
 
   metadata = {
-      "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
-                                   OPTIONS.source_info_dict),
+      "pre-device": GetOemProperty("ro.product.device", source_oem_props,
+                                   oem_dict, OPTIONS.source_info_dict),
       "ota-type": "BLOCK",
   }
 
@@ -828,9 +828,9 @@
       metadata=metadata,
       info_dict=OPTIONS.source_info_dict)
 
-  source_fp = CalculateFingerprint(oem_props, oem_dict,
+  source_fp = CalculateFingerprint(source_oem_props, oem_dict,
                                    OPTIONS.source_info_dict)
-  target_fp = CalculateFingerprint(oem_props, oem_dict,
+  target_fp = CalculateFingerprint(target_oem_props, oem_dict,
                                    OPTIONS.target_info_dict)
   metadata["pre-build"] = source_fp
   metadata["post-build"] = target_fp
@@ -926,32 +926,39 @@
 """ % bcb_dev)
 
   # Dump fingerprints
-  script.Print("Source: %s" % CalculateFingerprint(
-      oem_props, oem_dict, OPTIONS.source_info_dict))
-  script.Print("Target: %s" % CalculateFingerprint(
-      oem_props, oem_dict, OPTIONS.target_info_dict))
+  script.Print("Source: %s" % (source_fp,))
+  script.Print("Target: %s" % (target_fp,))
 
   script.Print("Verifying current system...")
 
   device_specific.IncrementalOTA_VerifyBegin()
 
-  if oem_props is None:
-    # When blockimgdiff version is less than 3 (non-resumable block-based OTA),
-    # patching on a device that's already on the target build will damage the
-    # system. Because operations like move don't check the block state, they
-    # always apply the changes unconditionally.
-    if blockimgdiff_version <= 2:
+  # When blockimgdiff version is less than 3 (non-resumable block-based OTA),
+  # patching on a device that's already on the target build will damage the
+  # system. Because operations like move don't check the block state, they
+  # always apply the changes unconditionally.
+  if blockimgdiff_version <= 2:
+    if source_oem_props is None:
       script.AssertSomeFingerprint(source_fp)
     else:
-      script.AssertSomeFingerprint(source_fp, target_fp)
-  else:
-    if blockimgdiff_version <= 2:
       script.AssertSomeThumbprint(
           GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
-    else:
+
+  else: # blockimgdiff_version > 2
+    if source_oem_props is None and target_oem_props is None:
+      script.AssertSomeFingerprint(source_fp, target_fp)
+    elif source_oem_props is not None and target_oem_props is not None:
       script.AssertSomeThumbprint(
           GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict),
           GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
+    elif source_oem_props is None and target_oem_props is not None:
+      script.AssertFingerprintOrThumbprint(
+          source_fp,
+          GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict))
+    else:
+      script.AssertFingerprintOrThumbprint(
+          target_fp,
+          GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
 
   # Check the required cache size (i.e. stashed blocks).
   size = []
@@ -1074,10 +1081,11 @@
   recovery_mount_options = OPTIONS.info_dict.get(
       "recovery_mount_options")
   oem_dict = None
-  if oem_props is not None and len(oem_props) > 0:
+  if oem_props:
     if OPTIONS.oem_source is None:
       raise common.ExternalError("OEM source required for this build")
-    script.Mount("/oem", recovery_mount_options)
+    if not OPTIONS.oem_no_mount:
+      script.Mount("/oem", recovery_mount_options)
     oem_dict = common.LoadDictionaryFromLines(
         open(OPTIONS.oem_source).readlines())
 
@@ -1416,17 +1424,18 @@
   target_version = OPTIONS.target_info_dict["recovery_api_version"]
 
   if source_version == 0:
-    print ("WARNING: generating edify script for a source that "
-           "can't install it.")
+    print("WARNING: generating edify script for a source that "
+          "can't install it.")
   script = edify_generator.EdifyGenerator(
       source_version, OPTIONS.target_info_dict,
       fstab=OPTIONS.source_info_dict["fstab"])
 
-  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
   recovery_mount_options = OPTIONS.source_info_dict.get(
       "recovery_mount_options")
+  source_oem_props = OPTIONS.source_info_dict.get("oem_fingerprint_properties")
+  target_oem_props = OPTIONS.target_info_dict.get("oem_fingerprint_properties")
   oem_dict = None
-  if oem_props is not None and len(oem_props) > 0:
+  if source_oem_props or target_oem_props:
     if OPTIONS.oem_source is None:
       raise common.ExternalError("OEM source required for this build")
     if not OPTIONS.oem_no_mount:
@@ -1435,8 +1444,8 @@
         open(OPTIONS.oem_source).readlines())
 
   metadata = {
-      "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
-                                   OPTIONS.source_info_dict),
+      "pre-device": GetOemProperty("ro.product.device", source_oem_props,
+                                   oem_dict, OPTIONS.source_info_dict),
       "ota-type": "FILE",
   }
 
@@ -1479,17 +1488,25 @@
   else:
     vendor_diff = None
 
-  target_fp = CalculateFingerprint(oem_props, oem_dict,
+  target_fp = CalculateFingerprint(target_oem_props, oem_dict,
                                    OPTIONS.target_info_dict)
-  source_fp = CalculateFingerprint(oem_props, oem_dict,
+  source_fp = CalculateFingerprint(source_oem_props, oem_dict,
                                    OPTIONS.source_info_dict)
 
-  if oem_props is None:
+  if source_oem_props is None and target_oem_props is None:
     script.AssertSomeFingerprint(source_fp, target_fp)
-  else:
+  elif source_oem_props is not None and target_oem_props is not None:
     script.AssertSomeThumbprint(
         GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict),
         GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
+  elif source_oem_props is None and target_oem_props is not None:
+    script.AssertFingerprintOrThumbprint(
+        source_fp,
+        GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict))
+  else:
+    script.AssertFingerprintOrThumbprint(
+        target_fp,
+        GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
 
   metadata["pre-build"] = source_fp
   metadata["post-build"] = target_fp
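
With oem_fingerprint_properties now read separately from the source and the target build, the incremental paths above pick the device-identity assertion from a four-way matrix. A sketch of that selection (returns the assertion that would be emitted; names mirror the hunks above):

    def pick_assertion(source_oem_props, target_oem_props):
        """Which device-identity assertion gates the incremental OTA."""
        if source_oem_props is None and target_oem_props is None:
            return "AssertSomeFingerprint(source_fp, target_fp)"
        if source_oem_props is not None and target_oem_props is not None:
            return "AssertSomeThumbprint(target_thumbprint, source_thumbprint)"
        if source_oem_props is None:
            # Source build is identified by fingerprint, target by thumbprint.
            return "AssertFingerprintOrThumbprint(source_fp, target_thumbprint)"
        # Source build is identified by thumbprint, target by fingerprint.
        return "AssertFingerprintOrThumbprint(target_fp, source_thumbprint)"

    # e.g. pick_assertion(None, "ro.product.name") uses the source fingerprint
    # together with the target thumbprint.
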
diff --git a/tools/releasetools/rangelib.py b/tools/releasetools/rangelib.py
index aa572cc..1638f8c 100644
--- a/tools/releasetools/rangelib.py
+++ b/tools/releasetools/rangelib.py
@@ -71,6 +71,19 @@
     """
     return cls(text)
 
+  @classmethod
+  def parse_raw(cls, text):
+    """Parse a string generated by RangeSet.to_string_raw().
+
+    >>> RangeSet.parse_raw(RangeSet("0-9").to_string_raw())
+    <RangeSet("0-9")>
+    """
+
+    raw = [int(i) for i in text.split(',')]
+    assert raw[0] == len(raw[1:]), "Invalid raw string."
+
+    return cls(data=raw[1:])
+
   def _parse_internal(self, text):
     data = []
     last = -1
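
parse_raw() expects the serialization produced by to_string_raw(): a leading value count followed by the monotonic begin/end pairs, comma-separated. A short sketch of the round trip, assuming that format (the helper below is illustrative; it is implied by the assertion in parse_raw(), not shown in this diff):

    def to_string_raw(data):
        # "<number of values>,<start1>,<end1>,<start2>,<end2>,..."
        return ",".join(str(i) for i in [len(data)] + list(data))

    def parse_raw(text):
        raw = [int(i) for i in text.split(',')]
        assert raw[0] == len(raw[1:]), "Invalid raw string."
        return raw[1:]

    print(to_string_raw([0, 10]))  # "2,0,10": the half-open pair behind RangeSet("0-9")
    print(parse_raw("2,0,10"))     # [0, 10]
    # parse_raw("4,0,10") raises AssertionError: the declared count (4) != 2 values.
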
diff --git a/tools/releasetools/target_files_diff.py b/tools/releasetools/target_files_diff.py
index 0f717e0..7415f27 100755
--- a/tools/releasetools/target_files_diff.py
+++ b/tools/releasetools/target_files_diff.py
@@ -51,10 +51,6 @@
               'RECOVERY/RAMDISK/selinux_version']:
     return True
 
-  # b/26956807 .odex files are not deterministic
-  if name.endswith('.odex'):
-    return True
-
   return False
 
 
diff --git a/tools/releasetools/test_rangelib.py b/tools/releasetools/test_rangelib.py
index a61a64e..1c57cbc 100644
--- a/tools/releasetools/test_rangelib.py
+++ b/tools/releasetools/test_rangelib.py
@@ -124,3 +124,17 @@
     self.assertTrue(RangeSet(data=[2, 9, 30, 31, 31, 32, 35, 36]).monotonic)
     self.assertTrue(RangeSet(data=[0, 5, 5, 10]).monotonic)
     self.assertFalse(RangeSet(data=[5, 10, 0, 5]).monotonic)
+
+  def test_parse_raw(self):
+    self.assertEqual(
+        RangeSet.parse_raw(RangeSet("0-9").to_string_raw()),
+        RangeSet("0-9"))
+    self.assertEqual(RangeSet.parse_raw(
+        RangeSet("2-10 12").to_string_raw()),
+        RangeSet("2-10 12"))
+    self.assertEqual(
+        RangeSet.parse_raw(RangeSet("11 2-10 12 1 0").to_string_raw()),
+        RangeSet("11 2-10 12 1 0"))
+
+    with self.assertRaises(AssertionError):
+      RangeSet.parse_raw("4,0,10")
diff --git a/tools/rgb2565/to565.c b/tools/rgb2565/to565.c
index abf9cdb..94d62ef 100644
--- a/tools/rgb2565/to565.c
+++ b/tools/rgb2565/to565.c
@@ -65,11 +65,11 @@
         out = to565(rb, gb, bb);
         write(1, &out, 2);
 
-#define apply_error(ch) {                                               \
-            next_error[(i-1)*3+ch] += e * 3 / 16;                       \
-            next_error[(i)*3+ch] += e * 5 / 16;                         \
-            next_error[(i+1)*3+ch] += e * 1 / 16;                       \
-            error[(i+1)*3+ch] += e - ((e*1/16) + (e*3/16) + (e*5/16));  \
+#define apply_error(ch) {                                                \
+            next_error[(i-1)*3+(ch)] += e * 3 / 16;                      \
+            next_error[(i)*3+(ch)] += e * 5 / 16;                        \
+            next_error[(i+1)*3+(ch)] += e * 1 / 16;                      \
+            error[(i+1)*3+(ch)] += e - ((e*1/16) + (e*3/16) + (e*5/16)); \
         }
 
         e = r - from565_r(out);
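
The apply_error macro spreads each channel's quantization error with Floyd-Steinberg-style weights: 3/16, 5/16 and 1/16 to the three neighbours on the next row, and the exact remainder to the next pixel on the current row. A Python sketch of the same arithmetic for one channel (illustrative only; the C macro above is the real implementation, and Python's // floors negative values where C truncates):

    def apply_error(error, next_error, i, ch, e):
        """Distribute error e for channel ch of pixel i (3 interleaved channels)."""
        next_error[(i - 1) * 3 + ch] += e * 3 // 16   # next row, left
        next_error[i * 3 + ch] += e * 5 // 16         # next row, below
        next_error[(i + 1) * 3 + ch] += e * 1 // 16   # next row, right
        # Whatever is left (about 7/16) goes to the pixel to the right, so the
        # distributed portions sum to e exactly, as in the macro.
        error[(i + 1) * 3 + ch] += e - ((e * 1 // 16) + (e * 3 // 16) + (e * 5 // 16))

    row_err, next_err = [0] * 18, [0] * 18    # a tiny 6-pixel RGB row
    apply_error(row_err, next_err, 2, 0, 16)  # e = 16 splits as 3 + 5 + 1 + 7
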
diff --git a/tools/signapk/src/com/android/signapk/ApkSignerV2.java b/tools/signapk/src/com/android/signapk/ApkSignerV2.java
index 46cd11e..7b617db 100644
--- a/tools/signapk/src/com/android/signapk/ApkSignerV2.java
+++ b/tools/signapk/src/com/android/signapk/ApkSignerV2.java
@@ -75,7 +75,6 @@
     public static final int SIGNATURE_ECDSA_WITH_SHA256 = 0x0201;
     public static final int SIGNATURE_ECDSA_WITH_SHA512 = 0x0202;
     public static final int SIGNATURE_DSA_WITH_SHA256 = 0x0301;
-    public static final int SIGNATURE_DSA_WITH_SHA512 = 0x0302;
 
     /**
      * {@code .SF} file header section attribute indicating that the APK is signed not just with
@@ -660,8 +659,6 @@
                 return Pair.create("SHA512withECDSA", null);
             case SIGNATURE_DSA_WITH_SHA256:
                 return Pair.create("SHA256withDSA", null);
-            case SIGNATURE_DSA_WITH_SHA512:
-                return Pair.create("SHA512withDSA", null);
             default:
                 throw new IllegalArgumentException(
                         "Unknown signature algorithm: 0x"
@@ -679,7 +676,6 @@
             case SIGNATURE_RSA_PSS_WITH_SHA512:
             case SIGNATURE_RSA_PKCS1_V1_5_WITH_SHA512:
             case SIGNATURE_ECDSA_WITH_SHA512:
-            case SIGNATURE_DSA_WITH_SHA512:
                 return CONTENT_DIGEST_CHUNKED_SHA512;
             default:
                 throw new IllegalArgumentException(
diff --git a/tools/signapk/src/com/android/signapk/SignApk.java b/tools/signapk/src/com/android/signapk/SignApk.java
index ba84b42..1df6b80 100644
--- a/tools/signapk/src/com/android/signapk/SignApk.java
+++ b/tools/signapk/src/com/android/signapk/SignApk.java
@@ -82,6 +82,7 @@
 import java.util.jar.JarOutputStream;
 import java.util.jar.Manifest;
 import java.util.regex.Pattern;
+
 import javax.crypto.Cipher;
 import javax.crypto.EncryptedPrivateKeyInfo;
 import javax.crypto.SecretKeyFactory;
@@ -126,24 +127,42 @@
     private static final String APK_SIG_SCHEME_V2_DIGEST_ALGORITHM = "SHA-256";
 
     /**
-     * Minimum Android SDK API Level which accepts JAR signatures which use SHA-256. Older platform
-     * versions accept only SHA-1 signatures.
+     * Returns the digest algorithm ID (one of {@code USE_SHA1} or {@code USE_SHA256}) to be used
+     * for v1 signing (JAR signing) an APK using the private key corresponding to the provided
+     * certificate.
+     *
+     * @param minSdkVersion minimum Android platform API Level supported by the APK (see
+     *        minSdkVersion attribute in AndroidManifest.xml). The higher the minSdkVersion, the
+     *        stronger hash may be used for signing the APK.
      */
-    private static final int MIN_API_LEVEL_FOR_SHA256_JAR_SIGNATURES = 18;
+    private static int getV1DigestAlgorithmForApk(X509Certificate cert, int minSdkVersion) {
+        String keyAlgorithm = cert.getPublicKey().getAlgorithm();
+        if ("RSA".equalsIgnoreCase(keyAlgorithm)) {
+            // RSA can be used only with SHA-1 prior to API Level 18.
+            return (minSdkVersion < 18) ? USE_SHA1 : USE_SHA256;
+        } else if ("EC".equalsIgnoreCase(keyAlgorithm)) {
+            // ECDSA cannot be used prior to API Level 18 at all. It can only be used with SHA-1
+            // on API Levels 18, 19, and 20.
+            if (minSdkVersion < 18) {
+                throw new IllegalArgumentException(
+                        "ECDSA signatures only supported for minSdkVersion 18 and higher");
+            }
+            return (minSdkVersion < 21) ? USE_SHA1 : USE_SHA256;
+        } else {
+            throw new IllegalArgumentException("Unsupported key algorithm: " + keyAlgorithm);
+        }
+    }
 
     /**
-     * Return one of USE_SHA1 or USE_SHA256 according to the signature
-     * algorithm specified in the cert.
+     * Returns the digest algorithm ID (one of {@code USE_SHA1} or {@code USE_SHA256}) to be used
+     * for signing an OTA update package using the private key corresponding to the provided
+     * certificate.
      */
-    private static int getDigestAlgorithm(X509Certificate cert, int minSdkVersion) {
+    private static int getDigestAlgorithmForOta(X509Certificate cert) {
         String sigAlg = cert.getSigAlgName().toUpperCase(Locale.US);
         if ("SHA1WITHRSA".equals(sigAlg) || "MD5WITHRSA".equals(sigAlg)) {
             // see "HISTORICAL NOTE" above.
-            if (minSdkVersion < MIN_API_LEVEL_FOR_SHA256_JAR_SIGNATURES) {
-                return USE_SHA1;
-            } else {
-                return USE_SHA256;
-            }
+            return USE_SHA1;
         } else if (sigAlg.startsWith("SHA256WITH")) {
             return USE_SHA256;
         } else {
@@ -152,27 +171,39 @@
         }
     }
 
-    /** Returns the expected signature algorithm for this key type. */
-    private static String getSignatureAlgorithm(X509Certificate cert, int minSdkVersion) {
-        String keyType = cert.getPublicKey().getAlgorithm().toUpperCase(Locale.US);
-        if ("RSA".equalsIgnoreCase(keyType)) {
-            if ((minSdkVersion >= MIN_API_LEVEL_FOR_SHA256_JAR_SIGNATURES)
-                    || (getDigestAlgorithm(cert, minSdkVersion) == USE_SHA256)) {
-                return "SHA256withRSA";
-            } else {
-                return "SHA1withRSA";
-            }
-        } else if ("EC".equalsIgnoreCase(keyType)) {
-            return "SHA256withECDSA";
+    /**
+     * Returns the JCA {@link java.security.Signature} algorithm to be used for signing an OTA
+     * or v1 signing an APK using the private key corresponding to the provided certificate and the
+     * provided digest algorithm (see {@code USE_SHA1} and {@code USE_SHA256} constants).
+     */
+    private static String getJcaSignatureAlgorithmForV1SigningOrOta(
+            X509Certificate cert, int hash) {
+        String sigAlgDigestPrefix;
+        switch (hash) {
+            case USE_SHA1:
+                sigAlgDigestPrefix = "SHA1";
+                break;
+            case USE_SHA256:
+                sigAlgDigestPrefix = "SHA256";
+                break;
+            default:
+                throw new IllegalArgumentException("Unknown hash ID: " + hash);
+        }
+
+        String keyAlgorithm = cert.getPublicKey().getAlgorithm();
+        if ("RSA".equalsIgnoreCase(keyAlgorithm)) {
+            return sigAlgDigestPrefix + "withRSA";
+        } else if ("EC".equalsIgnoreCase(keyAlgorithm)) {
+            return sigAlgDigestPrefix + "withECDSA";
         } else {
-            throw new IllegalArgumentException("unsupported key type: " + keyType);
+            throw new IllegalArgumentException("Unsupported key algorithm: " + keyAlgorithm);
         }
     }
 
-    // Files matching this pattern are not copied to the output.
-    private static Pattern stripPattern =
-        Pattern.compile("^(META-INF/((.*)[.](SF|RSA|DSA|EC)|com/android/otacert))|(" +
-                        Pattern.quote(JarFile.MANIFEST_NAME) + ")$");
+    /* Files matching this pattern are not copied to the output. */
+    private static final Pattern STRIP_PATTERN =
+            Pattern.compile("^(META-INF/((.*)[.](SF|RSA|DSA|EC)|com/android/otacert))|("
+                    + Pattern.quote(JarFile.MANIFEST_NAME) + ")$");
 
     private static X509Certificate readPublicKey(File file)
         throws IOException, GeneralSecurityException {
@@ -282,8 +313,9 @@
      * Add the hash(es) of every file to the manifest, creating it if
      * necessary.
      */
-    private static Manifest addDigestsToManifest(JarFile jar, int hashes)
-        throws IOException, GeneralSecurityException {
+    private static Manifest addDigestsToManifest(
+            JarFile jar, Pattern ignoredFilenamePattern, int hashes)
+                    throws IOException, GeneralSecurityException {
         Manifest input = jar.getManifest();
         Manifest output = new Manifest();
         Attributes main = output.getMainAttributes();
@@ -319,8 +351,9 @@
 
         for (JarEntry entry: byName.values()) {
             String name = entry.getName();
-            if (!entry.isDirectory() &&
-                (stripPattern == null || !stripPattern.matcher(name).matches())) {
+            if (!entry.isDirectory()
+                    && (ignoredFilenamePattern == null
+                            || !ignoredFilenamePattern.matcher(name).matches())) {
                 InputStream data = jar.getInputStream(entry);
                 while ((num = data.read(buffer)) > 0) {
                     if (md_sha1 != null) md_sha1.update(buffer, 0, num);
@@ -363,16 +396,13 @@
      * Add a copy of the public key to the archive; this should
      * exactly match one of the files in
      * /system/etc/security/otacerts.zip on the device.  (The same
-     * cert can be extracted from the CERT.RSA file but this is much
-     * easier to get at.)
+     * cert can be extracted from the OTA update package's signature
+     * block but this is much easier to get at.)
      */
     private static void addOtacert(JarOutputStream outputJar,
                                    File publicKeyFile,
-                                   long timestamp,
-                                   Manifest manifest,
-                                   int hash)
+                                   long timestamp)
         throws IOException, GeneralSecurityException {
-        MessageDigest md = MessageDigest.getInstance(hash == USE_SHA1 ? "SHA1" : "SHA256");
 
         JarEntry je = new JarEntry(OTACERT_NAME);
         je.setTime(timestamp);
@@ -382,14 +412,8 @@
         int read;
         while ((read = input.read(b)) != -1) {
             outputJar.write(b, 0, read);
-            md.update(b, 0, read);
         }
         input.close();
-
-        Attributes attr = new Attributes();
-        attr.putValue(hash == USE_SHA1 ? "SHA1-Digest" : "SHA-256-Digest",
-                      new String(Base64.encode(md.digest()), "ASCII"));
-        manifest.getEntries().put(OTACERT_NAME, attr);
     }
 
 
@@ -483,7 +507,7 @@
 
     /** Sign data and write the digital signature to 'out'. */
     private static void writeSignatureBlock(
-        CMSTypedData data, X509Certificate publicKey, PrivateKey privateKey, int minSdkVersion,
+        CMSTypedData data, X509Certificate publicKey, PrivateKey privateKey, int hash,
         OutputStream out)
         throws IOException,
                CertificateEncodingException,
@@ -495,7 +519,8 @@
 
         CMSSignedDataGenerator gen = new CMSSignedDataGenerator();
         ContentSigner signer =
-                new JcaContentSignerBuilder(getSignatureAlgorithm(publicKey, minSdkVersion))
+                new JcaContentSignerBuilder(
+                        getJcaSignatureAlgorithmForV1SigningOrOta(publicKey, hash))
                         .build(privateKey);
         gen.addSignerInfoGenerator(
             new JcaSignerInfoGeneratorBuilder(
@@ -513,18 +538,31 @@
     }
 
     /**
-     * Copy all the files in a manifest from input to output.  We set
-     * the modification times in the output to a fixed time, so as to
-     * reduce variation in the output file and make incremental OTAs
-     * more efficient.
+     * Copy all JAR entries from input to output. We set the modification times in the output to a
+     * fixed time, so as to reduce variation in the output file and make incremental OTAs more
+     * efficient.
      */
-    private static void copyFiles(Manifest manifest, JarFile in, JarOutputStream out,
-                                  long timestamp, int defaultAlignment) throws IOException {
+    private static void copyFiles(JarFile in,
+            Pattern ignoredFilenamePattern,
+            JarOutputStream out,
+            long timestamp,
+            int defaultAlignment) throws IOException {
         byte[] buffer = new byte[4096];
         int num;
 
-        Map<String, Attributes> entries = manifest.getEntries();
-        ArrayList<String> names = new ArrayList<String>(entries.keySet());
+        ArrayList<String> names = new ArrayList<String>();
+        for (Enumeration<JarEntry> e = in.entries(); e.hasMoreElements();) {
+            JarEntry entry = e.nextElement();
+            if (entry.isDirectory()) {
+                continue;
+            }
+            String entryName = entry.getName();
+            if ((ignoredFilenamePattern != null)
+                    && (ignoredFilenamePattern.matcher(entryName).matches())) {
+                continue;
+            }
+            names.add(entryName);
+        }
         Collections.sort(names);
 
         boolean firstEntry = true;
@@ -686,21 +724,21 @@
         private final File publicKeyFile;
         private final X509Certificate publicKey;
         private final PrivateKey privateKey;
+        private final int hash;
         private final long timestamp;
-        private final int minSdkVersion;
         private final OutputStream outputStream;
         private final ASN1ObjectIdentifier type;
         private WholeFileSignerOutputStream signer;
 
         public CMSSigner(JarFile inputJar, File publicKeyFile,
-                         X509Certificate publicKey, PrivateKey privateKey, long timestamp,
-                         int minSdkVersion, OutputStream outputStream) {
+                         X509Certificate publicKey, PrivateKey privateKey, int hash,
+                         long timestamp, OutputStream outputStream) {
             this.inputJar = inputJar;
             this.publicKeyFile = publicKeyFile;
             this.publicKey = publicKey;
             this.privateKey = privateKey;
+            this.hash = hash;
             this.timestamp = timestamp;
-            this.minSdkVersion = minSdkVersion;
             this.outputStream = outputStream;
             this.type = new ASN1ObjectIdentifier(CMSObjectIdentifiers.data.getId());
         }
@@ -725,19 +763,8 @@
                 signer = new WholeFileSignerOutputStream(out, outputStream);
                 JarOutputStream outputJar = new JarOutputStream(signer);
 
-                int hash = getDigestAlgorithm(publicKey, minSdkVersion);
-
-                Manifest manifest = addDigestsToManifest(inputJar, hash);
-                copyFiles(manifest, inputJar, outputJar, timestamp, 0);
-                addOtacert(outputJar, publicKeyFile, timestamp, manifest, hash);
-
-                signFile(manifest,
-                         new X509Certificate[]{ publicKey },
-                         new PrivateKey[]{ privateKey },
-                         timestamp,
-                         minSdkVersion,
-                         false, // Don't sign using APK Signature Scheme v2
-                         outputJar);
+                copyFiles(inputJar, STRIP_PATTERN, outputJar, timestamp, 0);
+                addOtacert(outputJar, publicKeyFile, timestamp);
 
                 signer.notifyClosing();
                 outputJar.close();
@@ -753,7 +780,7 @@
                    CertificateEncodingException,
                    OperatorCreationException,
                    CMSException {
-            SignApk.writeSignatureBlock(this, publicKey, privateKey, minSdkVersion, temp);
+            SignApk.writeSignatureBlock(this, publicKey, privateKey, hash, temp);
         }
 
         public WholeFileSignerOutputStream getSigner() {
@@ -763,10 +790,10 @@
 
     private static void signWholeFile(JarFile inputJar, File publicKeyFile,
                                       X509Certificate publicKey, PrivateKey privateKey,
-                                      long timestamp, int minSdkVersion,
+                                      int hash, long timestamp,
                                       OutputStream outputStream) throws Exception {
         CMSSigner cmsOut = new CMSSigner(inputJar, publicKeyFile,
-                publicKey, privateKey, timestamp, minSdkVersion, outputStream);
+                publicKey, privateKey, hash, timestamp, outputStream);
 
         ByteArrayOutputStream temp = new ByteArrayOutputStream();
 
@@ -831,9 +858,8 @@
     }
 
     private static void signFile(Manifest manifest,
-                                 X509Certificate[] publicKey, PrivateKey[] privateKey,
+                                 X509Certificate[] publicKey, PrivateKey[] privateKey, int[] hash,
                                  long timestamp,
-                                 int minSdkVersion,
                                  boolean additionallySignedUsingAnApkSignatureScheme,
                                  JarOutputStream outputJar)
         throws Exception {
@@ -855,7 +881,7 @@
             writeSignatureFile(
                     manifest,
                     baos,
-                    getDigestAlgorithm(publicKey[k], minSdkVersion),
+                    hash[k],
                     additionallySignedUsingAnApkSignatureScheme);
             byte[] signedData = baos.toByteArray();
             outputJar.write(signedData);
@@ -868,7 +894,7 @@
             je.setTime(timestamp);
             outputJar.putNextEntry(je);
             writeSignatureBlock(new CMSProcessableByteArray(signedData),
-                                publicKey[k], privateKey[k], minSdkVersion, outputJar);
+                                publicKey[k], privateKey[k], hash[k], outputJar);
         }
     }
 
@@ -992,7 +1018,7 @@
             } else if ("EC".equalsIgnoreCase(keyAlgorithm)) {
                 return ApkSignerV2.SIGNATURE_ECDSA_WITH_SHA512;
             } else if ("DSA".equalsIgnoreCase(keyAlgorithm)) {
-                return ApkSignerV2.SIGNATURE_DSA_WITH_SHA512;
+                throw new IllegalArgumentException("SHA-512 is not supported with DSA");
             } else {
                 throw new IllegalArgumentException("Unsupported key algorithm: " + keyAlgorithm);
             }
@@ -1075,7 +1101,6 @@
 
         JarFile inputJar = null;
         FileOutputStream outputFile = null;
-        int hashes = 0;
 
         try {
             File firstPublicKeyFile = new File(args[argstart+0]);
@@ -1085,7 +1110,6 @@
                 for (int i = 0; i < numKeys; ++i) {
                     int argNum = argstart + i*2;
                     publicKey[i] = readPublicKey(new File(args[argNum]));
-                    hashes |= getDigestAlgorithm(publicKey[i], minSdkVersion);
                 }
             } catch (IllegalArgumentException e) {
                 System.err.println(e);
@@ -1111,10 +1135,11 @@
             // NOTE: Signing currently recompresses any compressed entries using Deflate (default
             // compression level for OTA update files and maximum compression level for APKs).
             if (signWholeFile) {
-                SignApk.signWholeFile(inputJar, firstPublicKeyFile,
-                                      publicKey[0], privateKey[0],
-                                      timestamp, minSdkVersion,
-                                      outputFile);
+                int digestAlgorithm = getDigestAlgorithmForOta(publicKey[0]);
+                signWholeFile(inputJar, firstPublicKeyFile,
+                        publicKey[0], privateKey[0], digestAlgorithm,
+                        timestamp,
+                        outputFile);
             } else {
                 // Generate, in memory, an APK signed using standard JAR Signature Scheme.
                 ByteArrayOutputStream v1SignedApkBuf = new ByteArrayOutputStream();
@@ -1122,12 +1147,19 @@
                 // Use maximum compression for compressed entries because the APK lives forever on
                 // the system partition.
                 outputJar.setLevel(9);
-                Manifest manifest = addDigestsToManifest(inputJar, hashes);
-                copyFiles(manifest, inputJar, outputJar, timestamp, alignment);
+                int v1DigestAlgorithmBitSet = 0;
+                int[] v1DigestAlgorithm = new int[numKeys];
+                for (int i = 0; i < numKeys; ++i) {
+                    v1DigestAlgorithm[i] = getV1DigestAlgorithmForApk(publicKey[i], minSdkVersion);
+                    v1DigestAlgorithmBitSet |= v1DigestAlgorithm[i];
+                }
+                Manifest manifest =
+                        addDigestsToManifest(inputJar, STRIP_PATTERN, v1DigestAlgorithmBitSet);
+                copyFiles(inputJar, STRIP_PATTERN, outputJar, timestamp, alignment);
                 signFile(
                         manifest,
-                        publicKey, privateKey,
-                        timestamp, minSdkVersion, signUsingApkSignatureSchemeV2,
+                        publicKey, privateKey, v1DigestAlgorithm,
+                        timestamp, signUsingApkSignatureSchemeV2,
                         outputJar);
                 outputJar.close();
                 ByteBuffer v1SignedApk = ByteBuffer.wrap(v1SignedApkBuf.toByteArray());
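
After this change SignApk picks digests in two independent places: getDigestAlgorithmForOta() keys off the certificate's signature algorithm (SHA1/MD5-with-RSA stays on SHA-1 for OTA packages), while getV1DigestAlgorithmForApk() keys off the key algorithm and minSdkVersion. A Python sketch of the same decision tables (thresholds from the hunks above; the 1/2 flag values are assumed stand-ins for USE_SHA1/USE_SHA256):

    USE_SHA1, USE_SHA256 = 1, 2  # assumed bit-flag values

    def v1_digest_for_apk(key_algorithm, min_sdk_version):
        """Mirror of getV1DigestAlgorithmForApk: key algorithm + minSdkVersion."""
        if key_algorithm.upper() == "RSA":
            return USE_SHA1 if min_sdk_version < 18 else USE_SHA256
        if key_algorithm.upper() == "EC":
            if min_sdk_version < 18:
                raise ValueError("ECDSA only supported for minSdkVersion 18 and higher")
            return USE_SHA1 if min_sdk_version < 21 else USE_SHA256
        raise ValueError("Unsupported key algorithm: " + key_algorithm)

    def digest_for_ota(cert_sig_algorithm):
        """Mirror of getDigestAlgorithmForOta: certificate signature algorithm only."""
        sig = cert_sig_algorithm.upper()
        if sig in ("SHA1WITHRSA", "MD5WITHRSA"):
            return USE_SHA1
        if sig.startswith("SHA256WITH"):
            return USE_SHA256
        raise ValueError("unsupported signature algorithm: " + sig)

    assert v1_digest_for_apk("RSA", 15) == USE_SHA1    # old minSdkVersion stays on SHA-1
    assert v1_digest_for_apk("EC", 23) == USE_SHA256   # modern EC-signed APKs use SHA-256
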
diff --git a/tools/warn.py b/tools/warn.py
index 8097123..a4a9e16 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -1,12 +1,20 @@
 #!/usr/bin/env python
 # This file uses the following encoding: utf-8
 
+import argparse
 import sys
 import re
 
-if len(sys.argv) == 1:
-    print 'usage: ' + sys.argv[0] + ' <build.log>'
-    sys.exit()
+parser = argparse.ArgumentParser(description='Convert a build log into HTML')
+parser.add_argument('--url',
+                    help='Root URL of an Android source code tree prefixed '
+                    'before files in warnings')
+parser.add_argument('--separator',
+                    help='Separator between the end of a URL and the line '
+                    'number argument. e.g. #')
+parser.add_argument(dest='buildlog', metavar='build.log',
+                    help='Path to build.log file')
+args = parser.parse_args()
 
 # if you add another level, don't forget to give it a color below
 class severity:
@@ -16,7 +24,8 @@
     HIGH=2
     MEDIUM=3
     LOW=4
-    HARMLESS=5
+    TIDY=5
+    HARMLESS=6
 
 def colorforseverity(sev):
     if sev == severity.FIXMENOW:
@@ -27,12 +36,31 @@
         return 'orange'
     if sev == severity.LOW:
         return 'yellow'
+    if sev == severity.TIDY:
+        return 'peachpuff'
     if sev == severity.HARMLESS:
         return 'limegreen'
     if sev == severity.UNKNOWN:
-        return 'blue'
+        return 'lightblue'
     return 'grey'
 
+def headerforseverity(sev):
+    if sev == severity.FIXMENOW:
+        return 'Critical warnings, fix me now'
+    if sev == severity.HIGH:
+        return 'High severity warnings'
+    if sev == severity.MEDIUM:
+        return 'Medium severity warnings'
+    if sev == severity.LOW:
+        return 'Low severity warnings'
+    if sev == severity.HARMLESS:
+        return 'Harmless warnings'
+    if sev == severity.TIDY:
+        return 'Clang-Tidy warnings'
+    if sev == severity.UNKNOWN:
+        return 'Unknown warnings'
+    return 'Unhandled warnings'
+
 warnpatterns = [
     { 'category':'make',    'severity':severity.MEDIUM,   'members':[], 'option':'',
         'description':'make: overriding commands/ignoring old commands',
@@ -46,9 +74,35 @@
         'patterns':[r".*: warning: conflicting types for '.+'"] },
     { 'category':'C/C++',   'severity':severity.HIGH,     'members':[], 'option':'-Wtype-limits',
         'description':'Expression always evaluates to true or false',
-        'patterns':[r".*: warning: comparison is always false due to limited range of data type",
-                    r".*: warning: comparison of unsigned expression >= 0 is always true",
-                    r".*: warning: comparison of unsigned expression < 0 is always false"] },
+        'patterns':[r".*: warning: comparison is always .+ due to limited range of data type",
+                    r".*: warning: comparison of unsigned .*expression .+ is always true",
+                    r".*: warning: comparison of unsigned .*expression .+ is always false"] },
+    { 'category':'C/C++',   'severity':severity.HIGH,     'members':[], 'option':'',
+        'description':'Potential leak of memory, bad free, use after free',
+        'patterns':[r".*: warning: Potential leak of memory",
+                    r".*: warning: Potential memory leak",
+                    r".*: warning: Memory allocated by .+ should be deallocated by .+ not .+",
+                    r".*: warning: 'delete' applied to a pointer that was allocated",
+                    r".*: warning: Use of memory after it is freed",
+                    r".*: warning: Argument to .+ is the address of .+ variable",
+                    r".*: warning: Argument to free\(\) is offset by .+ of memory allocated by",
+                    r".*: warning: Attempt to .+ released memory"] },
+    { 'category':'C/C++',   'severity':severity.HIGH,     'members':[], 'option':'',
+        'description':'Return address of stack memory',
+        'patterns':[r".*: warning: Address of stack memory .+ returned to caller",
+                    r".*: warning: Address of stack memory .+ will be a dangling reference"] },
+    { 'category':'C/C++',   'severity':severity.HIGH,     'members':[], 'option':'',
+        'description':'Problem with vfork',
+        'patterns':[r".*: warning: This .+ is prohibited after a successful vfork",
+                    r".*: warning: Call to function 'vfork' is insecure "] },
+    { 'category':'C/C++',   'severity':severity.HIGH,     'members':[], 'option':'infinite-recursion',
+        'description':'Infinite recursion',
+        'patterns':[r".*: warning: all paths through this function will call itself"] },
+    { 'category':'C/C++',   'severity':severity.HIGH,     'members':[], 'option':'',
+        'description':'Potential buffer overflow',
+        'patterns':[r".*: warning: Size argument is greater than .+ the destination buffer",
+                    r".*: warning: Potential buffer overflow.",
+                    r".*: warning: String copy function overflows destination buffer"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'',
         'description':'Incompatible pointer types',
         'patterns':[r".*: warning: assignment from incompatible pointer type",
@@ -64,10 +118,16 @@
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wunused',
         'description':'Unused function, variable or label',
         'patterns':[r".*: warning: '.+' defined but not used",
+                    r".*: warning: unused function '.+'",
+                    r".*: warning: private field '.+' is not used",
                     r".*: warning: unused variable '.+'"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wunused-value',
-        'description':'Statement with no effect',
-        'patterns':[r".*: warning: statement with no effect"] },
+        'description':'Statement with no effect or result unused',
+        'patterns':[r".*: warning: statement with no effect",
+                    r".*: warning: expression result unused"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wunused-result',
+        'description':'Ignoring return value of function',
+        'patterns':[r".*: warning: ignoring return value of function .+Wunused-result"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wmissing-field-initializers',
         'description':'Missing initializer',
         'patterns':[r".*: warning: missing initializer"] },
@@ -76,10 +136,19 @@
         'patterns':[r".*: warning: \(near initialization for '.+'\)"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wformat',
         'description':'Format string does not match arguments',
-        'patterns':[r".*: warning: format '.+' expects type '.+', but argument [0-9]+ has type '.+'"] },
+        'patterns':[r".*: warning: format '.+' expects type '.+', but argument [0-9]+ has type '.+'",
+                    r".*: warning: more '%' conversions than data arguments",
+                    r".*: warning: data argument not used by format string",
+                    r".*: warning: incomplete format specifier",
+                    r".*: warning: format .+ expects .+ but argument .+Wformat=",
+                    r".*: warning: field precision should have .+ but argument has .+Wformat",
+                    r".*: warning: format specifies type .+ but the argument has .*type .+Wformat"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wformat-extra-args',
         'description':'Too many arguments for format string',
         'patterns':[r".*: warning: too many arguments for format"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wformat-invalid-specifier',
+        'description':'Invalid format specifier',
+        'patterns':[r".*: warning: invalid .+ specifier '.+'.+format-invalid-specifier"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wsign-compare',
         'description':'Comparison between signed and unsigned',
         'patterns':[r".*: warning: comparison between signed and unsigned",
@@ -104,6 +173,8 @@
         'description':'Qualifier discarded',
         'patterns':[r".*: warning: passing argument [0-9]+ of '.+' discards qualifiers from pointer target type",
                     r".*: warning: assignment discards qualifiers from pointer target type",
+                    r".*: warning: passing .+ to parameter of type .+ discards qualifiers",
+                    r".*: warning: assigning to .+ from .+ discards qualifiers",
                     r".*: warning: return discards qualifiers from pointer target type"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wattributes',
         'description':'Attribute ignored',
@@ -125,7 +196,8 @@
         'patterns':[r".*: warning: '.+' may be used uninitialized in this function"] },
     { 'category':'C/C++',   'severity':severity.HIGH,     'members':[], 'option':'-Wuninitialized',
         'description':'Variable is used uninitialized',
-        'patterns':[r".*: warning: '.+' is used uninitialized in this function"] },
+        'patterns':[r".*: warning: '.+' is used uninitialized in this function",
+                    r".*: warning: variable '.+' is uninitialized when used here"] },
     { 'category':'ld',      'severity':severity.MEDIUM,   'members':[], 'option':'-fshort-enums',
         'description':'ld: possible enum size mismatch',
         'patterns':[r".*: warning: .* uses variable-size enums yet the output is to use 32-bit enums; use of enum values across objects may fail"] },
@@ -149,6 +221,9 @@
         'patterns':[r".*: warning: suggest explicit braces to avoid ambiguous 'else'",
                     r".*: warning: suggest parentheses around arithmetic in operand of '.+'",
                     r".*: warning: suggest parentheses around comparison in operand of '.+'",
+                    r".*: warning: logical not is only applied to the left hand side of this comparison",
+                    r".*: warning: using the result of an assignment as a condition without parentheses",
+                    r".*: warning: .+ has lower precedence than .+ be evaluated first .+Wparentheses",
                     r".*: warning: suggest parentheses around '.+?' .+ '.+?'",
                     r".*: warning: suggest parentheses around assignment used as truth value"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'',
@@ -157,6 +232,16 @@
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wimplicit int',
         'description':'No type or storage class (will default to int)',
         'patterns':[r".*: warning: data definition has no type or storage class"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'',
+        'description':'Null pointer',
+        'patterns':[r".*: warning: Dereference of null pointer",
+                    r".*: warning: Called .+ pointer is null",
+                    r".*: warning: Forming reference to null pointer",
+                    r".*: warning: Returning null reference",
+                    r".*: warning: Null pointer passed as an argument to a 'nonnull' parameter",
+                    r".*: warning: .+ results in a null pointer dereference",
+                    r".*: warning: Access to .+ results in a dereference of a null pointer",
+                    r".*: warning: Null pointer argument in"] },
     { 'category':'cont.',   'severity':severity.SKIP,     'members':[], 'option':'',
         'description':'',
         'patterns':[r".*: warning: type defaults to 'int' in declaration of '.+'"] },
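
Each warnpatterns entry pairs a severity with one or more regexes, and matching build-log lines are collected into its 'members' list; the new --url/--separator flags only change how a matched file:line is rendered as a link. A hedged sketch of both steps (the script's actual matching and HTML emission are outside this section, so the helpers below are illustrative only):

    import re

    severity_MEDIUM = 3  # matches severity.MEDIUM above
    warnpatterns = [
        {'category': 'C/C++', 'severity': severity_MEDIUM, 'members': [], 'option': '-Wunused',
         'description': 'Unused function, variable or label',
         'patterns': [r".*: warning: unused variable '.+'"]},
    ]

    def classify(line):
        for entry in warnpatterns:
            if any(re.match(p, line) for p in entry['patterns']):
                entry['members'].append(line)
                return entry
        return None

    def link(path, lineno, url=None, separator=":"):
        # With --url http://cs.example.com/android --separator '#' (hypothetical values),
        # "foo/bar.c", 42 becomes "http://cs.example.com/android/foo/bar.c#42".
        return "%s/%s%s%s" % (url, path, separator, lineno) if url else "%s:%s" % (path, lineno)

    hit = classify("foo/bar.c:42:5: warning: unused variable 'x'")
    print(hit['description'] if hit else "unclassified",
          link("foo/bar.c", 42, "http://cs.example.com/android", "#"))
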
@@ -213,13 +298,1016 @@
         'patterns':[r".*: warning: previous declaration of '.+' was here"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wswitch-enum',
         'description':'Enum value not handled in switch',
-        'patterns':[r".*: warning: enumeration value '.+' not handled in switch"] },
+        'patterns':[r".*: warning: .*enumeration value.* not handled in switch.+Wswitch"] },
     { 'category':'java',    'severity':severity.MEDIUM,   'members':[], 'option':'-encoding',
         'description':'Java: Non-ascii characters used, but ascii encoding specified',
         'patterns':[r".*: warning: unmappable character for encoding ascii"] },
     { 'category':'java',    'severity':severity.MEDIUM,   'members':[], 'option':'',
         'description':'Java: Non-varargs call of varargs method with inexact argument type for last parameter',
         'patterns':[r".*: warning: non-varargs call of varargs method with inexact argument type for last parameter"] },
+    { 'category':'java',    'severity':severity.MEDIUM,   'members':[], 'option':'',
+        'description':'Java: Unchecked method invocation',
+        'patterns':[r".*: warning: \[unchecked\] unchecked method invocation: .+ in class .+"] },
+    { 'category':'java',    'severity':severity.MEDIUM,   'members':[], 'option':'',
+        'description':'Java: Unchecked conversion',
+        'patterns':[r".*: warning: \[unchecked\] unchecked conversion"] },
+
+    # Warnings from Error Prone.
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description': 'Java: Use of deprecated member',
+     'patterns': [r'.*: warning: \[deprecation\] .+']},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description': 'Java: Unchecked conversion',
+     'patterns': [r'.*: warning: \[unchecked\] .+']},
+
+    # Warnings from Error Prone (auto generated list).
+    {'category': 'java',
+     'severity': severity.LOW,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Deprecated item is not annotated with @Deprecated',
+     'patterns': [r".*: warning: \[DepAnn\] .+"]},
+    {'category': 'java',
+     'severity': severity.LOW,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Fallthrough warning suppression has no effect if warning is suppressed',
+     'patterns': [r".*: warning: \[FallthroughSuppression\] .+"]},
+    {'category': 'java',
+     'severity': severity.LOW,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Prefer \'L\' to \'l\' for the suffix to long literals',
+     'patterns': [r".*: warning: \[LongLiteralLowerCaseSuffix\] .+"]},
+    {'category': 'java',
+     'severity': severity.LOW,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: @Binds is a more efficient and declarative mechanism for delegating a binding.',
+     'patterns': [r".*: warning: \[UseBinds\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Assertions may be disabled at runtime and do not guarantee that execution will halt here; consider throwing an exception instead',
+     'patterns': [r".*: warning: \[AssertFalse\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Classes that implement Annotation must override equals and hashCode. Consider using AutoAnnotation instead of implementing Annotation by hand.',
+     'patterns': [r".*: warning: \[BadAnnotationImplementation\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: BigDecimal(double) and BigDecimal.valueOf(double) may lose precision, prefer BigDecimal(String) or BigDecimal(long)',
+     'patterns': [r".*: warning: \[BigDecimalLiteralDouble\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Mockito cannot mock final classes',
+     'patterns': [r".*: warning: \[CannotMockFinalClass\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: This code, which counts elements using a loop, can be replaced by a simpler library method',
+     'patterns': [r".*: warning: \[ElementsCountedInLoop\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Empty top-level type declaration',
+     'patterns': [r".*: warning: \[EmptyTopLevelDeclaration\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Classes that override equals should also override hashCode.',
+     'patterns': [r".*: warning: \[EqualsHashCode\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: An equality test between objects with incompatible types always returns false',
+     'patterns': [r".*: warning: \[EqualsIncompatibleType\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: If you return or throw from a finally, then values returned or thrown from the try-catch block will be ignored. Consider using try-with-resources instead.',
+     'patterns': [r".*: warning: \[Finally\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: This annotation has incompatible modifiers as specified by its @IncompatibleModifiers annotation',
+     'patterns': [r".*: warning: \[IncompatibleModifiers\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Class should not implement both `Iterable` and `Iterator`',
+     'patterns': [r".*: warning: \[IterableAndIterator\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Floating-point comparison without error tolerance',
+     'patterns': [r".*: warning: \[JUnit3FloatingPointComparisonWithoutDelta\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Test class inherits from JUnit 3\'s TestCase but has JUnit 4 @Test annotations.',
+     'patterns': [r".*: warning: \[JUnitAmbiguousTestClass\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Enum switch statement is missing cases',
+     'patterns': [r".*: warning: \[MissingCasesInEnumSwitch\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Not calling fail() when expecting an exception masks bugs',
+     'patterns': [r".*: warning: \[MissingFail\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: method overrides method in supertype; expected @Override',
+     'patterns': [r".*: warning: \[MissingOverride\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Source files should not contain multiple top-level class declarations',
+     'patterns': [r".*: warning: \[MultipleTopLevelClasses\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: This update of a volatile variable is non-atomic',
+     'patterns': [r".*: warning: \[NonAtomicVolatileUpdate\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Static import of member uses non-canonical name',
+     'patterns': [r".*: warning: \[NonCanonicalStaticMemberImport\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: equals method doesn\'t override Object.equals',
+     'patterns': [r".*: warning: \[NonOverridingEquals\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Constructors should not be annotated with @Nullable since they cannot return null',
+     'patterns': [r".*: warning: \[NullableConstructor\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: @Nullable should not be used for primitive types since they cannot be null',
+     'patterns': [r".*: warning: \[NullablePrimitive\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: void-returning methods should not be annotated with @Nullable, since they cannot return null',
+     'patterns': [r".*: warning: \[NullableVoid\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Package names should match the directory they are declared in',
+     'patterns': [r".*: warning: \[PackageLocation\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Second argument to Preconditions.* is a call to String.format(), which can be unwrapped',
+     'patterns': [r".*: warning: \[PreconditionsErrorMessageEagerEvaluation\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Preconditions only accepts the %s placeholder in error message strings',
+     'patterns': [r".*: warning: \[PreconditionsInvalidPlaceholder\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Passing a primitive array to a varargs method is usually wrong',
+     'patterns': [r".*: warning: \[PrimitiveArrayPassedToVarargsMethod\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Protobuf fields cannot be null, so this check is redundant',
+     'patterns': [r".*: warning: \[ProtoFieldPreconditionsCheckNotNull\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: This annotation is missing required modifiers as specified by its @RequiredModifiers annotation',
+     'patterns': [r".*: warning: \[RequiredModifiers\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: A static variable or method should not be accessed from an object instance',
+     'patterns': [r".*: warning: \[StaticAccessedFromInstance\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: String comparison using reference equality instead of value equality',
+     'patterns': [r".*: warning: \[StringEquality\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Declaring a type parameter that is only used in the return type is a misuse of generics: operations on the type parameter are unchecked, it hides unsafe casts at invocations of the method, and it interacts badly with method overload resolution.',
+     'patterns': [r".*: warning: \[TypeParameterUnusedInFormals\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Using static imports for types is unnecessary',
+     'patterns': [r".*: warning: \[UnnecessaryStaticImport\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Unsynchronized method overrides a synchronized method.',
+     'patterns': [r".*: warning: \[UnsynchronizedOverridesSynchronized\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Non-constant variable missing @Var annotation',
+     'patterns': [r".*: warning: \[Var\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Because of spurious wakeups, Object.wait() and Condition.await() must always be called in a loop',
+     'patterns': [r".*: warning: \[WaitNotInLoop\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Subclasses of Fragment must be instantiable via Class#newInstance(): the class must be public, static and have a public nullary constructor',
+     'patterns': [r".*: warning: \[FragmentNotInstantiable\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Hardcoded reference to /sdcard',
+     'patterns': [r".*: warning: \[HardCodedSdCardPath\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Incompatible type as argument to Object-accepting Java collections method',
+     'patterns': [r".*: warning: \[CollectionIncompatibleType\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: @AssistedInject and @Inject should not be used on different constructors in the same class.',
+     'patterns': [r".*: warning: \[AssistedInjectAndInjectOnConstructors\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Although Guice allows injecting final fields, doing so is not recommended because the injected value may not be visible to other threads.',
+     'patterns': [r".*: warning: \[GuiceInjectOnFinalField\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: This method is not annotated with @Inject, but it overrides a method that is annotated with @com.google.inject.Inject. Guice will inject this method, and it is recommended to annotate it explicitly.',
+     'patterns': [r".*: warning: \[OverridesGuiceInjectableMethod\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Double-checked locking on non-volatile fields is unsafe',
+     'patterns': [r".*: warning: \[DoubleCheckedLocking\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Writes to static fields should not be guarded by instance locks',
+     'patterns': [r".*: warning: \[StaticGuardedByInstance\] .+"]},
+    {'category': 'java',
+     'severity': severity.MEDIUM,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Synchronizing on non-final fields is not safe: if the field is ever updated, different threads may end up locking on different objects.',
+     'patterns': [r".*: warning: \[SynchronizeOnNonFinalField\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Reference equality used to compare arrays',
+     'patterns': [r".*: warning: \[ArrayEquals\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: hashcode method on array does not hash array contents',
+     'patterns': [r".*: warning: \[ArrayHashCode\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Calling toString on an array does not provide useful information',
+     'patterns': [r".*: warning: \[ArrayToString.*\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Arrays.asList does not autobox primitive arrays, as one might expect.',
+     'patterns': [r".*: warning: \[ArraysAsListPrimitiveArray\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: AsyncCallable should not return a null Future, only a Future whose result is null.',
+     'patterns': [r".*: warning: \[AsyncCallableReturnsNull\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: AsyncFunction should not return a null Future, only a Future whose result is null.',
+     'patterns': [r".*: warning: \[AsyncFunctionReturnsNull\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Possible sign flip from narrowing conversion',
+     'patterns': [r".*: warning: \[BadComparable\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Shift by an amount that is out of range',
+     'patterns': [r".*: warning: \[BadShiftAmount\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: valueOf provides better time and space performance',
+     'patterns': [r".*: warning: \[BoxedPrimitiveConstructor\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: The called constructor accepts a parameter with the same name and type as one of its caller\'s parameters, but its caller doesn\'t pass that parameter to it.  It\'s likely that it was intended to.',
+     'patterns': [r".*: warning: \[ChainingConstructorIgnoresParameter\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Ignored return value of method that is annotated with @CheckReturnValue',
+     'patterns': [r".*: warning: \[CheckReturnValue\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Inner class is non-static but does not reference enclosing class',
+     'patterns': [r".*: warning: \[ClassCanBeStatic\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: The source file name should match the name of the top-level class it contains',
+     'patterns': [r".*: warning: \[ClassName\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: This comparison method violates the contract',
+     'patterns': [r".*: warning: \[ComparisonContractViolated\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Comparison to value that is out of range for the compared type',
+     'patterns': [r".*: warning: \[ComparisonOutOfRange\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Non-compile-time constant expression passed to parameter with @CompileTimeConstant type annotation.',
+     'patterns': [r".*: warning: \[CompileTimeConstant\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Exception created but not thrown',
+     'patterns': [r".*: warning: \[DeadException\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Division by integer literal zero',
+     'patterns': [r".*: warning: \[DivZero\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Empty statement after if',
+     'patterns': [r".*: warning: \[EmptyIf\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: == NaN always returns false; use the isNaN methods instead',
+     'patterns': [r".*: warning: \[EqualsNaN\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Method annotated @ForOverride must be protected or package-private and only invoked from declaring class',
+     'patterns': [r".*: warning: \[ForOverride\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Futures.getChecked requires a checked exception type with a standard constructor.',
+     'patterns': [r".*: warning: \[FuturesGetCheckedIllegalExceptionType\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Calling getClass() on an object of type Class returns the Class object for java.lang.Class; you probably meant to operate on the object directly',
+     'patterns': [r".*: warning: \[GetClassOnClass\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: An object is tested for equality to itself using Guava Libraries',
+     'patterns': [r".*: warning: \[GuavaSelfEquals\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: contains() is a legacy method that is equivalent to containsValue()',
+     'patterns': [r".*: warning: \[HashtableContains\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Cipher.getInstance() is invoked using either the default settings or ECB mode',
+     'patterns': [r".*: warning: \[InsecureCipherMode\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Invalid syntax used for a regular expression',
+     'patterns': [r".*: warning: \[InvalidPatternSyntax\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: The argument to Class#isInstance(Object) should not be a Class',
+     'patterns': [r".*: warning: \[IsInstanceOfClass\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: jMock tests must have a @RunWith(JMock.class) annotation, or the Mockery field must have a @Rule JUnit annotation',
+     'patterns': [r".*: warning: \[JMockTestWithoutRunWithOrRuleAnnotation\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Test method will not be run; please prefix name with "test"',
+     'patterns': [r".*: warning: \[JUnit3TestNotRun\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: setUp() method will not be run; please add a @Before annotation',
+     'patterns': [r".*: warning: \[JUnit4SetUpNotRun\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: tearDown() method will not be run; please add an @After annotation',
+     'patterns': [r".*: warning: \[JUnit4TearDownNotRun\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Test method will not be run; please add @Test annotation',
+     'patterns': [r".*: warning: \[JUnit4TestNotRun\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Printf-like format string does not match its arguments',
+     'patterns': [r".*: warning: \[MalformedFormatString\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Use of "YYYY" (week year) in a date pattern without "ww" (week in year). You probably meant to use "yyyy" (year) instead.',
+     'patterns': [r".*: warning: \[MisusedWeekYear\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: A bug in Mockito will cause this test to fail at runtime with a ClassCastException',
+     'patterns': [r".*: warning: \[MockitoCast\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Missing method call for verify(mock) here',
+     'patterns': [r".*: warning: \[MockitoUsage\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Modifying a collection with itself',
+     'patterns': [r".*: warning: \[ModifyingCollectionWithItself\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Compound assignments to bytes, shorts, chars, and floats hide dangerous casts',
+     'patterns': [r".*: warning: \[NarrowingCompoundAssignment\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: @NoAllocation was specified on this method, but something was found that would trigger an allocation',
+     'patterns': [r".*: warning: \[NoAllocation\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Static import of type uses non-canonical name',
+     'patterns': [r".*: warning: \[NonCanonicalStaticImport\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: @CompileTimeConstant parameters should be final',
+     'patterns': [r".*: warning: \[NonFinalCompileTimeConstant\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Calling getAnnotation on an annotation that is not retained at runtime.',
+     'patterns': [r".*: warning: \[NonRuntimeAnnotation\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Numeric comparison using reference equality instead of value equality',
+     'patterns': [r".*: warning: \[NumericEquality\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Comparison using reference equality instead of value equality',
+     'patterns': [r".*: warning: \[OptionalEquality\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Varargs doesn\'t agree for overridden method',
+     'patterns': [r".*: warning: \[Overrides\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Literal passed as first argument to Preconditions.checkNotNull() can never be null',
+     'patterns': [r".*: warning: \[PreconditionsCheckNotNull\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: First argument to `Preconditions.checkNotNull()` is a primitive rather than an object reference',
+     'patterns': [r".*: warning: \[PreconditionsCheckNotNullPrimitive\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Protobuf fields cannot be null',
+     'patterns': [r".*: warning: \[ProtoFieldNullComparison\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Comparing protobuf fields of type String using reference equality',
+     'patterns': [r".*: warning: \[ProtoStringFieldReferenceEquality\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Check for non-whitelisted callers to RestrictedApiChecker.',
+     'patterns': [r".*: warning: \[RestrictedApiChecker\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Return value of this method must be used',
+     'patterns': [r".*: warning: \[ReturnValueIgnored\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Variable assigned to itself',
+     'patterns': [r".*: warning: \[SelfAssignment\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: An object is compared to itself',
+     'patterns': [r".*: warning: \[SelfComparision\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Variable compared to itself',
+     'patterns': [r".*: warning: \[SelfEquality\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: An object is tested for equality to itself',
+     'patterns': [r".*: warning: \[SelfEquals\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Comparison of a size >= 0 is always true, did you intend to check for non-emptiness?',
+     'patterns': [r".*: warning: \[SizeGreaterThanOrEqualsZero\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Calling toString on a Stream does not provide useful information',
+     'patterns': [r".*: warning: \[StreamToString\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: StringBuilder does not have a char constructor; this invokes the int constructor.',
+     'patterns': [r".*: warning: \[StringBuilderInitWithChar\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Suppressing "deprecated" is probably a typo for "deprecation"',
+     'patterns': [r".*: warning: \[SuppressWarningsDeprecated\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: throwIfUnchecked(knownCheckedException) is a no-op.',
+     'patterns': [r".*: warning: \[ThrowIfUncheckedKnownChecked\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Catching Throwable/Error masks failures from fail() or assert*() in the try block',
+     'patterns': [r".*: warning: \[TryFailThrowable\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Type parameter used as type qualifier',
+     'patterns': [r".*: warning: \[TypeParameterQualifier\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Non-generic methods should not be invoked with type arguments',
+     'patterns': [r".*: warning: \[UnnecessaryTypeArgument\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Instance created but never used',
+     'patterns': [r".*: warning: \[UnusedAnonymousClass\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Use of wildcard imports is forbidden',
+     'patterns': [r".*: warning: \[WildcardImport\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Method parameter has wrong package',
+     'patterns': [r".*: warning: \[ParameterPackage\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Certain resources in `android.R.string` have names that do not match their content',
+     'patterns': [r".*: warning: \[MislabeledAndroidString\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Return value of android.graphics.Rect.intersect() must be checked',
+     'patterns': [r".*: warning: \[RectIntersectReturnValueIgnored\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Invalid printf-style format string',
+     'patterns': [r".*: warning: \[FormatString\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: @AssistedInject and @Inject cannot be used on the same constructor.',
+     'patterns': [r".*: warning: \[AssistedInjectAndInjectOnSameConstructor\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Injected constructors cannot be optional nor have binding annotations',
+     'patterns': [r".*: warning: \[InjectedConstructorAnnotations\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: The target of a scoping annotation must be set to METHOD and/or TYPE.',
+     'patterns': [r".*: warning: \[InjectInvalidTargetingOnScopingAnnotation\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Abstract methods are not injectable with javax.inject.Inject.',
+     'patterns': [r".*: warning: \[JavaxInjectOnAbstractMethod\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: @javax.inject.Inject cannot be put on a final field.',
+     'patterns': [r".*: warning: \[JavaxInjectOnFinalField\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: A class may not have more than one injectable constructor.',
+     'patterns': [r".*: warning: \[MoreThanOneInjectableConstructor\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Using more than one qualifier annotation on the same element is not allowed.',
+     'patterns': [r".*: warning: \[InjectMoreThanOneQualifier\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: A class can be annotated with at most one scope annotation',
+     'patterns': [r".*: warning: \[InjectMoreThanOneScopeAnnotationOnClass\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Annotations cannot be both Qualifiers/BindingAnnotations and Scopes',
+     'patterns': [r".*: warning: \[OverlappingQualifierAndScopeAnnotation\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Scope annotation on an interface or abstract class is not allowed',
+     'patterns': [r".*: warning: \[InjectScopeAnnotationOnInterfaceOrAbstractClass\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Scoping and qualifier annotations must have runtime retention.',
+     'patterns': [r".*: warning: \[InjectScopeOrQualifierAnnotationRetention\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Dagger @Provides methods may not return null unless annotated with @Nullable',
+     'patterns': [r".*: warning: \[DaggerProvidesNull\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Scope annotation on implementation class of AssistedInject factory is not allowed',
+     'patterns': [r".*: warning: \[GuiceAssistedInjectScoping\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: A constructor cannot have two @Assisted parameters of the same type unless they are disambiguated with named @Assisted annotations. ',
+     'patterns': [r".*: warning: \[GuiceAssistedParameters\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: This method is not annotated with @Inject, but it overrides a method that is annotated with @javax.inject.Inject.',
+     'patterns': [r".*: warning: \[OverridesJavaxInjectableMethod\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Checks for unguarded accesses to fields and methods with @GuardedBy annotations',
+     'patterns': [r".*: warning: \[GuardedByChecker\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Invalid @GuardedBy expression',
+     'patterns': [r".*: warning: \[GuardedByValidator\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: Type declaration annotated with @Immutable is not immutable',
+     'patterns': [r".*: warning: \[Immutable\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: This method does not acquire the locks specified by its @LockMethod annotation',
+     'patterns': [r".*: warning: \[LockMethodChecker\] .+"]},
+    {'category': 'java',
+     'severity': severity.HIGH,
+     'members': [],
+     'option': '',
+     'description':
+         'Java: This method does not acquire the locks specified by its @UnlockMethod annotation',
+     'patterns': [r".*: warning: \[UnlockMethod\] .+"]},
+
+    {'category': 'java',
+     'severity': severity.UNKNOWN,
+     'members': [],
+     'option': '',
+     'description': 'Java: Unclassified/unrecognized warnings',
+     'patterns': [r".*: warning: \[.+\] .+"]},
+
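# Illustrative sketch (an assumption, not code from the patch above): entries like
# the ones just added are plain dicts of regexes, so classifying one build-log line
# presumably amounts to scanning each 'patterns' list in order. The helper name
# classify_line and the list name warn_patterns are hypothetical.
import re

def classify_line(line, warn_patterns):
    """Return the first pattern entry whose regex matches the warning line."""
    for entry in warn_patterns:
        for pattern in entry['patterns']:
            if re.search(pattern, line):
                entry['members'].append(line)  # bucket the matched warning
                return entry
    return None  # unmatched lines fall through to the catch-all entries

# For example, "Foo.java:10: warning: [unchecked] unchecked conversion" would be
# bucketed under 'Java: Unchecked conversion' with first-match semantics.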
     { 'category':'aapt',    'severity':severity.MEDIUM,   'members':[], 'option':'',
         'description':'aapt: No default translation',
         'patterns':[r".*: warning: string '.+' has no default translation in .*"] },
@@ -241,8 +1329,12 @@
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Warray-bounds',
         'description':'Array subscript out of bounds',
         'patterns':[r".*: warning: array subscript is above array bounds",
+                    r".*: warning: Array subscript is undefined",
                     r".*: warning: array subscript is below array bounds"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'',
+        'description':'Excess elements in initializer',
+        'patterns':[r".*: warning: excess elements in .+ initializer"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'',
         'description':'Decimal constant is unsigned only in ISO C90',
         'patterns':[r".*: warning: this decimal constant is unsigned only in ISO C90"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wmain',
@@ -259,7 +1351,7 @@
         'patterns':[r".*: warning: attempt to free a non-heap object '.+'"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wchar-subscripts',
         'description':'Array subscript has type char',
-        'patterns':[r".*: warning: array subscript has type 'char'"] },
+        'patterns':[r".*: warning: array subscript .+ type 'char'.+Wchar-subscripts"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'',
         'description':'Constant too large for type',
         'patterns':[r".*: warning: integer constant is too large for '.+' type"] },
@@ -274,7 +1366,8 @@
         'patterns':[r".*: warning: declaration 'class .+' does not declare anything"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wreorder',
         'description':'Initialization order will be different',
-        'patterns':[r".*: warning: '.+' will be initialized after"] },
+        'patterns':[r".*: warning: '.+' will be initialized after",
+                    r".*: warning: field .+ will be initialized after .+Wreorder"] },
     { 'category':'cont.',   'severity':severity.SKIP,     'members':[], 'option':'',
         'description':'',
         'patterns':[r".*: warning:   '.+'"] },
@@ -307,7 +1400,8 @@
         'patterns':[r".*: warning: function declaration isn't a prototype"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wignored-qualifiers',
         'description':'Type qualifiers ignored on function return value',
-        'patterns':[r".*: warning: type qualifiers ignored on function return type"] },
+        'patterns':[r".*: warning: type qualifiers ignored on function return type",
+                    r".*: warning: .+ type qualifier .+ has no effect .+Wignored-qualifiers"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'',
         'description':'&lt;foo&gt; declared inside parameter list, scope limited to this definition',
         'patterns':[r".*: warning: '.+' declared inside parameter list"] },
@@ -320,12 +1414,24 @@
     { 'category':'C/C++',   'severity':severity.LOW,      'members':[], 'option':'-Wcomment',
         'description':'Comment inside comment',
         'patterns':[r".*: warning: "".+"" within comment"] },
+    { 'category':'C/C++',   'severity':severity.LOW,      'members':[], 'option':'',
+        'description':'Value stored is never read',
+        'patterns':[r".*: warning: Value stored to .+ is never read"] },
+    { 'category':'C/C++',   'severity':severity.LOW,      'members':[], 'option':'-Wdeprecated-declarations',
+        'description':'Deprecated declarations',
+        'patterns':[r".*: warning: .+ is deprecated.+deprecated-declarations"] },
+    { 'category':'C/C++',   'severity':severity.LOW,      'members':[], 'option':'-Wdeprecated-register',
+        'description':'Deprecated register',
+        'patterns':[r".*: warning: 'register' storage class specifier is deprecated"] },
+    { 'category':'C/C++',   'severity':severity.LOW,      'members':[], 'option':'-Wpointer-sign',
+        'description':'Converts between pointers to integer types with different sign',
+        'patterns':[r".*: warning: .+ converts between pointers to integer types with different sign"] },
     { 'category':'C/C++',   'severity':severity.HARMLESS, 'members':[], 'option':'',
         'description':'Extra tokens after #endif',
         'patterns':[r".*: warning: extra tokens at end of #endif directive"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wenum-compare',
         'description':'Comparison between different enums',
-        'patterns':[r".*: warning: comparison between 'enum .+' and 'enum .+'"] },
+        'patterns':[r".*: warning: comparison between '.+' and '.+'.+Wenum-compare"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wconversion',
         'description':'Implicit conversion of negative number to unsigned type',
         'patterns':[r".*: warning: converting negative value '.+' to '.+'"] },
@@ -342,9 +1448,13 @@
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wctor-dtor-privacy',
         'description':'Class seems unusable because of private ctor/dtor' ,
         'patterns':[r".*: warning: 'class .+' only defines private constructors and has no friends"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wgnu-static-float-init',
+        'description':'In-class initializer for static const float/double' ,
+        'patterns':[r".*: warning: in-class initializer for static data member of .+const (float|double)"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wpointer-arith',
         'description':'void* used in arithmetic' ,
         'patterns':[r".*: warning: pointer of type 'void \*' used in (arithmetic|subtraction)",
+                    r".*: warning: arithmetic on .+ to void is a GNU extension.*Wpointer-arith",
                     r".*: warning: wrong type argument to increment"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,   'members':[], 'option':'-Wsign-promo',
         'description':'Overload resolution chose to promote from unsigned or enum to signed type' ,
@@ -359,23 +1469,159 @@
         'description':'Converting from <type> to <other type>',
         'patterns':[r".*: warning: converting to '.+' from '.+'"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,     'members':[], 'option':'',
+        'description':'VLA has zero or negative size',
+        'patterns':[r".*: warning: Declared variable-length array \(VLA\) has .+ size"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,     'members':[], 'option':'',
         'description':'Return value from void function',
         'patterns':[r".*: warning: 'return' with a value, in function returning void"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,     'members':[], 'option':'multichar',
+        'description':'Multi-character character constant',
+        'patterns':[r".*: warning: multi-character character constant"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,     'members':[], 'option':'writable-strings',
+        'description':'Conversion from string literal to char*',
+        'patterns':[r".*: warning: .+ does not allow conversion from string literal to 'char \*'"] },
     { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'',
         'description':'Useless specifier',
         'patterns':[r".*: warning: useless storage class specifier in empty declaration"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'-Wduplicate-decl-specifier',
+        'description':'Duplicate declaration specifier',
+        'patterns':[r".*: warning: duplicate '.+' declaration specifier"] },
     { 'category':'logtags',   'severity':severity.LOW,     'members':[], 'option':'',
         'description':'Duplicate logtag',
-        'patterns':[r".*: warning: tag "".+"" \(None\) duplicated in .+"] },
+        'patterns':[r".*: warning: tag \".+\" \(.+\) duplicated in .+"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'typedef-redefinition',
+        'description':'Typedef redefinition',
+        'patterns':[r".*: warning: redefinition of typedef '.+' is a C11 feature"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'gnu-designator',
+        'description':'GNU old-style field designator',
+        'patterns':[r".*: warning: use of GNU old-style field designator extension"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'missing-field-initializers',
+        'description':'Missing field initializers',
+        'patterns':[r".*: warning: missing field '.+' initializer"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'missing-braces',
+        'description':'Missing braces',
+        'patterns':[r".*: warning: suggest braces around initialization of",
+                    r".*: warning: too many braces around scalar initializer .+Wmany-braces-around-scalar-init",
+                    r".*: warning: braces around scalar initializer"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'sign-compare',
+        'description':'Comparison of integers of different signs',
+        'patterns':[r".*: warning: comparison of integers of different signs.+sign-compare"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'dangling-else',
+        'description':'Add braces to avoid dangling else',
+        'patterns':[r".*: warning: add explicit braces to avoid dangling else"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'initializer-overrides',
+        'description':'Initializer overrides prior initialization',
+        'patterns':[r".*: warning: initializer overrides prior initialization of this subobject"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'self-assign',
+        'description':'Assigning value to self',
+        'patterns':[r".*: warning: explicitly assigning value of .+ to itself"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'gnu-variable-sized-type-not-at-end',
+        'description':'GNU extension, variable sized type not at end',
+        'patterns':[r".*: warning: field '.+' with variable sized type '.+' not at the end of a struct or class"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'tautological-constant-out-of-range-compare',
+        'description':'Comparison of constant is always false/true',
+        'patterns':[r".*: comparison of .+ is always .+Wtautological-constant-out-of-range-compare"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'overloaded-virtual',
+        'description':'Hides overloaded virtual function',
+        'patterns':[r".*: '.+' hides overloaded virtual function"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'incompatible-pointer-types',
+        'description':'Incompatible pointer types',
+        'patterns':[r".*: warning: incompatible pointer types .+Wincompatible-pointer-types"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'asm-operand-widths',
+        'description':'ASM value size does not match register size',
+        'patterns':[r".*: warning: value size does not match register size specified by the constraint and modifier"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'literal-suffix',
+        'description':'Needs a space between literal and string macro',
+        'patterns':[r".*: warning: invalid suffix on literal.+ requires a space .+Wliteral-suffix"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'#warnings',
+        'description':'Warnings from #warning',
+        'patterns':[r".*: warning: .+-W#warnings"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'absolute-value',
+        'description':'Using float/int absolute value function with int/float argument',
+        'patterns':[r".*: warning: using .+ absolute value function .+ when argument is .+ type .+Wabsolute-value"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'',
+        'description':'Refers to implicitly defined namespace',
+        'patterns':[r".*: warning: using directive refers to implicitly-defined namespace .+"] },
+    { 'category':'C/C++',   'severity':severity.LOW,     'members':[], 'option':'-Winvalid-pp-token',
+        'description':'Invalid pp token',
+        'patterns':[r".*: warning: missing .+Winvalid-pp-token"] },
+
     { 'category':'C/C++',   'severity':severity.MEDIUM,     'members':[], 'option':'',
         'description':'Operator new returns NULL',
         'patterns':[r".*: warning: 'operator new' must not return NULL unless it is declared 'throw\(\)' .+"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,     'members':[], 'option':'',
         'description':'NULL used in arithmetic',
         'patterns':[r".*: warning: NULL used in arithmetic"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,     'members':[], 'option':'header-guard',
+        'description':'Misspelled header guard',
+        'patterns':[r".*: warning: '.+' is used as a header guard .+ followed by .+ different macro"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,     'members':[], 'option':'empty-body',
+        'description':'Empty loop body',
+        'patterns':[r".*: warning: .+ loop has empty body"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,     'members':[], 'option':'enum-conversion',
+        'description':'Implicit conversion from enumeration type',
+        'patterns':[r".*: warning: implicit conversion from enumeration type '.+'"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,     'members':[], 'option':'switch',
+        'description':'case value not in enumerated type',
+        'patterns':[r".*: warning: case value not in enumerated type '.+'"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,     'members':[], 'option':'',
+        'description':'Undefined result',
+        'patterns':[r".*: warning: The result of .+ is undefined",
+                    r".*: warning: 'this' pointer cannot be null in well-defined C\+\+ code;",
+                    r".*: warning: shifting a negative signed value is undefined"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,     'members':[], 'option':'',
+        'description':'Division by zero',
+        'patterns':[r".*: warning: Division by zero"] },
     { 'category':'C/C++',   'severity':severity.MEDIUM,     'members':[], 'option':'',
         'description':'Use of deprecated method',
         'patterns':[r".*: warning: '.+' is deprecated .+"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,     'members':[], 'option':'',
+        'description':'Use of garbage or uninitialized value',
+        'patterns':[r".*: warning: .+ is a garbage value",
+                    r".*: warning: Function call argument is an uninitialized value",
+                    r".*: warning: Undefined or garbage value returned to caller",
+                    r".*: warning: Called .+ pointer is.+uninitialized",
+                    r".*: warning: Called .+ pointer is.+uninitalized",  # match a typo in compiler message
+                    r".*: warning: Use of zero-allocated memory",
+                    r".*: warning: Dereference of undefined pointer value",
+                    r".*: warning: Passed-by-value .+ contains uninitialized data",
+                    r".*: warning: Branch condition evaluates to a garbage value",
+                    r".*: warning: The .+ of .+ is an uninitialized value.",
+                    r".*: warning: .+ is used uninitialized whenever .+sometimes-uninitialized",
+                    r".*: warning: Assigned value is garbage or undefined"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,     'members':[], 'option':'',
+        'description':'Result of malloc type incompatible with sizeof operand type',
+        'patterns':[r".*: warning: Result of '.+' is converted to .+ incompatible with sizeof operand type"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,     'members':[], 'option':'',
+        'description':'Return value not checked',
+        'patterns':[r".*: warning: The return value from .+ is not checked"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,     'members':[], 'option':'',
+        'description':'Possible heap pollution',
+        'patterns':[r".*: warning: .*Possible heap pollution from .+ type .+"] },
+    { 'category':'C/C++',   'severity':severity.MEDIUM,     'members':[], 'option':'',
+        'description':'Allocation size of 0 byte',
+        'patterns':[r".*: warning: Call to .+ has an allocation size of 0 byte"] },
+
+    { 'category':'C/C++',   'severity':severity.HARMLESS,     'members':[], 'option':'',
+        'description':'Discarded qualifier from pointer target type',
+        'patterns':[r".*: warning: .+ discards '.+' qualifier from pointer target type"] },
+    { 'category':'C/C++',   'severity':severity.HARMLESS,     'members':[], 'option':'',
+        'description':'Use snprintf instead of sprintf',
+        'patterns':[r".*: warning: .*sprintf is often misused; please use snprintf"] },
+    { 'category':'C/C++',   'severity':severity.HARMLESS,     'members':[], 'option':'',
+        'description':'Unsupported optimization flag',
+        'patterns':[r".*: warning: optimization flag '.+' is not supported"] },
+    { 'category':'C/C++',   'severity':severity.HARMLESS,     'members':[], 'option':'',
+        'description':'Extra or missing parentheses',
+        'patterns':[r".*: warning: equality comparison with extraneous parentheses",
+                    r".*: warning: .+ within .+Wlogical-op-parentheses"] },
+    { 'category':'C/C++',   'severity':severity.HARMLESS,     'members':[], 'option':'mismatched-tags',
+        'description':'Mismatched class vs struct tags',
+        'patterns':[r".*: warning: '.+' defined as a .+ here but previously declared as a .+mismatched-tags",
+                    r".*: warning: .+ was previously declared as a .+mismatched-tags"] },
 
     # these next ones are to deal with formatting problems resulting from the log being mixed up by 'make -j'
     { 'category':'C/C++',   'severity':severity.SKIP,     'members':[], 'option':'',
@@ -388,6 +1634,48 @@
         'description':'',
         'patterns':[r".*: warning: In file included from .+,"] },
 
+    # warnings from clang-tidy
+    { 'category':'C/C++',   'severity':severity.TIDY,     'members':[], 'option':'',
+        'description':'clang-tidy readability',
+        'patterns':[r".*: .+\[readability-.+\]$"] },
+    { 'category':'C/C++',   'severity':severity.TIDY,     'members':[], 'option':'',
+        'description':'clang-tidy c++ core guidelines',
+        'patterns':[r".*: .+\[cppcoreguidelines-.+\]$"] },
+    { 'category':'C/C++',   'severity':severity.TIDY,     'members':[], 'option':'',
+        'description':'clang-tidy google-runtime',
+        'patterns':[r".*: .+\[google-runtime-.+\]$"] },
+    { 'category':'C/C++',   'severity':severity.TIDY,     'members':[], 'option':'',
+        'description':'clang-tidy google-build',
+        'patterns':[r".*: .+\[google-build-.+\]$"] },
+    { 'category':'C/C++',   'severity':severity.TIDY,     'members':[], 'option':'',
+        'description':'clang-tidy google-explicit',
+        'patterns':[r".*: .+\[google-explicit-.+\]$"] },
+    { 'category':'C/C++',   'severity':severity.TIDY,     'members':[], 'option':'',
+        'description':'clang-tidy google-readability',
+        'patterns':[r".*: .+\[google-readability-.+\]$"] },
+    { 'category':'C/C++',   'severity':severity.TIDY,     'members':[], 'option':'',
+        'description':'clang-tidy google-global',
+        'patterns':[r".*: .+\[google-global-.+\]$"] },
+    { 'category':'C/C++',   'severity':severity.TIDY,     'members':[], 'option':'',
+        'description':'clang-tidy modernize',
+        'patterns':[r".*: .+\[modernize-.+\]$"] },
+    { 'category':'C/C++',   'severity':severity.TIDY,     'members':[], 'option':'',
+        'description':'clang-tidy misc',
+        'patterns':[r".*: .+\[misc-.+\]$"] },
+    { 'category':'C/C++',   'severity':severity.TIDY,     'members':[], 'option':'',
+        'description':'clang-tidy CERT',
+        'patterns':[r".*: .+\[cert-.+\]$"] },
+    { 'category':'C/C++',   'severity':severity.TIDY,     'members':[], 'option':'',
+        'description':'clang-tidy llvm',
+        'patterns':[r".*: .+\[llvm-.+\]$"] },
+    { 'category':'C/C++',   'severity':severity.TIDY,     'members':[], 'option':'',
+        'description':'clang-tidy clang-diagnostic',
+        'patterns':[r".*: .+\[clang-diagnostic-.+\]$"] },
+    { 'category':'C/C++',   'severity':severity.TIDY,     'members':[], 'option':'',
+        'description':'clang-tidy clang-analyzer',
+        'patterns':[r".*: .+\[clang-analyzer-.+\]$",
+                    r".*: Call Path : .+$"] },
+
     # catch-all for warnings this script doesn't know about yet
     { 'category':'C/C++',   'severity':severity.UNKNOWN,  'members':[], 'option':'',
         'description':'Unclassified/unrecognized warnings',
@@ -406,6 +1694,7 @@
 
 def dumphtmlprologue(title):
     output('<html>\n<head>\n<title>' + title + '</title>\n<body>\n')
+    output('<a name="PageTop">')
     output(htmlbig(title))
     output('<p>\n')
 
@@ -414,18 +1703,23 @@
     output('<tr bgcolor="' + row_colors[cur_row_color] + '"><td colspan="2">',)
     cur_row_color = 1 - cur_row_color
     output(text,)
-    output('</td></tr>')
+    output('</td></tr>\n')
 
-def begintable(text, backgroundcolor):
+def begintable(text, backgroundcolor, extraanchor):
     global anchor
     output('<table border="1" rules="cols" frame="box" width="100%" bgcolor="black"><tr bgcolor="' +
-        backgroundcolor + '"><a name="anchor' + str(anchor) + '"><td>')
+        backgroundcolor + '"><a name="anchor' + str(anchor) + '">')
+    if extraanchor:
+        output('<a name="' + extraanchor + '">')
+    output('<td>')
     output(htmlbig(text[0]) + '<br>')
     for i in text[1:]:
         output(i + '<br>')
     output('</td>')
-    output('<td width="100" bgcolor="grey"><a align="right" href="#anchor' + str(anchor-1) +
-        '">previous</a><br><a align="right" href="#anchor' + str(anchor+1) + '">next</a>')
+    output('<td width="100" bgcolor="grey">' +
+           '<a align="right" href="#PageTop">top</a><br>' +
+           '<a align="right" href="#anchor' + str(anchor-1) + '">previous</a><br>' +
+           '<a align="right" href="#anchor' + str(anchor+1) + '">next</a>')
     output('</td></a></tr>')
     anchor += 1
 
@@ -438,17 +1732,49 @@
     known = 0
     unknown = 0
     for i in warnpatterns:
+        i['members'] = sorted(set(i['members']))
         if i['severity'] == severity.UNKNOWN:
             unknown += len(i['members'])
         elif i['severity'] != severity.SKIP:
             known += len(i['members'])
-    output('Number of classified warnings: <b>' + str(known) + '</b><br>' )
-    output('Number of unclassified warnings: <b>' + str(unknown) + '</b><br>')
+    output('\nNumber of classified warnings: <b>' + str(known) + '</b><br>' )
+    output('\nNumber of unclassified warnings: <b>' + str(unknown) + '</b><br>')
     total = unknown + known
-    output('Total number of warnings: <b>' + str(total) + '</b>')
+    output('\nTotal number of warnings: <b>' + str(total) + '</b>')
     if total < 1000:
         output('(low count may indicate incremental build)')
-    output('<p>')
+    output('\n<p>\n')
+
+# dump count of warnings of a given severity in TOC
+def dumpcount(sev):
+    first = True
+    for i in warnpatterns:
+        if i['severity'] == sev and len(i['members']) > 0:
+            if first:
+                output(headerforseverity(sev) + ':\n<blockquote>' +
+                       '<table border="1" frame="box" width="100%">')
+            output('<tr bgcolor="' + colorforseverity(sev) + '">' +
+                   '<td><a href="#' + i['anchor'] + '">' + descriptionfor(i) +
+                   ' (' + str(len(i['members'])) + ')</a></td></tr>\n')
+            first = False
+    if not first:
+        output('</table></blockquote>\n')
+
+# dump table of content, list of all warning patterns
+def dumptoc():
+    n = 1
+    output('<blockquote>\n')
+    for i in warnpatterns:
+        i['anchor'] = 'Warning' + str(n)
+        n += 1
+    dumpcount(severity.FIXMENOW)
+    dumpcount(severity.HIGH)
+    dumpcount(severity.MEDIUM)
+    dumpcount(severity.LOW)
+    dumpcount(severity.TIDY)
+    dumpcount(severity.HARMLESS)
+    dumpcount(severity.UNKNOWN)
+    output('</blockquote>\n<p>\n')
 
 def allpatterns(cat):
     pats = ''
@@ -470,11 +1796,24 @@
         if len(i['members']) == 0 and i['severity'] != severity.SKIP:
             if tablestarted == False:
                 tablestarted = True
-                begintable(['Fixed warnings', 'No more occurrences. Please consider turning these into errors if possible, before they are reintroduced into the build'], 'blue')
+                begintable(['Fixed warnings', 'No more occurrences. Please consider turning these into errors if possible, before they are reintroduced into the build'], 'blue', '')
             tablerow(i['description'] + ' (' + allpatterns(i) + ') ' + i['option'])
     if tablestarted:
         endtable()
 
+def warningwithurl(line):
+    if not args.url:
+        return line
+    m = re.search( r'^([^ :]+):(\d+):(.+)', line, re.M|re.I)
+    if not m:
+        return line
+    filepath = m.group(1)
+    linenumber = m.group(2)
+    warning = m.group(3)
+    if args.separator:
+        return '<a href="' + args.url + '/' + filepath + args.separator + linenumber + '">' + filepath + ':' + linenumber + '</a>:' + warning
+    else:
+        return '<a href="' + args.url + '/' + filepath + '">' + filepath + '</a>:' + linenumber + ':' + warning
 
 # dump a category, provided it is not marked as 'SKIP' and has more than 0 occurrences
 def dumpcategory(cat):
@@ -482,9 +1821,9 @@
         header = [descriptionfor(cat),str(len(cat['members'])) + ' occurrences:']
         if cat['option'] != '':
             header[1:1] = [' (related option: ' + cat['option'] +')']
-        begintable(header, colorforseverity(cat['severity']))
+        begintable(header, colorforseverity(cat['severity']), cat['anchor'])
         for i in cat['members']:
-            tablerow(i)
+            tablerow(warningwithurl(i))
         endtable()
 
 
@@ -514,7 +1853,7 @@
         for pat in i['patterns']:
             i['compiledpatterns'].append(re.compile(pat))
 
-infile = open(sys.argv[1], 'r')
+infile = open(args.buildlog, 'r')
 warnings = []
 
 platformversion = 'unknown'
@@ -553,11 +1892,15 @@
 # dump the html output to stdout
 dumphtmlprologue('Warnings for ' + platformversion + ' - ' + targetproduct + ' - ' + targetvariant)
 dumpstats()
+# sort table based on number of members once dumpstats has deduplicated the
+# members.
+warnpatterns.sort(reverse=True, key=lambda i: len(i['members']))
+dumptoc()
 dumpseverity(severity.FIXMENOW)
 dumpseverity(severity.HIGH)
 dumpseverity(severity.MEDIUM)
 dumpseverity(severity.LOW)
+dumpseverity(severity.TIDY)
 dumpseverity(severity.HARMLESS)
 dumpseverity(severity.UNKNOWN)
 dumpfixed()
-
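
The warn.py changes above add clang-tidy categories and optional source links. As a rough illustration only (not part of the patch), the standalone Python sketch below shows how one of the added "[check-name]" patterns classifies a sample warning line, and how the new --url/--separator handling turns a "file:line:" prefix into an HTML link. The classify() and warning_with_url() helpers, the sample warning text, and the http://cs.example.com URL are all invented for the example; warn.py itself does this inside dumpcategory() and warningwithurl().

#!/usr/bin/env python
# Illustrative sketch only: trimmed-down versions of the pattern matching
# and URL rewriting added to warn.py above. Sample inputs are made up.
import re

# A few of the clang-tidy patterns added above, keyed by description.
tidy_patterns = [
    ('clang-tidy readability',    re.compile(r".*: .+\[readability-.+\]$")),
    ('clang-tidy google-runtime', re.compile(r".*: .+\[google-runtime-.+\]$")),
    ('clang-tidy modernize',      re.compile(r".*: .+\[modernize-.+\]$")),
    ('clang-tidy clang-analyzer', re.compile(r".*: .+\[clang-analyzer-.+\]$")),
]

def classify(line):
    # Return the description of the first pattern that matches, if any.
    for description, pattern in tidy_patterns:
        if pattern.match(line):
            return description
    return 'Unclassified/unrecognized warnings'

def warning_with_url(line, url, separator=None):
    # Same idea as warningwithurl(), simplified: link "file:line:" warnings
    # to a code browser; with a separator, the line number is appended.
    m = re.search(r'^([^ :]+):(\d+):(.+)', line)
    if not m:
        return line
    filepath, linenumber, warning = m.group(1), m.group(2), m.group(3)
    if separator:
        return ('<a href="' + url + '/' + filepath + separator + linenumber +
                '">' + filepath + ':' + linenumber + '</a>:' + warning)
    return ('<a href="' + url + '/' + filepath + '">' + filepath +
            '</a>:' + linenumber + ':' + warning)

if __name__ == '__main__':
    sample = 'foo/bar.cpp:42:10: warning: use nullptr [modernize-use-nullptr]'
    print(classify(sample))   # prints: clang-tidy modernize
    print(warning_with_url(sample, 'http://cs.example.com', '#'))

Running this prints the category name followed by the same warning with its foo/bar.cpp:42 prefix wrapped in an anchor tag, which is how the generated HTML report links each warning back to the source browser when --url is given.
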
diff --git a/tools/zipalign/ZipAlign.cpp b/tools/zipalign/ZipAlign.cpp
index a2dfd02..aef91a5 100644
--- a/tools/zipalign/ZipAlign.cpp
+++ b/tools/zipalign/ZipAlign.cpp
@@ -33,12 +33,12 @@
     fprintf(stderr, "Copyright (C) 2009 The Android Open Source Project\n\n");
     fprintf(stderr,
         "Usage: zipalign [-f] [-p] [-v] [-z] <align> infile.zip outfile.zip\n"
-        "       zipalign -c [-v] <align> infile.zip\n\n" );
+        "       zipalign -c [-p] [-v] <align> infile.zip\n\n" );
     fprintf(stderr,
         "  <align>: alignment in bytes, e.g. '4' provides 32-bit alignment\n");
     fprintf(stderr, "  -c: check alignment only (does not modify file)\n");
     fprintf(stderr, "  -f: overwrite existing outfile.zip\n");
-    fprintf(stderr, "  -p: page align stored shared object files\n");
+    fprintf(stderr, "  -p: memory page alignment for stored shared object files\n");
     fprintf(stderr, "  -v: verbose output\n");
     fprintf(stderr, "  -z: recompress using Zopfli\n");
 }
diff --git a/tools/zipalign/ZipEntry.h b/tools/zipalign/ZipEntry.h
index e06567d..431b6db 100644
--- a/tools/zipalign/ZipEntry.h
+++ b/tools/zipalign/ZipEntry.h
@@ -290,7 +290,7 @@
             mExtraField(NULL),
             mFileComment(NULL)
         {}
-        virtual ~CentralDirEntry(void) {
+        ~CentralDirEntry(void) {
             delete[] mFileName;
             delete[] mExtraField;
             delete[] mFileComment;