[DO NOT MERGE] Updating Security String to 2017-08-05 on nyc-dev bug:62388772 am: 2b93753bc4 am: 8828ffc63e am: edda32c5e3 -s ours am: af114e0286 -s ours am: 332ef64b0e -s ours am: c56a72d5fc -s ours am: e4f93ecc22 -s ours
am: c41c875e40
Change-Id: Ib38cf38a4af75d573ec114c0f0162e0708ab0957
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 88f9172..b955e25 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -417,6 +417,9 @@
# Soong module variant change, remove obsolete intermediates
$(call add-clean-step, rm -rf $(OUT_DIR)/soong/.intermediates)
+# Version checking moving to Soong
+$(call add-clean-step, rm -rf $(OUT_DIR)/versions_checked.mk)
+
# ************************************************
# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
# ************************************************
diff --git a/core/Makefile b/core/Makefile
index 961f35d..738e9d6 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -225,11 +225,16 @@
BUILDINFO_SH := build/tools/buildinfo.sh
VENDOR_BUILDINFO_SH := build/tools/vendor_buildinfo.sh
-# TARGET_BUILD_FLAVOR and ro.build.flavor are used only by the test harness to distinguish builds.
+# TARGET_BUILD_FLAVOR and ro.build.flavor are used only by the test
+# harness to distinguish builds. Only add _asan for a sanitized build
+# if it isn't already a part of the flavor (via a dedicated lunch
+# config for example).
TARGET_BUILD_FLAVOR := $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)
ifdef SANITIZE_TARGET
+ifeq (,$(findstring _asan,$(TARGET_BUILD_FLAVOR)))
TARGET_BUILD_FLAVOR := $(TARGET_BUILD_FLAVOR)_asan
endif
+endif
ifdef TARGET_SYSTEM_PROP
system_prop_file := $(TARGET_SYSTEM_PROP)
@@ -857,11 +862,11 @@
$(hide) zip -qjX $@ $<
$(remove-timestamps-from-package)
-# Carry the public key for update_engine if it's a non-Brillo target that
+# Carry the public key for update_engine if it's a non-IoT target that
# uses the AB updater. We use the same key as otacerts but in RSA public key
# format.
ifeq ($(AB_OTA_UPDATER),true)
-ifeq ($(BRILLO),)
+ifneq ($(PRODUCT_IOT),true)
ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_OUT_ETC)/update_engine/update-payload-key.pub.pem
$(TARGET_OUT_ETC)/update_engine/update-payload-key.pub.pem: $(addsuffix .x509.pem,$(DEFAULT_KEY_CERT_PAIR))
$(hide) rm -f $@
@@ -996,10 +1001,10 @@
$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "futility=$(notdir $(FUTILITY))" >> $(1))
$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_signer_cmd=$(VBOOT_SIGNER)" >> $(1))
$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_signing_args=$(INTERNAL_AVB_SIGNING_ARGS)" >> $(1))
-$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_avbtool=$(AVBTOOL)" >> $(1))
-$(if $(BOARD_AVB_ENABLE),$(hide) echo "system_avb_enable=$(BOARD_AVB_ENABLE)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "avb_avbtool=$(notdir $(AVBTOOL))" >> $(1))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "system_avb_hashtree_enable=$(BOARD_AVB_ENABLE)" >> $(1))
$(if $(BOARD_AVB_ENABLE),$(hide) echo "system_avb_add_hashtree_footer_args=$(BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS)" >> $(1))
-$(if $(BOARD_AVB_ENABLE),$(hide) echo "vendor_avb_enable=$(BOARD_AVB_ENABLE)" >> $(1))
+$(if $(BOARD_AVB_ENABLE),$(hide) echo "vendor_avb_hashtree_enable=$(BOARD_AVB_ENABLE)" >> $(1))
$(if $(BOARD_AVB_ENABLE),$(hide) echo "vendor_avb_add_hashtree_footer_args=$(BOARD_AVB_VENDOR_ADD_HASHTREE_FOOTER_ARGS)" >> $(1))
$(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
$(hide) echo "recovery_as_boot=true" >> $(1))
@@ -1226,6 +1231,9 @@
ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT))
$(INSTALLED_BOOTIMAGE_TARGET) : $(VBOOT_SIGNER)
endif
+ifeq (true,$(BOARD_AVB_ENABLE))
+$(INSTALLED_BOOTIMAGE_TARGET) : $(AVBTOOL)
+endif
$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) $(MINIGZIP) \
$(INSTALLED_RAMDISK_TARGET) \
$(INTERNAL_RECOVERYIMAGE_FILES) \
@@ -1842,7 +1850,7 @@
ifdef INSTALLED_DTBOIMAGE_TARGET
INTERNAL_AVB_MAKE_VBMETA_IMAGE_ARGS += \
- --include_descriptors_from_image $(INSTALLED_DTBOIMAGE_TARGET)
+ --include_descriptors_from_image $(INSTALLED_DTBOIMAGE_TARGET)
endif
ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
@@ -1940,6 +1948,7 @@
$(HOST_OUT_JAVA_LIBRARIES)/dumpkey.jar \
$(HOST_OUT_JAVA_LIBRARIES)/signapk.jar \
$(HOST_OUT_JAVA_LIBRARIES)/BootSignature.jar \
+ $(HOST_OUT_JAVA_LIBRARIES)/VeritySigner.jar \
$(HOST_OUT_EXECUTABLES)/make_ext4fs \
$(HOST_OUT_EXECUTABLES)/mkuserimg.sh \
$(HOST_OUT_EXECUTABLES)/mke2fs \
@@ -1961,6 +1970,7 @@
$(HOST_OUT_EXECUTABLES)/brillo_update_payload \
$(HOST_OUT_EXECUTABLES)/lib/shflags/shflags \
$(HOST_OUT_EXECUTABLES)/delta_generator \
+ $(AVBTOOL) \
$(BLK_ALLOC_TO_BASE_FS)
ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT))
@@ -1969,12 +1979,6 @@
$(VBOOT_SIGNER)
endif
-ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT))
-OTATOOLS += \
- $(FUTILITY) \
- $(VBOOT_SIGNER)
-endif
-
# Shared libraries.
OTATOOLS += \
$(HOST_LIBRARY_PATH)/libc++$(HOST_SHLIB_SUFFIX) \
@@ -2009,23 +2013,32 @@
BUILT_OTATOOLS_PACKAGE := $(PRODUCT_OUT)/otatools.zip
$(BUILT_OTATOOLS_PACKAGE): zip_root := $(call intermediates-dir-for,PACKAGING,otatools)/otatools
-$(BUILT_OTATOOLS_PACKAGE): $(OTATOOLS) | $(ACP)
+OTATOOLS_DEPS := \
+ system/extras/verity/build_verity_metadata.py \
+ system/extras/ext4_utils/mke2fs.conf \
+ external/avb/test/data/testkey_rsa4096.pem \
+ $(shell find build/target/product/security -type f -name \*.x509.pem -o -name \*.pk8 -o \
+ -name verity_key | sort) \
+ $(shell find device vendor -type f -name \*.pk8 -o -name verifiedboot\* -o \
+ -name \*.x509.pem -o -name oem\*.prop | sort)
+
+OTATOOLS_RELEASETOOLS := \
+ $(shell find build/tools/releasetools -name \*.pyc -prune -o -type f | sort)
+
+ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT))
+OTATOOLS_DEPS += \
+ $(shell find external/vboot_reference/tests/devkeys -type f | sort)
+endif
+
+$(BUILT_OTATOOLS_PACKAGE): $(OTATOOLS) $(OTATOOLS_DEPS) $(OTATOOLS_RELEASETOOLS) | $(ACP)
@echo "Package OTA tools: $@"
$(hide) rm -rf $@ $(zip_root)
- $(hide) mkdir -p $(dir $@) $(zip_root)/bin $(zip_root)/framework $(zip_root)/releasetools $(zip_root)/system/extras/verity $(zip_root)/system/extras/ext4_utils
+ $(hide) mkdir -p $(dir $@) $(zip_root)/bin $(zip_root)/framework $(zip_root)/releasetools
$(call copy-files-with-structure,$(OTATOOLS),$(HOST_OUT)/,$(zip_root))
- $(hide) $(ACP) $(HOST_OUT_JAVA_LIBRARIES)/VeritySigner.jar $(zip_root)/framework/
- $(hide) $(ACP) -p system/extras/verity/build_verity_metadata.py $(zip_root)/system/extras/verity/
- $(hide) $(ACP) -p system/extras/ext4_utils/mke2fs.conf $(zip_root)/system/extras/ext4_utils/
$(hide) $(ACP) -r -d -p build/tools/releasetools/* $(zip_root)/releasetools
-ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT))
- $(hide) mkdir -p $(zip_root)/external/vboot_reference/tests/devkeys
- $(hide) $(ACP) -r -d -p external/vboot_reference/tests/devkeys/* $(zip_root)/external/vboot_reference/tests/devkeys
-endif
$(hide) rm -rf $@ $(zip_root)/releasetools/*.pyc
$(hide) (cd $(zip_root) && zip -qryX $(abspath $@) *)
- $(hide) zip -qryX $(abspath $@) build/target/product/security/
- $(hide) find device vendor -name \*.pk8 -o -name verifiedboot\* -o -name \*.x509.pem -o -name oem\*.prop | xargs zip -qryX $(abspath $@)>/dev/null || true
+ $(hide) echo $(OTATOOLS_DEPS) | xargs zip -qryX $(abspath $@)>/dev/null || true
.PHONY: otatools-package
otatools-package: $(BUILT_OTATOOLS_PACKAGE)
@@ -2292,10 +2305,7 @@
ifeq ($(BOARD_AVB_ENABLE),true)
$(hide) echo "board_avb_enable=true" >> $(zip_root)/META/misc_info.txt
$(hide) echo "board_avb_rollback_index=$(BOARD_AVB_ROLLBACK_INDEX)" >> $(zip_root)/META/misc_info.txt
- $(hide) echo "board_avb_key_path=$(BOARD_AVB_KEY_PATH)" >> $(zip_root)/META/misc_info.txt
- $(hide) echo "board_avb_algorithm=$(BOARD_AVB_ALGORITHM)" >> $(zip_root)/META/misc_info.txt
$(hide) echo "board_avb_boot_add_hash_footer_args=$(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)" >> $(zip_root)/META/misc_info.txt
- $(hide) echo "board_avb_system_add_hashtree_footer_args=$(BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS)" >> $(zip_root)/META/misc_info.txt
$(hide) echo "board_avb_make_vbmeta_image_args=$(BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS)" >> $(zip_root)/META/misc_info.txt
endif
ifdef BOARD_BPT_INPUT_FILES
@@ -2349,6 +2359,16 @@
$(hide) mkdir -p $(zip_root)/IMAGES
$(hide) cp $(INSTALLED_VENDORIMAGE_TARGET) $(zip_root)/IMAGES/
endif
+ifdef BOARD_PREBUILT_DTBOIMAGE
+ $(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
+ $(hide) cp $(INSTALLED_DTBOIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
+ $(hide) echo "has_dtbo=true" >> $(zip_root)/META/misc_info.txt
+ifeq ($(BOARD_AVB_ENABLE),true)
+ $(hide) echo "dtbo_size=$(BOARD_DTBOIMG_PARTITION_SIZE)" >> $(zip_root)/META/misc_info.txt
+ $(hide) echo "board_avb_dtbo_add_hash_footer_args=$(BOARD_AVB_DTBO_ADD_HASH_FOOTER_ARGS)" \
+ >> $(zip_root)/META/misc_info.txt
+endif
+endif
@# Run fs_config on all the system, vendor, boot ramdisk,
@# and recovery ramdisk files in the zip, and save the output
$(hide) $(call fs_config,$(zip_root)/SYSTEM,system/) > $(zip_root)/META/filesystem_config.txt
@@ -2416,6 +2436,10 @@
$(INTERNAL_OTA_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
+ifeq ($(AB_OTA_UPDATER),true)
+$(INTERNAL_OTA_PACKAGE_TARGET): $(BRILLO_UPDATE_PAYLOAD)
+endif
+
$(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) \
build/tools/releasetools/ota_from_target_files
@echo "Package OTA: $@"
diff --git a/core/aapt2.mk b/core/aapt2.mk
index a10af67..287f933 100644
--- a/core/aapt2.mk
+++ b/core/aapt2.mk
@@ -40,7 +40,7 @@
ifneq ($(my_generated_res_dirs),)
my_generated_resources_flata := $(my_compiled_res_base_dir)/gen_res.flata
$(my_generated_resources_flata): PRIVATE_SOURCE_RES_DIRS := $(my_generated_res_dirs)
-$(my_generated_resources_flata) : $(my_generated_res_dirs_deps)
+$(my_generated_resources_flata) : $(my_generated_res_dirs_deps) $(AAPT2)
@echo "AAPT2 compile $@ <- $(PRIVATE_SOURCE_RES_DIRS)"
$(call aapt2-compile-resource-dirs)
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 92e69bb..c327d2c 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -248,6 +248,8 @@
###########################################################
include $(BUILD_SYSTEM)/configure_module_stem.mk
+LOCAL_BUILT_MODULE := $(intermediates)/$(my_built_module_stem)
+
# OVERRIDE_BUILT_MODULE_PATH is only allowed to be used by the
# internal SHARED_LIBRARIES build files.
OVERRIDE_BUILT_MODULE_PATH := $(strip $(OVERRIDE_BUILT_MODULE_PATH))
@@ -255,11 +257,8 @@
ifneq ($(LOCAL_MODULE_CLASS),SHARED_LIBRARIES)
$(error $(LOCAL_PATH): Illegal use of OVERRIDE_BUILT_MODULE_PATH)
endif
- built_module_path := $(OVERRIDE_BUILT_MODULE_PATH)
-else
- built_module_path := $(intermediates)
+ $(eval $(call copy-one-file,$(LOCAL_BUILT_MODULE),$(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem)))
endif
-LOCAL_BUILT_MODULE := $(built_module_path)/$(my_built_module_stem)
ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
# Apk and its attachments reside in its own subdir.
@@ -295,6 +294,11 @@
.KATI_RESTAT: $(LOCAL_BUILT_MODULE).toc
# Build .toc file when using mm, mma, or make $(my_register_name)
$(my_all_targets): $(LOCAL_BUILT_MODULE).toc
+
+ifdef OVERRIDE_BUILT_MODULE_PATH
+$(eval $(call copy-one-file,$(LOCAL_BUILT_MODULE).toc,$(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem).toc))
+$(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem).toc: $(OVERRIDE_BUILT_MODULE_PATH)/$(my_built_module_stem)
+endif
endif
###########################################################
@@ -431,9 +435,27 @@
###########################################################
ifdef LOCAL_COMPATIBILITY_SUITE
+# If we are building a native test or benchmark and its stem variants are not defined,
+# separate the multiple architectures into subdirectories of the testcase folder.
+arch_dir :=
+is_native :=
+ifeq ($(LOCAL_MODULE_CLASS),NATIVE_TESTS)
+ is_native := true
+endif
+ifeq ($(LOCAL_MODULE_CLASS),NATIVE_BENCHMARK)
+ is_native := true
+endif
+ifdef LOCAL_MULTILIB
+ is_native := true
+endif
+ifdef is_native
+ arch_dir := /$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
+ is_native :=
+endif
+
# The module itself.
$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
- $(eval my_compat_dist_$(suite) := $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
+ $(eval my_compat_dist_$(suite) := $(foreach dir, $(call compatibility_suite_dirs,$(suite),$(arch_dir)), \
$(LOCAL_BUILT_MODULE):$(dir)/$(my_installed_module_stem))))
# Make sure we only add the files once for multilib modules.
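Example (illustrative, not part of this change; the module name is hypothetical):
with the arch_dir logic above, a NATIVE_TESTS module named foo_test built for
arm64 is staged at

    $(TARGET_OUT_TESTCASES)/foo_test/arm64/foo_test

instead of directly under $(TARGET_OUT_TESTCASES)/foo_test/.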
@@ -461,6 +483,13 @@
$(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
$(LOCAL_PATH)/DynamicConfig.xml:$(dir)/$(LOCAL_MODULE).dynamic)))
endif
+
+ifneq (,$(wildcard $(LOCAL_PATH)/$(LOCAL_MODULE)_*.config))
+$(foreach extra_config, $(wildcard $(LOCAL_PATH)/$(LOCAL_MODULE)_*.config), \
+ $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
+ $(eval my_compat_dist_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
+ $(extra_config):$(dir)/$(notdir $(extra_config))))))
+endif
endif # $(my_prefix)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_compat_files
$(call create-suite-dependencies)
diff --git a/core/binary.mk b/core/binary.mk
index 589c462..625d348 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -309,7 +309,7 @@
ifneq ($(LOCAL_NO_PIC),true)
ifneq ($($(my_prefix)OS),windows)
ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
-my_cflags += -fpie
+my_cflags += -fPIE
else
my_cflags += -fPIC
endif
@@ -515,37 +515,6 @@
my_asflags += -D__ASSEMBLY__
###########################################################
-## When compiling against the VNDK, use LL-NDK libraries
-###########################################################
-ifneq ($(LOCAL_USE_VNDK),)
- ####################################################
- ## Soong modules may be built twice, once for /system
- ## and once for /vendor. If we're using the VNDK,
- ## switch all soong libraries over to the /vendor
- ## variant.
- ####################################################
- ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
- # Soong-built libraries should always use the .vendor variant
- my_whole_static_libraries := $(addsuffix .vendor,$(my_whole_static_libraries))
- my_static_libraries := $(addsuffix .vendor,$(my_static_libraries))
- my_shared_libraries := $(addsuffix .vendor,$(my_shared_libraries))
- my_system_shared_libraries := $(addsuffix .vendor,$(my_system_shared_libraries))
- my_header_libraries := $(addsuffix .vendor,$(my_header_libraries))
- else
- my_whole_static_libraries := $(foreach l,$(my_whole_static_libraries),\
- $(if $(SPLIT_VENDOR.STATIC_LIBRARIES.$(l)),$(l).vendor,$(l)))
- my_static_libraries := $(foreach l,$(my_static_libraries),\
- $(if $(SPLIT_VENDOR.STATIC_LIBRARIES.$(l)),$(l).vendor,$(l)))
- my_shared_libraries := $(foreach l,$(my_shared_libraries),\
- $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
- my_system_shared_libraries := $(foreach l,$(my_system_shared_libraries),\
- $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
- my_header_libraries := $(foreach l,$(my_header_libraries),\
- $(if $(SPLIT_VENDOR.HEADER_LIBRARIES.$(l)),$(l).vendor,$(l)))
- endif
-endif
-
-###########################################################
## Define PRIVATE_ variables from global vars
###########################################################
ifndef LOCAL_IS_HOST_MODULE
@@ -1343,6 +1312,36 @@
asm_objects += $(asm_objects_asm)
endif
+###########################################################
+## When compiling against the VNDK, use LL-NDK libraries
+###########################################################
+ifneq ($(LOCAL_USE_VNDK),)
+ ####################################################
+ ## Soong modules may be built twice, once for /system
+ ## and once for /vendor. If we're using the VNDK,
+ ## switch all soong libraries over to the /vendor
+ ## variant.
+ ####################################################
+ ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+ # Soong-built libraries should always use the .vendor variant
+ my_whole_static_libraries := $(addsuffix .vendor,$(my_whole_static_libraries))
+ my_static_libraries := $(addsuffix .vendor,$(my_static_libraries))
+ my_shared_libraries := $(addsuffix .vendor,$(my_shared_libraries))
+ my_system_shared_libraries := $(addsuffix .vendor,$(my_system_shared_libraries))
+ my_header_libraries := $(addsuffix .vendor,$(my_header_libraries))
+ else
+ my_whole_static_libraries := $(foreach l,$(my_whole_static_libraries),\
+ $(if $(SPLIT_VENDOR.STATIC_LIBRARIES.$(l)),$(l).vendor,$(l)))
+ my_static_libraries := $(foreach l,$(my_static_libraries),\
+ $(if $(SPLIT_VENDOR.STATIC_LIBRARIES.$(l)),$(l).vendor,$(l)))
+ my_shared_libraries := $(foreach l,$(my_shared_libraries),\
+ $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
+ my_system_shared_libraries := $(foreach l,$(my_system_shared_libraries),\
+ $(if $(SPLIT_VENDOR.SHARED_LIBRARIES.$(l)),$(l).vendor,$(l)))
+ my_header_libraries := $(foreach l,$(my_header_libraries),\
+ $(if $(SPLIT_VENDOR.HEADER_LIBRARIES.$(l)),$(l).vendor,$(l)))
+ endif
+endif
##########################################################
## Set up installed module dependency
@@ -1396,39 +1395,28 @@
## other NDK-built libraries
####################################################
-my_link_type := $(intermediates)/link_type
-all_link_types: $(my_link_type)
ifdef LOCAL_SDK_VERSION
-$(my_link_type): PRIVATE_LINK_TYPE := native:ndk
-$(my_link_type): PRIVATE_WARN_TYPES :=
-$(my_link_type): PRIVATE_ALLOWED_TYPES := native:ndk
+my_link_type := native:ndk
+my_warn_types :=
+my_allowed_types := native:ndk
else ifdef LOCAL_USE_VNDK
-$(my_link_type): PRIVATE_LINK_TYPE := native:vendor
-$(my_link_type): PRIVATE_WARN_TYPES :=
-$(my_link_type): PRIVATE_ALLOWED_TYPES := native:vendor
+my_link_type := native:vendor
+my_warn_types :=
+my_allowed_types := native:vendor
else
-$(my_link_type): PRIVATE_LINK_TYPE := native:platform
-$(my_link_type): PRIVATE_WARN_TYPES :=
-$(my_link_type): PRIVATE_ALLOWED_TYPES := native:ndk native:platform
+my_link_type := native:platform
+my_warn_types :=
+my_allowed_types := native:ndk native:platform
endif
-$(eval $(call link-type-partitions,$(my_link_type)))
-my_link_type_deps := $(strip \
- $(foreach l,$(my_whole_static_libraries) $(my_static_libraries), \
- $(call intermediates-dir-for,STATIC_LIBRARIES,$(l),$(my_kind),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))/link_type))
-ifneq ($(LOCAL_MODULE_CLASS),STATIC_LIBRARIES)
-ifneq ($(LOCAL_MODULE_CLASS),HEADER_LIBRARIES)
-my_link_type_deps += $(strip \
- $(foreach l,$(my_shared_libraries), \
- $(call intermediates-dir-for,SHARED_LIBRARIES,$(l),$(my_kind),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))/link_type))
-endif
-endif
-$(my_link_type): PRIVATE_DEPS := $(my_link_type_deps)
-$(my_link_type): PRIVATE_MODULE := $(LOCAL_MODULE)
-$(my_link_type): PRIVATE_MAKEFILE := $(LOCAL_MODULE_MAKEFILE)
-$(my_link_type): $(my_link_type_deps) $(CHECK_LINK_TYPE)
- @echo Check module type: $@
- $(check-link-type)
+my_link_deps := $(addprefix STATIC_LIBRARIES:,$(my_whole_static_libraries) $(my_static_libraries))
+ifneq ($(filter-out STATIC_LIBRARIES HEADER_LIBRARIES,$(LOCAL_MODULE_CLASS)),)
+my_link_deps += $(addprefix SHARED_LIBRARIES:,$(my_shared_libraries))
+endif
+
+my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+my_common :=
+include $(BUILD_SYSTEM)/link_type.mk
###########################################################
## Common object handling.
@@ -1696,6 +1684,12 @@
ifeq ($(my_tidy_flags),)
my_tidy_flags := $(call default_tidy_header_filter,$(LOCAL_PATH))
endif
+
+ # We might be using the static analyzer through clang-tidy.
+ # https://bugs.llvm.org/show_bug.cgi?id=32914
+ ifneq ($(my_tidy_checks),)
+ my_tidy_flags += "-extra-arg-before=-D__clang_analyzer__"
+ endif
endif
endif
@@ -1815,7 +1809,7 @@
.KATI_RESTAT: $(export_includes)
# Make sure export_includes gets generated when you are running mm/mmm
-$(LOCAL_BUILT_MODULE) : | $(export_includes) $(my_link_type)
+$(LOCAL_BUILT_MODULE) : | $(export_includes)
ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
ifneq (,$(filter-out $(LOCAL_PATH)/%,$(my_export_c_include_dirs)))
@@ -1826,10 +1820,11 @@
$(SOONG_CONV.$(LOCAL_MODULE).PROBLEMS) $(my_soong_problems)
SOONG_CONV.$(LOCAL_MODULE).DEPS := \
$(SOONG_CONV.$(LOCAL_MODULE).DEPS) \
- $(my_static_libraries) \
- $(my_whole_static_libraries) \
- $(my_shared_libraries) \
- $(my_system_shared_libraries)
+ $(filter-out $($(LOCAL_2ND_ARCH_VAR_PREFIX)UBSAN_RUNTIME_LIBRARY),\
+ $(my_static_libraries) \
+ $(my_whole_static_libraries) \
+ $(my_shared_libraries) \
+ $(my_system_shared_libraries))
SOONG_CONV := $(SOONG_CONV) $(LOCAL_MODULE)
endif
diff --git a/core/build-system.html b/core/build-system.html
index e72e141..c7938cc 100644
--- a/core/build-system.html
+++ b/core/build-system.html
@@ -592,6 +592,17 @@
</ul>
</p>
+<h4>LOCAL_ANNOTATION_PROCESSORS</h4>
+<p>Set this to a list of modules built with <code>BUILD_HOST_JAVA_LIBRARY</code>
+to have their jars passed to javac with -processorpath for use as annotation
+processors.</p>
+
+<h4>LOCAL_ANNOTATION_PROCESSOR_CLASSES</h4>
+<p>Set this to a list of classes to be passed to javac as -processor arguments.
+This list would be unnecessary, as javac will autodetect annotation processor
+classes, except that the Grok tool that is used on the Android source code
+does not autodetect them and requires listing them manually.</p>
+
<h4>LOCAL_ASSET_FILES</h4>
<p>In Android.mk files that <code>include $(BUILD_PACKAGE)</code> set this
to the set of files you want built into your app. Usually:</p>
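Example (illustrative sketch, not part of this change; the module, processor, and
class names are hypothetical): a host Java library wiring up an annotation
processor through the two variables documented above.

    include $(CLEAR_VARS)
    LOCAL_MODULE := example-host-lib
    LOCAL_SRC_FILES := $(call all-java-files-under, src)
    # A jar built elsewhere with BUILD_HOST_JAVA_LIBRARY; its path is passed to
    # javac via -processorpath.
    LOCAL_ANNOTATION_PROCESSORS := example-annotation-processor
    # Listed explicitly (passed as -processor) because Grok does not autodetect it.
    LOCAL_ANNOTATION_PROCESSOR_CLASSES := com.example.processor.ExampleProcessor
    include $(BUILD_HOST_JAVA_LIBRARY)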
diff --git a/core/clang/versions.mk b/core/clang/versions.mk
index c5cc690..c2473cd 100644
--- a/core/clang/versions.mk
+++ b/core/clang/versions.mk
@@ -1,4 +1,4 @@
## Clang/LLVM release versions.
-LLVM_PREBUILTS_VERSION ?= clang-3859424
+LLVM_PREBUILTS_VERSION ?= clang-4053586
LLVM_PREBUILTS_BASE ?= prebuilts/clang/host
diff --git a/core/cleanbuild.mk b/core/cleanbuild.mk
index fa89758..0e1c88d 100644
--- a/core/cleanbuild.mk
+++ b/core/cleanbuild.mk
@@ -14,7 +14,7 @@
#
# Don't bother with the cleanspecs if you are running mm/mmm
-ifeq ($(ONE_SHOT_MAKEFILE)$(dont_bother),)
+ifeq ($(ONE_SHOT_MAKEFILE)$(dont_bother)$(NO_ANDROID_CLEANSPEC),)
INTERNAL_CLEAN_STEPS :=
@@ -142,143 +142,7 @@
INTERNAL_CLEAN_STEPS :=
INTERNAL_CLEAN_BUILD_VERSION :=
-endif # if not ONE_SHOT_MAKEFILE dont_bother
-
-# Since products and build variants (unfortunately) share the same
-# PRODUCT_OUT staging directory, things can get out of sync if different
-# build configurations are built in the same tree. The following logic
-# will notice when the configuration has changed and remove the files
-# necessary to keep things consistent.
-
-previous_build_config_file := $(PRODUCT_OUT)/previous_build_config.mk
-current_build_config_file := $(PRODUCT_OUT)/current_build_config.mk
-
-current_build_config := \
- $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)
-force_installclean := false
-
-# Read the current state from the file, if present.
-# Will set PREVIOUS_BUILD_CONFIG.
-#
-PREVIOUS_BUILD_CONFIG :=
--include $(previous_build_config_file)
-PREVIOUS_BUILD_CONFIG := $(strip $(PREVIOUS_BUILD_CONFIG))
-
-ifdef PREVIOUS_BUILD_CONFIG
- ifneq ($(current_build_config),$(PREVIOUS_BUILD_CONFIG))
- $(info *** Build configuration changed: "$(PREVIOUS_BUILD_CONFIG)" -> "$(current_build_config)")
- ifneq ($(DISABLE_AUTO_INSTALLCLEAN),true)
- force_installclean := true
- else
- $(info DISABLE_AUTO_INSTALLCLEAN is set; skipping auto-clean. Your tree may be in an inconsistent state.)
- endif
- endif
-endif # else, this is the first build, so no need to clean.
-
-# Write the new state to the file.
-#
-$(shell \
- mkdir -p $(dir $(current_build_config_file)) && \
- echo "PREVIOUS_BUILD_CONFIG := $(current_build_config)" > \
- $(current_build_config_file) \
- )
-$(shell cmp $(current_build_config_file) $(previous_build_config_file) > /dev/null 2>&1 || \
- mv -f $(current_build_config_file) $(previous_build_config_file))
-
-PREVIOUS_BUILD_CONFIG :=
-previous_build_config_file :=
-current_build_config_file :=
-current_build_config :=
-
-#
-# installclean logic
-#
-
-# The files/dirs to delete during an installclean.
-#
-# Deletes all of the installed files -- the intent is to remove files
-# that may no longer be installed, either because the user previously
-# installed them, or they were previously installed by default but no
-# longer are.
-#
-# This is faster than a full clean, since we're not deleting the
-# intermediates. Instead of recompiling, we can just copy the results.
-#
-# Host bin, frameworks, and lib* are intentionally omitted, since
-# otherwise we'd have to rebuild any generated files created with those
-# tools.
-installclean_files := \
- $(HOST_OUT)/obj/NOTICE_FILES \
- $(HOST_OUT)/obj/PACKAGING \
- $(HOST_OUT)/coverage \
- $(HOST_OUT)/cts \
- $(HOST_OUT)/nativetest* \
- $(HOST_OUT)/sdk \
- $(HOST_OUT)/sdk_addon \
- $(HOST_OUT)/testcases \
- $(HOST_OUT)/vts \
- $(HOST_CROSS_OUT)/bin \
- $(HOST_CROSS_OUT)/coverage \
- $(HOST_CROSS_OUT)/lib* \
- $(HOST_CROSS_OUT)/nativetest* \
- $(PRODUCT_OUT)/*.img \
- $(PRODUCT_OUT)/*.ini \
- $(PRODUCT_OUT)/*.txt \
- $(PRODUCT_OUT)/*.xlb \
- $(PRODUCT_OUT)/*.zip \
- $(PRODUCT_OUT)/kernel \
- $(PRODUCT_OUT)/data \
- $(PRODUCT_OUT)/skin \
- $(PRODUCT_OUT)/obj/NOTICE_FILES \
- $(PRODUCT_OUT)/obj/PACKAGING \
- $(PRODUCT_OUT)/recovery \
- $(PRODUCT_OUT)/root \
- $(PRODUCT_OUT)/system \
- $(PRODUCT_OUT)/system_other \
- $(PRODUCT_OUT)/vendor \
- $(PRODUCT_OUT)/oem \
- $(PRODUCT_OUT)/obj/FAKE \
- $(PRODUCT_OUT)/breakpad \
- $(PRODUCT_OUT)/cache \
- $(PRODUCT_OUT)/coverage \
- $(PRODUCT_OUT)/installer \
- $(PRODUCT_OUT)/odm \
- $(PRODUCT_OUT)/sysloader \
- $(PRODUCT_OUT)/testcases \
-
-# The files/dirs to delete during a dataclean, which removes any files
-# in the staging and emulator data partitions.
-dataclean_files := \
- $(PRODUCT_OUT)/data/* \
- $(PRODUCT_OUT)/data-qemu/* \
- $(PRODUCT_OUT)/userdata-qemu.img
-
-# make sure *_OUT is set so that we won't result in deleting random parts
-# of the filesystem.
-ifneq (2,$(words $(HOST_OUT) $(PRODUCT_OUT)))
- $(error both HOST_OUT and PRODUCT_OUT should be set at this point.)
-endif
-
-# Define the rules for commandline invocation.
-.PHONY: dataclean
-dataclean: FILES := $(dataclean_files)
-dataclean:
- $(hide) rm -rf $(FILES)
- @echo "Deleted emulator userdata images."
-
-.PHONY: installclean
-installclean: FILES := $(installclean_files)
-installclean: dataclean
- $(hide) rm -rf $(FILES)
- @echo "Deleted images and staging directories."
-
-ifeq ($(force_installclean),true)
- $(info *** Forcing "make installclean"...)
- $(info *** rm -rf $(dataclean_files) $(installclean_files))
- $(shell rm -rf $(dataclean_files) $(installclean_files))
- $(info *** Done with the cleaning, now starting the real build.)
-endif
-force_installclean :=
+endif # if not ONE_SHOT_MAKEFILE dont_bother NO_ANDROID_CLEANSPEC
###########################################################
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 91243c7..ac3593b 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -12,6 +12,8 @@
LOCAL_ADDITIONAL_JAVA_DIR:=
LOCAL_AIDL_INCLUDES:=
LOCAL_ALLOW_UNDEFINED_SYMBOLS:=
+LOCAL_ANNOTATION_PROCESSORS:=
+LOCAL_ANNOTATION_PROCESSOR_CLASSES:=
LOCAL_APK_LIBRARIES:=
LOCAL_ARM_MODE:=
LOCAL_ASFLAGS:=
@@ -113,6 +115,8 @@
LOCAL_JARJAR_RULES:=
LOCAL_JAR_MANIFEST:=
LOCAL_JAR_PACKAGES:=
+LOCAL_JAR_PROCESSOR:=
+LOCAL_JAR_PROCESSOR_ARGS:=
LOCAL_JAVACFLAGS:=
LOCAL_JAVA_LANGUAGE_VERSION:=
LOCAL_JAVA_LAYERS_FILE:=
diff --git a/core/config.mk b/core/config.mk
index 59c2a34..8cfda5f 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -133,11 +133,7 @@
# Parse out any modifier targets.
# ###############################################################
-# The 'showcommands' goal says to show the full command
-# lines being executed, instead of a short message about
-# the kind of operation being done.
-SHOW_COMMANDS:= $(filter showcommands,$(MAKECMDGOALS))
-hide := $(if $(SHOW_COMMANDS),,@)
+hide := @
################################################################
# Tools needed in product configuration makefiles.
@@ -480,6 +476,23 @@
BUILD_PLATFORM_ZIP := $(filter platform platform-java,$(MAKECMDGOALS))
+# ---------------------------------------------------------------
+# Whether we can expect a full build graph
+ALLOW_MISSING_DEPENDENCIES := $(filter true,$(ALLOW_MISSING_DEPENDENCIES))
+ifneq ($(TARGET_BUILD_APPS),)
+ALLOW_MISSING_DEPENDENCIES := true
+endif
+ifeq ($(TARGET_BUILD_PDK),true)
+ALLOW_MISSING_DEPENDENCIES := true
+endif
+ifneq ($(filter true,$(SOONG_ALLOW_MISSING_DEPENDENCIES)),)
+ALLOW_MISSING_DEPENDENCIES := true
+endif
+ifneq ($(ONE_SHOT_MAKEFILE),)
+ALLOW_MISSING_DEPENDENCIES := true
+endif
+.KATI_READONLY := ALLOW_MISSING_DEPENDENCIES
+
#
# Tools that are prebuilts for TARGET_BUILD_APPS
#
@@ -496,13 +509,24 @@
BCC_COMPAT := $(HOST_OUT_EXECUTABLES)/bcc_compat
DEPMOD := $(HOST_OUT_EXECUTABLES)/depmod
+#TODO: use a smaller -Xmx value for most libraries;
+# only core.jar and framework.jar need a heap this big.
+ifndef DX_ALT_JAR
DX := $(HOST_OUT_EXECUTABLES)/dx
+DX_COMMAND := $(DX) -JXms16M -JXmx2048M
+else
+DX := $(DX_ALT_JAR)
+DX_COMMAND := java -Xms16M -Xmx2048M -jar $(DX)
+endif
+
MAINDEXCLASSES := $(HOST_OUT_EXECUTABLES)/mainDexClasses
SOONG_ZIP := $(SOONG_HOST_OUT_EXECUTABLES)/soong_zip
ZIP2ZIP := $(SOONG_HOST_OUT_EXECUTABLES)/zip2zip
FILESLIST := $(SOONG_HOST_OUT_EXECUTABLES)/fileslist
+SOONG_JAVAC_WRAPPER := $(SOONG_HOST_OUT_EXECUTABLES)/soong_javac_wrapper
+
# Always use prebuilts for ckati and makeparallel
prebuilt_build_tools := prebuilts/build-tools
ifeq ($(filter address,$(SANITIZE_HOST)),)
@@ -638,6 +662,7 @@
FUTILITY := $(HOST_OUT_EXECUTABLES)/futility-host
VBOOT_SIGNER := prebuilts/misc/scripts/vboot_signer/vboot_signer.sh
FEC := $(HOST_OUT_EXECUTABLES)/fec
+BRILLO_UPDATE_PAYLOAD := $(HOST_OUT_EXECUTABLES)/brillo_update_payload
DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump2$(BUILD_EXECUTABLE_SUFFIX)
PROFMAN := $(HOST_OUT_EXECUTABLES)/profman
@@ -656,13 +681,21 @@
# We may not have the right JAVA_HOME/PATH set up yet when this is run from envsetup.sh.
ifneq ($(CALLED_FROM_SETUP),true)
-HOST_JDK_TOOLS_JAR:= $(shell $(BUILD_SYSTEM)/find-jdk-tools-jar.sh)
+
+# Path to tools.jar, or empty if EXPERIMENTAL_USE_OPENJDK9 is set
+HOST_JDK_TOOLS_JAR :=
+# TODO: Remove HOST_JDK_TOOLS_JAR and all references to it once OpenJDK 8
+# toolchains are no longer supported (i.e. when what is now
+# EXPERIMENTAL_USE_OPENJDK9 becomes the standard). http://b/38418220
+ifeq ($(EXPERIMENTAL_USE_OPENJDK9),)
+HOST_JDK_TOOLS_JAR := $(shell $(BUILD_SYSTEM)/find-jdk-tools-jar.sh)
ifneq ($(HOST_JDK_TOOLS_JAR),)
ifeq ($(wildcard $(HOST_JDK_TOOLS_JAR)),)
$(error Error: could not find jdk tools.jar at $(HOST_JDK_TOOLS_JAR), please check if your JDK was installed correctly)
endif
endif
+endif # ifeq ($(EXPERIMENTAL_USE_OPENJDK9),)
# Is the host JDK 64-bit version?
HOST_JDK_IS_64BIT_VERSION :=
@@ -678,9 +711,13 @@
MD5SUM:=md5sum
endif
-APICHECK_CLASSPATH := $(HOST_JDK_TOOLS_JAR)
-APICHECK_CLASSPATH := $(APICHECK_CLASSPATH):$(HOST_OUT_JAVA_LIBRARIES)/doclava$(COMMON_JAVA_PACKAGE_SUFFIX)
-APICHECK_CLASSPATH := $(APICHECK_CLASSPATH):$(HOST_OUT_JAVA_LIBRARIES)/jsilver$(COMMON_JAVA_PACKAGE_SUFFIX)
+APICHECK_CLASSPATH_ENTRIES := \
+ $(HOST_OUT_JAVA_LIBRARIES)/doclava$(COMMON_JAVA_PACKAGE_SUFFIX) \
+ $(HOST_OUT_JAVA_LIBRARIES)/jsilver$(COMMON_JAVA_PACKAGE_SUFFIX) \
+ $(HOST_JDK_TOOLS_JAR) \
+ )
+APICHECK_CLASSPATH := $(subst $(space),:,$(strip $(APICHECK_CLASSPATH_ENTRIES)))
+
APICHECK_COMMAND := $(APICHECK) -JXmx1024m -J"classpath $(APICHECK_CLASSPATH)"
# Boolean variable determining if Treble is fully enabled
@@ -809,10 +846,10 @@
RS_PREBUILT_CLCORE := prebuilts/sdk/renderscript/lib/$(TARGET_ARCH)/librsrt_$(TARGET_ARCH).bc
RS_PREBUILT_COMPILER_RT := prebuilts/sdk/renderscript/lib/$(TARGET_ARCH)/libcompiler_rt.a
ifeq (true,$(TARGET_IS_64_BIT))
-RS_PREBUILT_LIBPATH := -L prebuilts/ndk/current/platforms/android-21/arch-$(TARGET_ARCH)/usr/lib64 \
- -L prebuilts/ndk/current/platforms/android-21/arch-$(TARGET_ARCH)/usr/lib
+RS_PREBUILT_LIBPATH := -L prebuilts/ndk/r10/platforms/android-21/arch-$(TARGET_ARCH)/usr/lib64 \
+ -L prebuilts/ndk/r10/platforms/android-21/arch-$(TARGET_ARCH)/usr/lib
else
-RS_PREBUILT_LIBPATH := -L prebuilts/ndk/current/platforms/android-9/arch-$(TARGET_ARCH)/usr/lib
+RS_PREBUILT_LIBPATH := -L prebuilts/ndk/r10/platforms/android-9/arch-$(TARGET_ARCH)/usr/lib
endif
# API Level lists for Renderscript Compat lib.
@@ -865,8 +902,7 @@
# These goals don't need to collect and include Android.mks/CleanSpec.mks
# in the source tree.
-dont_bother_goals := clean clobber dataclean installclean \
- help out \
+dont_bother_goals := out \
snod systemimage-nodeps \
stnod systemtarball-nodeps \
userdataimage-nodeps userdatatarball-nodeps \
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index 729ef48..04aedf4 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -63,7 +63,7 @@
endif
# If CFI is disabled globally, remove it from my_sanitize.
-ifeq ($(strip $(ENABLE_CFI)),)
+ifeq ($(strip $(ENABLE_CFI)),false)
my_sanitize := $(filter-out cfi,$(my_sanitize))
my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
endif
@@ -74,6 +74,12 @@
my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
endif
+# Also disable CFI if ASAN is enabled.
+ifneq ($(filter address,$(my_sanitize)),)
+ my_sanitize := $(filter-out cfi,$(my_sanitize))
+ my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
+endif
+
# CFI needs gold linker, and mips toolchain does not have one.
ifneq ($(filter mips mips64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)),)
my_sanitize := $(filter-out cfi,$(my_sanitize))
diff --git a/core/definitions.mk b/core/definitions.mk
index cf877f5..1da169f 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -846,10 +846,10 @@
## Use echo-(warning|error) in a build rule
## Use pretty-(warning|error) instead of $(warning)/$(error)
###########################################################
-ESC_BOLD := \e[1m
-ESC_WARNING := \e[35m
-ESC_ERROR := \e[31m
-ESC_RESET := \e[0m
+ESC_BOLD := \033[1m
+ESC_WARNING := \033[35m
+ESC_ERROR := \033[31m
+ESC_RESET := \033[0m
# $(1): path (and optionally line) information
# $(2): message to print
@@ -1017,12 +1017,15 @@
$(hide) echo >> $2
endef
+# b/37755219
+RS_CC_ASAN_OPTIONS := ASAN_OPTIONS=detect_leaks=0:detect_container_overflow=0
+
define transform-renderscripts-to-java-and-bc
@echo "RenderScript: $(PRIVATE_MODULE) <= $(PRIVATE_RS_SOURCE_FILES)"
$(hide) rm -rf $(PRIVATE_RS_OUTPUT_DIR)
$(hide) mkdir -p $(PRIVATE_RS_OUTPUT_DIR)/res/raw
$(hide) mkdir -p $(PRIVATE_RS_OUTPUT_DIR)/src
-$(hide) $(PRIVATE_RS_CC) \
+$(hide) $(RS_CC_ASAN_OPTIONS) $(PRIVATE_RS_CC) \
-o $(PRIVATE_RS_OUTPUT_DIR)/res/raw \
-p $(PRIVATE_RS_OUTPUT_DIR)/src \
-d $(PRIVATE_RS_OUTPUT_DIR) \
@@ -1046,8 +1049,9 @@
$(dir $@)/$(notdir $(<:.bc=.o)) \
$(RS_PREBUILT_COMPILER_RT) \
-o $@ $(TARGET_GLOBAL_LDFLAGS) -Wl,--hash-style=sysv -L prebuilts/gcc/ \
- $(RS_PREBUILT_LIBPATH) -L $(TARGET_OUT_INTERMEDIATE_LIBRARIES) \
- -lRSSupport -lm -lc
+ $(RS_PREBUILT_LIBPATH) \
+ $(call intermediates-dir-for,SHARED_LIBRARIES,libRSSupport)/libRSSupport.so \
+ -lm -lc
endef
###########################################################
@@ -1058,7 +1062,7 @@
@echo "RenderScript: $(PRIVATE_MODULE) <= $(PRIVATE_RS_SOURCE_FILES)"
$(hide) rm -rf $(PRIVATE_RS_OUTPUT_DIR)
$(hide) mkdir -p $(PRIVATE_RS_OUTPUT_DIR)/
-$(hide) $(PRIVATE_RS_CC) \
+$(hide) $(RS_CC_ASAN_OPTIONS) $(PRIVATE_RS_CC) \
-o $(PRIVATE_RS_OUTPUT_DIR)/ \
-d $(PRIVATE_RS_OUTPUT_DIR) \
-a $@ -MD \
@@ -2006,6 +2010,9 @@
APPS_DEFAULT_VERSION_NAME := $(PLATFORM_VERSION)
endif
+# b/37750224
+AAPT_ASAN_OPTIONS := ASAN_OPTIONS=detect_leaks=0
+
# TODO: Right now we generate the asset resources twice, first as part
# of generating the Java classes, then at the end when packaging the final
# assets. This should be changed to do one of two things: (1) Don't generate
@@ -2020,7 +2027,7 @@
define create-resource-java-files
@mkdir -p $(PRIVATE_SOURCE_INTERMEDIATES_DIR)
@mkdir -p $(dir $(PRIVATE_RESOURCE_PUBLICS_OUTPUT))
-$(hide) $(AAPT) package $(PRIVATE_AAPT_FLAGS) -m \
+$(hide) $(AAPT_ASAN_OPTIONS) $(AAPT) package $(PRIVATE_AAPT_FLAGS) -m \
$(eval # PRIVATE_PRODUCT_AAPT_CONFIG is intentionally missing-- see comment.) \
$(addprefix -J , $(PRIVATE_SOURCE_INTERMEDIATES_DIR)) \
$(addprefix -M , $(PRIVATE_ANDROID_MANIFEST)) \
@@ -2199,9 +2206,9 @@
# $(2): bootclasspath
define compile-java
$(hide) rm -f $@
-$(hide) rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR)
+$(hide) rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR) $(PRIVATE_ANNO_INTERMEDIATES_DIR)
$(hide) mkdir -p $(dir $@)
-$(hide) mkdir -p $(PRIVATE_CLASS_INTERMEDIATES_DIR)
+$(hide) mkdir -p $(PRIVATE_CLASS_INTERMEDIATES_DIR) $(PRIVATE_ANNO_INTERMEDIATES_DIR)
$(call unzip-jar-files,$(PRIVATE_STATIC_JAVA_LIBRARIES),$(PRIVATE_CLASS_INTERMEDIATES_DIR))
$(call dump-words-to-file,$(PRIVATE_JAVA_SOURCES),$(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list)
$(hide) if [ -d "$(PRIVATE_SOURCE_INTERMEDIATES_DIR)" ]; then \
@@ -2214,13 +2221,13 @@
$(hide) tr ' ' '\n' < $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list \
| $(NORMALIZE_PATH) | sort -u > $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq
$(hide) if [ -s $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq ] ; then \
- $(1) -encoding UTF-8 \
+ $(SOONG_JAVAC_WRAPPER) $(1) -encoding UTF-8 \
$(if $(findstring true,$(PRIVATE_WARNINGS_ENABLE)),$(xlint_unchecked),) \
$(2) \
$(addprefix -classpath ,$(strip \
$(call normalize-path-list,$(PRIVATE_ALL_JAVA_LIBRARIES)))) \
$(if $(findstring true,$(PRIVATE_WARNINGS_ENABLE)),$(xlint_unchecked),) \
- -extdirs "" -d $(PRIVATE_CLASS_INTERMEDIATES_DIR) \
+ -extdirs "" -d $(PRIVATE_CLASS_INTERMEDIATES_DIR) -s $(PRIVATE_ANNO_INTERMEDIATES_DIR) \
$(PRIVATE_JAVACFLAGS) \
\@$(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq \
|| ( rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR) ; exit 41 ) \
@@ -2402,13 +2409,16 @@
fi
endef
+# b/37756495
+IJAR_ASAN_OPTIONS := ASAN_OPTIONS=detect_leaks=0
+
## Rule to create a table of contents from a .jar file.
## Must be called with $(eval).
# $(1): A .jar file
define _transform-jar-to-toc
$1.toc: $1 | $(IJAR)
@echo Generating TOC: $$@
- $(hide) $(IJAR) $$< $$@.tmp
+ $(hide) $(IJAR_ASAN_OPTIONS) $(IJAR) $$< $$@.tmp
$$(call commit-change-for-toc,$$@)
endef
@@ -2530,11 +2540,17 @@
$(if $(filter $(1),$(PLATFORM_VERSION_CODENAME)),10000,$(1))
endef
+# --add-opens is required because desugar reflects via java.lang.invoke.MethodHandles.Lookup
define desugar-classes-jar
@echo Desugar: $@
@mkdir -p $(dir $@)
$(hide) rm -f $@ $@.tmp
-$(hide) java -jar $(DESUGAR) \
+@rm -rf $(dir $@)/desugar_dumped_classes
+@mkdir $(dir $@)/desugar_dumped_classes
+$(hide) java \
+ $(if $(EXPERIMENTAL_USE_OPENJDK9),--add-opens java.base/java.lang.invoke=ALL-UNNAMED,) \
+ -Djdk.internal.lambda.dumpProxyClasses=$(abspath $(dir $@))/desugar_dumped_classes \
+ -jar $(DESUGAR) \
$(addprefix --bootclasspath_entry ,$(call desugar-bootclasspath,$(PRIVATE_BOOTCLASSPATH))) \
$(addprefix --classpath_entry ,$(PRIVATE_ALL_JAVA_LIBRARIES)) \
--min_sdk_version $(call codename-or-sdk-to-sdk,$(PRIVATE_DEFAULT_APP_TARGET_SDK)) \
@@ -2545,14 +2561,11 @@
endef
-#TODO: use a smaller -Xmx value for most libraries;
-# only core.jar and framework.jar need a heap this big.
define transform-classes.jar-to-dex
@echo "target Dex: $(PRIVATE_MODULE)"
@mkdir -p $(dir $@)
$(hide) rm -f $(dir $@)classes*.dex
-$(hide) $(DX) \
- -JXms16M -JXmx2048M \
+$(hide) $(DX_COMMAND) \
--dex --output=$(dir $@) \
--min-sdk-version=$(call codename-or-sdk-to-sdk,$(PRIVATE_DEFAULT_APP_TARGET_SDK)) \
$(if $(NO_OPTIMIZE_DX), \
@@ -2605,7 +2618,7 @@
#values; applications can override these by explicitly stating
#them in their manifest.
define add-assets-to-package
-$(hide) $(AAPT) package -u $(PRIVATE_AAPT_FLAGS) \
+$(hide) $(AAPT_ASAN_OPTIONS) $(AAPT) package -u $(PRIVATE_AAPT_FLAGS) \
$(addprefix -c , $(PRIVATE_PRODUCT_AAPT_CONFIG)) \
$(addprefix --preferred-density , $(PRIVATE_PRODUCT_AAPT_PREF_CONFIG)) \
$(addprefix -M , $(PRIVATE_ANDROID_MANIFEST)) \
@@ -2689,10 +2702,14 @@
$(call sign-package-arg,$@)
endef
+# signapk uses internal APIs from sun.security.{pkcs,x509}; see http://b/37137869
# $(1): the package file we are signing.
define sign-package-arg
$(hide) mv $(1) $(1).unsigned
-$(hide) java -Djava.library.path=$(SIGNAPK_JNI_LIBRARY_PATH) -jar $(SIGNAPK_JAR) \
+$(hide) java -Djava.library.path=$(SIGNAPK_JNI_LIBRARY_PATH) \
+ $(if $(EXPERIMENTAL_USE_OPENJDK9),--add-exports java.base/sun.security.pkcs=ALL-UNNAMED,) \
+ $(if $(EXPERIMENTAL_USE_OPENJDK9),--add-exports java.base/sun.security.x509=ALL-UNNAMED,) \
+ -jar $(SIGNAPK_JAR) \
$(PRIVATE_CERTIFICATE) $(PRIVATE_PRIVATE_KEY) \
$(PRIVATE_ADDITIONAL_CERTIFICATES) $(1).unsigned $(1).signed
$(hide) mv $(1).signed $(1)
@@ -3150,52 +3167,15 @@
endef
###########################################################
-# Link type checking
-###########################################################
-define check-link-type
-$(hide) mkdir -p $(dir $@) && rm -f $@
-$(hide) $(CHECK_LINK_TYPE) --makefile $(PRIVATE_MAKEFILE) --module $(PRIVATE_MODULE) \
- --type "$(PRIVATE_LINK_TYPE)" $(addprefix --allowed ,$(PRIVATE_ALLOWED_TYPES)) \
- $(addprefix --warn ,$(PRIVATE_WARN_TYPES)) $(PRIVATE_DEPS)
-$(hide) echo "$(PRIVATE_LINK_TYPE)" >$@
-endef
-
-define link-type-partitions
-ifndef LOCAL_IS_HOST_MODULE
-ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
-ifneq ($(filter $(TARGET_OUT_VENDOR)/%,$(my_module_path)),)
-$(1): PRIVATE_LINK_TYPE += partition:vendor
-$(1): PRIVATE_WARN_TYPES += partition:data
-$(1): PRIVATE_ALLOWED_TYPES += partition:vendor partition:oem partition:odm
-else ifneq ($(filter $(TARGET_OUT_OEM)/%,$(my_module_path)),)
-$(1): PRIVATE_LINK_TYPE += partition:oem
-$(1): PRIVATE_WARN_TYPES += partition:data
-$(1): PRIVATE_ALLOWED_TYPES += partition:vendor partition:oem partition:odm
-else ifneq ($(filter $(TARGET_OUT_ODM)/%,$(my_module_path)),)
-$(1): PRIVATE_LINK_TYPE += partition:odm
-$(1): PRIVATE_WARN_TYPES += partition:data
-$(1): PRIVATE_ALLOWED_TYPES += partition:vendor partition:oem partition:odm
-else ifneq ($(filter $(TARGET_OUT_DATA)/%,$(my_module_path)),)
-$(1): PRIVATE_LINK_TYPE += partition:data
-$(1): PRIVATE_ALLOWED_TYPES += partition:data partition:vendor partition:oem partition:odm
-else
-$(1): PRIVATE_WARN_TYPES += partition:vendor partition:oem partition:odm partition:data
-endif
-else # uninstallable module
-$(1): PRIVATE_ALLOWED_TYPES += partition:vendor partition:oem partition:odm partition:data
-endif
-endif
-endef
-
-###########################################################
## Compatibility suite tools
###########################################################
-# Return a list of output directories for a given suite and the current LOCAL_MODULE
+# Return a list of output directories for a given suite and the current LOCAL_MODULE.
+# Can be passed a subdirectory to use for the common testcase directory.
define compatibility_suite_dirs
$(strip \
$(COMPATIBILITY_TESTCASES_OUT_$(1)) \
- $($(my_prefix)OUT_TESTCASES)/$(LOCAL_MODULE))
+ $($(my_prefix)OUT_TESTCASES)/$(LOCAL_MODULE)$(2))
endef
# For each suite:
@@ -3205,13 +3185,148 @@
# Requires for each suite: my_compat_dist_$(suite) to be defined.
define create-suite-dependencies
$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
- $(eval my_compat_files_$(suite) := $(call copy-many-files, $(my_compat_dist_$(suite)))) \
$(eval COMPATIBILITY.$(suite).FILES := \
- $(COMPATIBILITY.$(suite).FILES) $(my_compat_files_$(suite))) \
- $(eval $(my_all_targets) : $(my_compat_files_$(suite))))
+ $(COMPATIBILITY.$(suite).FILES) $(foreach f,$(my_compat_dist_$(suite)),$(call word-colon,2,$(f))))) \
+$(eval $(my_all_targets) : $(call copy-many-files, \
+ $(sort $(foreach suite,$(LOCAL_COMPATIBILITY_SUITE),$(my_compat_dist_$(suite))))))
endef
###########################################################
+## Path Cleaning
+###########################################################
+
+# Remove "dir .." combinations (but keep ".. ..")
+#
+# $(1): The expanded path, where / is converted to ' ' to work with $(word)
+define _clean-path-strip-dotdot
+$(strip \
+ $(if $(word 2,$(1)),
+ $(if $(call streq,$(word 2,$(1)),..),
+ $(if $(call streq,$(word 1,$(1)),..),
+ $(word 1,$(1)) $(call _clean-path-strip-dotdot,$(wordlist 2,$(words $(1)),$(1)))
+ ,
+ $(call _clean-path-strip-dotdot,$(wordlist 3,$(words $(1)),$(1)))
+ )
+ ,
+ $(word 1,$(1)) $(call _clean-path-strip-dotdot,$(wordlist 2,$(words $(1)),$(1)))
+ )
+ ,
+ $(1)
+ )
+)
+endef
+
+# Remove any leading .. from the path (in case of /..)
+#
+# Should only be called if the original path started with /
+# $(1): The expanded path, where / is converted to ' ' to work with $(word)
+define _clean-path-strip-root-dotdots
+$(strip $(if $(call streq,$(firstword $(1)),..),
+ $(call _clean-path-strip-root-dotdots,$(wordlist 2,$(words $(1)),$(1))),
+ $(1)))
+endef
+
+# Call _clean-path-strip-dotdot until the path stops changing
+# $(1): Non-empty if this path started with a /
+# $(2): The expanded path, where / is converted to ' ' to work with $(word)
+define _clean-path-expanded
+$(strip \
+ $(eval _ep := $(call _clean-path-strip-dotdot,$(2)))
+ $(if $(1),$(eval _ep := $(call _clean-path-strip-root-dotdots,$(_ep))))
+ $(if $(call streq,$(2),$(_ep)),
+ $(_ep),
+ $(call _clean-path-expanded,$(1),$(_ep))))
+endef
+
+# Clean the file path -- remove //, dir/.., extra .
+#
+# This should have the same semantics as golang's filepath.Clean
+#
+# $(1): The file path to clean
+define clean-path
+$(strip \
+ $(if $(call streq,$(words $(1)),1),
+ $(eval _rooted := $(filter /%,$(1)))
+ $(eval _expanded_path := $(filter-out .,$(subst /,$(space),$(1))))
+ $(eval _path := $(if $(_rooted),/)$(subst $(space),/,$(call _clean-path-expanded,$(_rooted),$(_expanded_path))))
+ $(if $(_path),
+ $(_path),
+ .
+ )
+ ,
+ $(if $(call streq,$(words $(1)),0),
+ .,
+ $(error Call clean-path with only one path (without spaces))
+ )
+ )
+)
+endef
+
+ifeq ($(TEST_MAKE_clean_path),true)
+ define my_test
+ $(if $(call streq,$(call clean-path,$(1)),$(2)),,
+ $(eval my_failed := true)
+ $(warning clean-path test '$(1)': expected '$(2)', got '$(call clean-path,$(1))'))
+ endef
+ my_failed :=
+
+ # Already clean
+ $(call my_test,abc,abc)
+ $(call my_test,abc/def,abc/def)
+ $(call my_test,a/b/c,a/b/c)
+ $(call my_test,.,.)
+ $(call my_test,..,..)
+ $(call my_test,../..,../..)
+ $(call my_test,../../abc,../../abc)
+ $(call my_test,/abc,/abc)
+ $(call my_test,/,/)
+
+ # Empty is current dir
+ $(call my_test,,.)
+
+ # Remove trailing slash
+ $(call my_test,abc/,abc)
+ $(call my_test,abc/def/,abc/def)
+ $(call my_test,a/b/c/,a/b/c)
+ $(call my_test,./,.)
+ $(call my_test,../,..)
+ $(call my_test,../../,../..)
+ $(call my_test,/abc/,/abc)
+
+ # Remove doubled slash
+ $(call my_test,abc//def//ghi,abc/def/ghi)
+ $(call my_test,//abc,/abc)
+ $(call my_test,///abc,/abc)
+ $(call my_test,//abc//,/abc)
+ $(call my_test,abc//,abc)
+
+ # Remove . elements
+ $(call my_test,abc/./def,abc/def)
+ $(call my_test,/./abc/def,/abc/def)
+ $(call my_test,abc/.,abc)
+
+ # Remove .. elements
+ $(call my_test,abc/def/ghi/../jkl,abc/def/jkl)
+ $(call my_test,abc/def/../ghi/../jkl,abc/jkl)
+ $(call my_test,abc/def/..,abc)
+ $(call my_test,abc/def/../..,.)
+ $(call my_test,/abc/def/../..,/)
+ $(call my_test,abc/def/../../..,..)
+ $(call my_test,/abc/def/../../..,/)
+ $(call my_test,abc/def/../../../ghi/jkl/../../../mno,../../mno)
+ $(call my_test,/../abc,/abc)
+
+ # Combinations
+ $(call my_test,abc/./../def,def)
+ $(call my_test,abc//./../def,def)
+ $(call my_test,abc/../../././../def,../../def)
+
+ ifdef my_failed
+ $(error failed clean-path test)
+ endif
+endif
+
+###########################################################
## Other includes
###########################################################
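Example (illustrative, not part of this change): clean-path follows the same
semantics as golang's filepath.Clean, so

    $(call clean-path,out//target/./product/../common)

evaluates to out/target/common.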
diff --git a/core/distdir.mk b/core/distdir.mk
index 51ec46e..89c5966 100644
--- a/core/distdir.mk
+++ b/core/distdir.mk
@@ -17,16 +17,8 @@
# When specifying "dist", the user has asked that we copy the important
# files from this build into DIST_DIR.
-.PHONY: dist
-dist: ;
-
dist_goal := $(strip $(filter dist,$(MAKECMDGOALS)))
MAKECMDGOALS := $(strip $(filter-out dist,$(MAKECMDGOALS)))
-ifeq (,$(strip $(filter-out $(INTERNAL_MODIFIER_TARGETS),$(MAKECMDGOALS))))
-# The commandline was something like "make dist" or "make dist showcommands".
-# Add a dependency on a real target.
-dist: $(DEFAULT_GOAL)
-endif
ifdef dist_goal
diff --git a/core/droiddoc.mk b/core/droiddoc.mk
index 2285b2c..27e44a0 100644
--- a/core/droiddoc.mk
+++ b/core/droiddoc.mk
@@ -175,6 +175,7 @@
$(hide) ( \
javadoc \
-encoding UTF-8 \
+ -source 1.8 \
\@$(PRIVATE_SRC_LIST_FILE) \
-J-Xmx1600m \
-J-XX:-OmitStackTraceInFastThrow \
@@ -216,7 +217,7 @@
\@$(PRIVATE_SRC_LIST_FILE) \
-J-Xmx1024m \
-XDignore.symbol.file \
- $(if $(LEGACY_USE_JAVA7),,-Xdoclint:none) \
+ -Xdoclint:none \
$(PRIVATE_PROFILING_OPTIONS) \
$(addprefix -classpath ,$(PRIVATE_CLASSPATH)) \
$(addprefix -bootclasspath ,$(PRIVATE_BOOTCLASSPATH)) \
diff --git a/core/dynamic_binary.mk b/core/dynamic_binary.mk
index 579338e..949793b 100644
--- a/core/dynamic_binary.mk
+++ b/core/dynamic_binary.mk
@@ -23,7 +23,7 @@
# The basename of this target must be the same as the final output
# binary name, because it's used to set the "soname" in the binary.
# The includer of this file will define a rule to build this target.
-linked_module := $(intermediates)/LINKED/$(my_built_module_stem)
+linked_module := $(intermediates)/LINKED/$(notdir $(my_installed_module_stem))
ALL_ORIGINAL_DYNAMIC_BINARIES += $(linked_module)
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 67ac751..43593e6 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -173,6 +173,12 @@
TARGET_COPY_OUT_ODM := odm
TARGET_COPY_OUT_ROOT := root
TARGET_COPY_OUT_RECOVERY := recovery
+
+# Returns the non-sanitized version of the path provided in $1.
+define get_non_asan_path
+$(patsubst $(PRODUCT_OUT)/$(TARGET_COPY_OUT_ASAN)/%,$(PRODUCT_OUT)/%,$1)
+endef
+
###########################################
# Define TARGET_COPY_OUT_VENDOR to a placeholder, for at this point
# we don't know if the device wants to build a separate vendor.img
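Example (illustrative, not part of this change; the library name is hypothetical,
and TARGET_COPY_OUT_ASAN is assumed to expand to data/asan):

    $(call get_non_asan_path,$(PRODUCT_OUT)/data/asan/vendor/lib64/libfoo.so)

evaluates to $(PRODUCT_OUT)/vendor/lib64/libfoo.so, i.e. the path where the
non-sanitized copy of the same file lives.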
@@ -384,10 +390,19 @@
$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_EXECUTABLES := $(HOST_CROSS_OUT_EXECUTABLES)
$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_NATIVE_TESTS := $(HOST_CROSS_OUT)/nativetest64
-TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj
+ifneq ($(filter address,$(SANITIZE_TARGET)),)
+ TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj_asan
+else
+ TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj
+endif
TARGET_OUT_HEADERS := $(TARGET_OUT_INTERMEDIATES)/include
TARGET_OUT_INTERMEDIATE_LIBRARIES := $(TARGET_OUT_INTERMEDIATES)/lib
-TARGET_OUT_COMMON_INTERMEDIATES := $(TARGET_COMMON_OUT_ROOT)/obj
+
+ifneq ($(filter address,$(SANITIZE_TARGET)),)
+ TARGET_OUT_COMMON_INTERMEDIATES := $(TARGET_COMMON_OUT_ROOT)/obj_asan
+else
+ TARGET_OUT_COMMON_INTERMEDIATES := $(TARGET_COMMON_OUT_ROOT)/obj
+endif
TARGET_OUT_GEN := $(PRODUCT_OUT)/gen
TARGET_OUT_COMMON_GEN := $(TARGET_COMMON_OUT_ROOT)/gen
@@ -429,7 +444,12 @@
else
TARGET_2ND_ARCH_MODULE_SUFFIX := $(HOST_2ND_ARCH_MODULE_SUFFIX)
endif
-$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj_$(TARGET_2ND_ARCH)
+
+ifneq ($(filter address,$(SANITIZE_TARGET)),)
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj_$(TARGET_2ND_ARCH)_asan
+else
+ $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj_$(TARGET_2ND_ARCH)
+endif
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATES)/lib
ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SHARED_LIBRARIES := $(target_out_shared_libraries_base)/lib/$(TARGET_2ND_ARCH)
diff --git a/core/fuzz_test.mk b/core/fuzz_test.mk
index f6d6e9a..4d41871 100644
--- a/core/fuzz_test.mk
+++ b/core/fuzz_test.mk
@@ -8,8 +8,8 @@
$(error $(LOCAL_PATH): $(LOCAL_MODULE): NDK fuzz tests are not supported.)
endif
-LOCAL_CFLAGS += -fsanitize-coverage=edge,indirect-calls,8bit-counters,trace-cmp
-LOCAL_STATIC_LIBRARIES += libLLVMFuzzer
+LOCAL_CFLAGS += -fsanitize-coverage=trace-pc-guard
+LOCAL_STATIC_LIBRARIES += libFuzzer
ifdef LOCAL_MODULE_PATH
$(error $(LOCAL_PATH): Do not set LOCAL_MODULE_PATH when building test $(LOCAL_MODULE))
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index 34e88ce..7101229 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -86,7 +86,7 @@
ifndef LOCAL_JACK_ENABLED
$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
-$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS)
+$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
$(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
@@ -96,7 +96,10 @@
$(full_java_lib_deps) \
$(jar_manifest_file) \
$(proto_java_sources_file_stamp) \
- $(LOCAL_ADDITIONAL_DEPENDENCIES)
+ $(annotation_processor_deps) \
+ $(NORMALIZE_PATH) \
+ $(LOCAL_ADDITIONAL_DEPENDENCIES) \
+ | $(SOONG_JAVAC_WRAPPER)
$(transform-host-java-to-package)
my_desugaring :=
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index f1da553..d30c90d 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -64,7 +64,7 @@
endif
$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
-$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS)
+$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
$(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
@@ -74,9 +74,13 @@
$(full_java_lib_deps) \
$(jar_manifest_file) \
$(proto_java_sources_file_stamp) \
+ $(annotation_processor_deps) \
$(NORMALIZE_PATH) \
- $(LOCAL_ADDITIONAL_DEPENDENCIES)
+ $(ZIPTIME) \
+ $(LOCAL_ADDITIONAL_DEPENDENCIES) \
+ | $(SOONG_JAVAC_WRAPPER)
$(transform-host-java-to-package)
+ $(remove-timestamps-from-package)
javac-check : $(full_classes_compiled_jar)
javac-check-$(LOCAL_MODULE) : $(full_classes_compiled_jar)
diff --git a/core/install_jni_libs_internal.mk b/core/install_jni_libs_internal.mk
index c5804a4..0e92153 100644
--- a/core/install_jni_libs_internal.mk
+++ b/core/install_jni_libs_internal.mk
@@ -54,7 +54,8 @@
# The jni libraries will be installed to the system.img.
my_jni_filenames := $(notdir $(my_jni_shared_libraries))
# Make sure the JNI libraries get installed
-my_shared_library_path := $($(my_2nd_arch_prefix)TARGET_OUT$(partition_tag)_SHARED_LIBRARIES)
+my_shared_library_path := $(call get_non_asan_path,\
+ $($(my_2nd_arch_prefix)TARGET_OUT$(partition_tag)_SHARED_LIBRARIES))
# Do not use order-only dependency, because we want to rebuild the image if a jni library is updated.
$(LOCAL_INSTALLED_MODULE) : $(addprefix $(my_shared_library_path)/, $(my_jni_filenames))
@@ -108,30 +109,18 @@
# Verify that all included libraries are built against the NDK
ifneq ($(strip $(LOCAL_JNI_SHARED_LIBRARIES)),)
-my_link_type := $(call intermediates-dir-for,APPS,$(LOCAL_MODULE))/$(my_2nd_arch_prefix)jni_link_type
-all_link_types: $(my_link_type)
-my_link_type_deps := $(strip \
- $(foreach l,$(LOCAL_JNI_SHARED_LIBRARIES),\
- $(call intermediates-dir-for,SHARED_LIBRARIES,$(l),,,$(my_2nd_arch_prefix))/link_type))
ifneq ($(LOCAL_SDK_VERSION),)
-$(my_link_type): PRIVATE_LINK_TYPE := app:sdk
-$(my_link_type): PRIVATE_WARN_TYPES := native:platform
-$(my_link_type): PRIVATE_ALLOWED_TYPES := native:ndk
+my_link_type := app:sdk
+my_warn_types := native:platform
+my_allowed_types := native:ndk
else
-$(my_link_type): PRIVATE_LINK_TYPE := app:platform
-$(my_link_type): PRIVATE_WARN_TYPES :=
-$(my_link_type): PRIVATE_ALLOWED_TYPES := native:ndk native:platform
+my_link_type := app:platform
+my_warn_types :=
+my_allowed_types := native:ndk native:platform
endif
-$(eval $(call link-type-partitions,$(my_link_type)))
-$(my_link_type): PRIVATE_DEPS := $(my_link_type_deps)
-$(my_link_type): PRIVATE_MODULE := $(LOCAL_MODULE)
-$(my_link_type): PRIVATE_MAKEFILE := $(LOCAL_MODULE_MAKEFILE)
-$(my_link_type): $(my_link_type_deps) $(CHECK_LINK_TYPE)
- @echo Check JNI module types: $@
- $(check-link-type)
-$(LOCAL_BUILT_MODULE): | $(my_link_type)
+my_link_deps := $(addprefix SHARED_LIBRARIES:,$(LOCAL_JNI_SHARED_LIBRARIES))
-my_link_type :=
-my_link_type_deps :=
+my_common :=
+include $(BUILD_SYSTEM)/link_type.mk
endif
diff --git a/core/jack-default.args b/core/jack-default.args
index 0232301..433bc53 100644
--- a/core/jack-default.args
+++ b/core/jack-default.args
@@ -5,3 +5,5 @@
-D jack.reporter.level.file=error=--,warning=-
--verbose error
-D jack.jayce.cache=false
+-D jack.lambda.grouping-scope=package
+-D jack.lambda.simplify-stateless=true
diff --git a/core/java.mk b/core/java.mk
index b31e316..98f6ca7 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -114,6 +114,7 @@
endif
full_classes_compiled_jar := $(intermediates.COMMON)/$(full_classes_compiled_jar_leaf)
+full_classes_processed_jar := $(intermediates.COMMON)/classes-processed.jar
full_classes_desugar_jar := $(intermediates.COMMON)/classes-desugar.jar
jarjar_leaf := classes-jarjar.jar
full_classes_jarjar_jar := $(intermediates.COMMON)/$(jarjar_leaf)
@@ -428,7 +429,7 @@
LOCAL_JAVACFLAGS += $(LOCAL_ERROR_PRONE_FLAGS)
endif
-$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS)
+$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES := $(LOCAL_JAR_EXCLUDE_FILES)
$(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES := $(LOCAL_JAR_PACKAGES)
$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES := $(LOCAL_JAR_EXCLUDE_PACKAGES)
@@ -441,25 +442,50 @@
$(layers_file) \
$(RenderScript_file_stamp) \
$(proto_java_sources_file_stamp) \
+ $(annotation_processor_deps) \
$(NORMALIZE_PATH) \
- $(LOCAL_ADDITIONAL_DEPENDENCIES)
+ $(LOCAL_ADDITIONAL_DEPENDENCIES) \
+ | $(SOONG_JAVAC_WRAPPER)
$(transform-java-to-classes.jar)
javac-check : $(full_classes_compiled_jar)
javac-check-$(LOCAL_MODULE) : $(full_classes_compiled_jar)
+ifdef LOCAL_JAR_PROCESSOR
+# LOCAL_JAR_PROCESSOR_ARGS must be evaluated here to set up the rule-local
+# PRIVATE_JAR_PROCESSOR_ARGS variable, but $< and $@ are not available yet.
+# Set ${in} and ${out} so they can be referenced by LOCAL_JAR_PROCESSOR_ARGS
+# using deferred evaluation (LOCAL_JAR_PROCESSOR_ARGS = instead of :=).
+in := $(full_classes_compiled_jar)
+out := $(full_classes_processed_jar).tmp
+$(full_classes_processed_jar): PRIVATE_JAR_PROCESSOR_ARGS := $(LOCAL_JAR_PROCESSOR_ARGS)
+$(full_classes_processed_jar): PRIVATE_JAR_PROCESSOR := $(HOST_OUT_JAVA_LIBRARIES)/$(LOCAL_JAR_PROCESSOR).jar
+$(full_classes_processed_jar): PRIVATE_TMP_OUT := $(out)
+in :=
+out :=
+
+$(full_classes_processed_jar): $(full_classes_compiled_jar) $(LOCAL_JAR_PROCESSOR)
+ @echo Processing $@ with $(PRIVATE_JAR_PROCESSOR)
+ $(hide) rm -f $@ $(PRIVATE_TMP_OUT)
+ $(hide) java -jar $(PRIVATE_JAR_PROCESSOR) $(PRIVATE_JAR_PROCESSOR_ARGS)
+ $(hide) mv $(PRIVATE_TMP_OUT) $@
+
+else
+full_classes_processed_jar := $(full_classes_compiled_jar)
+endif
+
my_desugaring :=
ifndef LOCAL_JACK_ENABLED
ifndef LOCAL_IS_STATIC_JAVA_LIBRARY
my_desugaring := true
$(full_classes_desugar_jar): PRIVATE_DX_FLAGS := $(LOCAL_DX_FLAGS)
-$(full_classes_desugar_jar): $(full_classes_compiled_jar) $(DESUGAR)
+$(full_classes_desugar_jar): $(full_classes_processed_jar) $(DESUGAR)
$(desugar-classes-jar)
endif
endif
ifndef my_desugaring
-full_classes_desugar_jar := $(full_classes_compiled_jar)
+full_classes_desugar_jar := $(full_classes_processed_jar)
endif
# Run jarjar if necessary
@@ -611,7 +637,7 @@
endif
# If not using jack and building against the current SDK version then filter
-# out junit and android.test classes from the application that are to be
+# out the junit, android.test and c.a.i.u.Predicate classes that are to be
# removed from the Android API as part of b/30188076 but which are still
# present in the Android API. This is to allow changes to be made to the
# build to statically include those classes into the application without
@@ -620,7 +646,7 @@
ifndef LOCAL_JACK_ENABLED
ifdef LOCAL_SDK_VERSION
ifeq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
-proguard_injar_filters := (!junit/framework/**,!junit/runner/**,!android/test/**)
+proguard_injar_filters := (!junit/framework/**,!junit/runner/**,!android/test/**,!com/android/internal/util/*)
endif
endif
endif
@@ -736,13 +762,14 @@
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_PROGUARD_FLAGS :=
endif # LOCAL_PROGUARD_ENABLED defined
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_FLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JACK_FLAGS)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_FLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JACK_FLAGS) $(annotation_processor_flags)
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_VERSION := $(LOCAL_JACK_VERSION)
jack_all_deps := $(java_sources) $(java_resource_sources) $(full_jack_deps) \
$(jar_manifest_file) $(layers_file) $(RenderScript_file_stamp) \
$(common_proguard_flag_files) $(proguard_flag_files) \
- $(proto_java_sources_file_stamp) $(LOCAL_ADDITIONAL_DEPENDENCIES) $(LOCAL_JARJAR_RULES) \
+ $(proto_java_sources_file_stamp) $(annotation_processor_deps) \
+ $(LOCAL_ADDITIONAL_DEPENDENCIES) $(LOCAL_JARJAR_RULES) \
$(NORMALIZE_PATH) $(JACK_DEFAULT_ARGS) $(JACK)
$(jack_check_timestamp): $(jack_all_deps) | setup-jack-server
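For illustration, a hedged sketch of a package driving the new LOCAL_JAR_PROCESSOR hook; the module name, processor name, and flags are invented — only the variables themselves and the $(in)/$(out) convention come from the hunk above.

# Hypothetical Android.mk fragment; names and flags are illustrative only.
include $(CLEAR_VARS)
LOCAL_MODULE := ExampleApp
LOCAL_SRC_FILES := $(call all-java-files-under, src)
# A host Java library; java.mk runs $(HOST_OUT_JAVA_LIBRARIES)/<name>.jar on the classes jar.
LOCAL_JAR_PROCESSOR := example-jar-processor
# Use '=' (not ':=') so $(in) and $(out), set by java.mk, expand when the rule is set up.
LOCAL_JAR_PROCESSOR_ARGS = --input $(in) --output $(out)
include $(BUILD_PACKAGE)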
diff --git a/core/java_common.mk b/core/java_common.mk
index 03856ac..0859a46 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -19,12 +19,7 @@
ifneq (,$(filter $(LOCAL_SDK_VERSION), $(private_sdk_versions_without_any_java_18_support)))
LOCAL_JAVA_LANGUAGE_VERSION := 1.7
else
- # This retains 1.7 for ART build bots only. http://b/27583810
- ifeq (,$(LEGACY_USE_JAVA7))
- LOCAL_JAVA_LANGUAGE_VERSION := 1.8
- else
- LOCAL_JAVA_LANGUAGE_VERSION := 1.7
- endif
+ LOCAL_JAVA_LANGUAGE_VERSION := 1.8
endif
endif
LOCAL_JAVACFLAGS += -source $(LOCAL_JAVA_LANGUAGE_VERSION) -target $(LOCAL_JAVA_LANGUAGE_VERSION)
@@ -153,6 +148,20 @@
need_compile_java := $(strip $(all_java_sources)$(all_res_assets)$(java_resource_sources))$(LOCAL_STATIC_JAVA_LIBRARIES)$(filter true,$(LOCAL_SOURCE_FILES_ALL_GENERATED))
ifdef need_compile_java
+annotation_processor_flags :=
+annotation_processor_deps :=
+
+ifdef LOCAL_ANNOTATION_PROCESSORS
+ annotation_processor_jars := $(call java-lib-deps,$(LOCAL_ANNOTATION_PROCESSORS),true)
+ annotation_processor_flags += -processorpath $(call normalize-path-list,$(annotation_processor_jars))
+ annotation_processor_deps += $(annotation_processor_jars)
+
+ # b/25860419: annotation processors must be explicitly specified for grok
+ annotation_processor_flags += $(foreach class,$(LOCAL_ANNOTATION_PROCESSOR_CLASSES),-processor $(class))
+
+ annotation_processor_jars :=
+endif
+
full_static_java_libs := \
$(foreach lib,$(LOCAL_STATIC_JAVA_LIBRARIES), \
$(call intermediates-dir-for, \
@@ -164,6 +173,7 @@
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ASSET_DIR := $(LOCAL_ASSET_DIR)
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CLASS_INTERMEDIATES_DIR := $(intermediates.COMMON)/classes
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ANNO_INTERMEDIATES_DIR := $(intermediates.COMMON)/anno
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SOURCE_INTERMEDIATES_DIR := $(intermediates.COMMON)/src
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_HAS_PROTO_SOURCES := $(if $(proto_sources),true)
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_PROTO_SOURCE_INTERMEDIATES_DIR := $(intermediates.COMMON)/proto
@@ -386,35 +396,24 @@
# Verify that all libraries are safe to use
###########################################################
ifndef LOCAL_IS_HOST_MODULE
-my_link_type := $(intermediates.COMMON)/link_type
-all_link_types: $(my_link_type)
-my_link_type_deps := $(strip \
- $(foreach lib,$(LOCAL_STATIC_JAVA_LIBRARIES),\
- $(call intermediates-dir-for, \
- JAVA_LIBRARIES,$(lib),,COMMON)/link_type) \
- $(foreach lib,$(apk_libraries), \
- $(call intermediates-dir-for, \
- APPS,$(lib),,COMMON)/link_type))
ifeq ($(LOCAL_SDK_VERSION),system_current)
-$(my_link_type): PRIVATE_LINK_TYPE := java:system
-$(my_link_type): PRIVATE_WARN_TYPES := java:platform
-$(my_link_type): PRIVATE_ALLOWED_TYPES := java:sdk java:system
+my_link_type := java:system
+my_warn_types := java:platform
+my_allowed_types := java:sdk java:system
else ifneq ($(LOCAL_SDK_VERSION),)
-$(my_link_type): PRIVATE_LINK_TYPE := java:sdk
-$(my_link_type): PRIVATE_WARN_TYPES := java:system java:platform
-$(my_link_type): PRIVATE_ALLOWED_TYPES := java:sdk
+my_link_type := java:sdk
+my_warn_types := java:system java:platform
+my_allowed_types := java:sdk
else
-$(my_link_type): PRIVATE_LINK_TYPE := java:platform
-$(my_link_type): PRIVATE_WARN_TYPES :=
-$(my_link_type): PRIVATE_ALLOWED_TYPES := java:sdk java:system java:platform
+my_link_type := java:platform
+my_warn_types :=
+my_allowed_types := java:sdk java:system java:platform
endif
-$(eval $(call link-type-partitions,$(my_link_type)))
-$(my_link_type): PRIVATE_DEPS := $(my_link_type_deps)
-$(my_link_type): PRIVATE_MODULE := $(LOCAL_MODULE)
-$(my_link_type): PRIVATE_MAKEFILE := $(LOCAL_MODULE_MAKEFILE)
-$(my_link_type): $(my_link_type_deps) $(CHECK_LINK_TYPE)
- @echo Check Java library module types: $@
- $(check-link-type)
-$(LOCAL_BUILT_MODULE): $(my_link_type)
+my_link_deps := $(addprefix JAVA_LIBRARIES:,$(LOCAL_STATIC_JAVA_LIBRARIES))
+my_link_deps += $(addprefix APPS:,$(apk_libraries))
+
+my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+my_common := COMMON
+include $(BUILD_SYSTEM)/link_type.mk
endif # !LOCAL_IS_HOST_MODULE
diff --git a/core/link_type.mk b/core/link_type.mk
new file mode 100644
index 0000000..ff525cb
--- /dev/null
+++ b/core/link_type.mk
@@ -0,0 +1,27 @@
+# Inputs:
+# LOCAL_MODULE_CLASS, LOCAL_MODULE, LOCAL_MODULE_MAKEFILE, LOCAL_BUILT_MODULE
+# from base_rules.mk: my_kind, my_host_cross
+# my_common: empty or COMMON, like the argument to intermediates-dir-for
+# my_2nd_arch_prefix: usually LOCAL_2ND_ARCH_VAR_PREFIX, separate for JNI installation
+#
+# my_link_type: the tags to apply to this module
+# my_warn_types: the tags to warn about in our dependencies
+# my_allowed_types: the tags to allow in our dependencies
+# my_link_deps: the dependencies, in the form of <MODULE_CLASS>:<name>
+#
+
+my_link_prefix := LINK_TYPE:$(call find-idf-prefix,$(my_kind),$(my_host_cross)):$(if $(my_common),$(my_common):_,_:$(if $(my_2nd_arch_prefix),$(my_2nd_arch_prefix),_))
+link_type := $(my_link_prefix):$(LOCAL_MODULE_CLASS):$(LOCAL_MODULE)
+ALL_LINK_TYPES := $(ALL_LINK_TYPES) $(link_type)
+$(link_type).TYPE := $(my_link_type)
+$(link_type).MAKEFILE := $(LOCAL_MODULE_MAKEFILE)
+$(link_type).WARN := $(my_warn_types)
+$(link_type).ALLOWED := $(my_allowed_types)
+$(link_type).DEPS := $(addprefix $(my_link_prefix):,$(my_link_deps))
+$(link_type).BUILT := $(LOCAL_BUILT_MODULE)
+
+link_type :=
+my_allowed_types :=
+my_link_prefix :=
+my_link_type :=
+my_warn_types :=
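As a purely hypothetical illustration of what this records (module names invented), an SDK app with one JNI library would end up registered roughly as:

#   ALL_LINK_TYPES += LINK_TYPE:TARGET:_:_:APPS:ExampleApp
#   LINK_TYPE:TARGET:_:_:APPS:ExampleApp.TYPE    := app:sdk
#   LINK_TYPE:TARGET:_:_:APPS:ExampleApp.WARN    := native:platform
#   LINK_TYPE:TARGET:_:_:APPS:ExampleApp.ALLOWED := native:ndk
#   LINK_TYPE:TARGET:_:_:APPS:ExampleApp.DEPS    := LINK_TYPE:TARGET:_:_:SHARED_LIBRARIES:libexample_jni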
diff --git a/core/main.mk b/core/main.mk
index f51070b..b50e0e7 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -88,150 +88,10 @@
# Include the google-specific config
-include vendor/google/build/config.mk
-VERSION_CHECK_SEQUENCE_NUMBER := 6
-JAVA_NOT_REQUIRED_CHECKED :=
--include $(OUT_DIR)/versions_checked.mk
-ifneq ($(VERSION_CHECK_SEQUENCE_NUMBER)$(JAVA_NOT_REQUIRED),$(VERSIONS_CHECKED)$(JAVA_NOT_REQUIRED_CHECKED))
-
-$(info Checking build tools versions...)
-
-# check for a case sensitive file system
-ifneq (a,$(shell mkdir -p $(OUT_DIR) ; \
- echo a > $(OUT_DIR)/casecheck.txt; \
- echo B > $(OUT_DIR)/CaseCheck.txt; \
- cat $(OUT_DIR)/casecheck.txt))
-$(warning ************************************************************)
-$(warning You are building on a case-insensitive filesystem.)
-$(warning Please move your source tree to a case-sensitive filesystem.)
-$(warning ************************************************************)
-$(error Case-insensitive filesystems not supported)
-endif
-
-# Make sure that there are no spaces in the absolute path; the
-# build system can't deal with them.
-ifneq ($(words $(shell pwd)),1)
-$(warning ************************************************************)
-$(warning You are building in a directory whose absolute path contains)
-$(warning a space character:)
-$(warning $(space))
-$(warning "$(shell pwd)")
-$(warning $(space))
-$(warning Please move your source tree to a path that does not contain)
-$(warning any spaces.)
-$(warning ************************************************************)
-$(error Directory names containing spaces not supported)
-endif
-
-ifneq ($(JAVA_NOT_REQUIRED),true)
-java_version_str := $(shell unset _JAVA_OPTIONS && java -version 2>&1)
-javac_version_str := $(shell unset _JAVA_OPTIONS && javac -version 2>&1)
-
-# Check for the correct version of java, should be 1.8 by
-# default and only 1.7 if LEGACY_USE_JAVA7 is set.
-ifeq ($(LEGACY_USE_JAVA7),) # if LEGACY_USE_JAVA7 == ''
-required_version := "1.8.x"
-required_javac_version := "1.8"
-java_version := $(shell echo '$(java_version_str)' | grep '[ "]1\.8[\. "$$]')
-javac_version := $(shell echo '$(javac_version_str)' | grep '[ "]1\.8[\. "$$]')
-else
-required_version := "1.7.x"
-required_javac_version := "1.7"
-java_version := $(shell echo '$(java_version_str)' | grep '^java .*[ "]1\.7[\. "$$]')
-javac_version := $(shell echo '$(javac_version_str)' | grep '[ "]1\.7[\. "$$]')
-endif # if LEGACY_USE_JAVA7 == ''
-
-ifeq ($(strip $(java_version)),)
-$(info ************************************************************)
-$(info You are attempting to build with the incorrect version)
-$(info of java.)
-$(info $(space))
-$(info Your version is: $(java_version_str).)
-$(info The required version is: $(required_version))
-$(info $(space))
-$(info Please follow the machine setup instructions at)
-$(info $(space)$(space)$(space)$(space)https://source.android.com/source/initializing.html)
-$(info ************************************************************)
-$(error stop)
-endif
-
-# Check for the current JDK.
-#
-# For Java 1.7/1.8, we require OpenJDK on linux and Oracle JDK on Mac OS.
-requires_openjdk := false
-ifeq ($(BUILD_OS),linux)
-requires_openjdk := true
-endif
-
-
-# Check for the current jdk
-ifeq ($(requires_openjdk), true)
-# The user asked for openjdk, so check that the host
-# java version is really openjdk and not some other JDK.
-ifeq ($(shell echo '$(java_version_str)' | grep -i openjdk),)
-$(info ************************************************************)
-$(info You asked for an OpenJDK based build but your version is)
-$(info $(java_version_str).)
-$(info ************************************************************)
-$(error stop)
-endif # java version is not OpenJdk
-else # if requires_openjdk
-ifneq ($(shell echo '$(java_version_str)' | grep -i openjdk),)
-$(info ************************************************************)
-$(info You are attempting to build with an unsupported JDK.)
-$(info $(space))
-$(info You use OpenJDK but only Sun/Oracle JDK is supported.)
-$(info Please follow the machine setup instructions at)
-$(info $(space)$(space)$(space)$(space)https://source.android.com/source/download.html)
-$(info ************************************************************)
-$(error stop)
-endif # java version is not Sun Oracle JDK
-endif # if requires_openjdk
-
-KNOWN_INCOMPATIBLE_JAVAC_VERSIONS := google
-incompat_javac := $(foreach v,$(KNOWN_INCOMPATIBLE_JAVAC_VERSIONS),$(findstring $(v),$(javac_version_str)))
-ifneq ($(incompat_javac),)
-javac_version :=
-endif
-
-# Check for the correct version of javac
-ifeq ($(strip $(javac_version)),)
-$(info ************************************************************)
-$(info You are attempting to build with the incorrect version)
-$(info of javac.)
-$(info $(space))
-$(info Your version is: $(javac_version_str).)
-ifneq ($(incompat_javac),)
-$(info This '$(incompat_javac)' version is not supported for Android platform builds.)
-$(info Use a publicly available JDK and make sure you have run envsetup.sh / lunch.)
-else
-$(info The required version is: $(required_javac_version))
-endif
-$(info $(space))
-$(info Please follow the machine setup instructions at)
-$(info $(space)$(space)$(space)$(space)https://source.android.com/source/download.html)
-$(info ************************************************************)
-$(error stop)
-endif
-
-endif # if JAVA_NOT_REQUIRED
-
-ifndef BUILD_EMULATOR
- # Emulator binaries are now provided under prebuilts/android-emulator/
- BUILD_EMULATOR := false
-endif
-
-$(shell echo 'VERSIONS_CHECKED := $(VERSION_CHECK_SEQUENCE_NUMBER)' \
- > $(OUT_DIR)/versions_checked.mk)
-$(shell echo 'BUILD_EMULATOR ?= $(BUILD_EMULATOR)' \
- >> $(OUT_DIR)/versions_checked.mk)
-$(shell echo 'JAVA_NOT_REQUIRED_CHECKED := $(JAVA_NOT_REQUIRED)' \
- >> $(OUT_DIR)/versions_checked.mk)
-endif
-
# These are the modifier targets that don't do anything themselves, but
# change the behavior of the build.
# (must be defined before including definitions.make)
-INTERNAL_MODIFIER_TARGETS := showcommands all
+INTERNAL_MODIFIER_TARGETS := all
# EMMA_INSTRUMENT_STATIC merges the static emma library to each emma-enabled module.
ifeq (true,$(EMMA_INSTRUMENT_STATIC))
@@ -451,8 +311,8 @@
ADDITIONAL_BUILD_PROPERTIES += net.bt.name=Android
-# enable vm tracing in files for now to help track
-# the cause of ANRs in the content process
+# Sets the location that the runtime dumps stack traces to when signalled
+# with SIGQUIT. Stack trace dumping is turned on for all android builds.
ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.stack-trace-file=/data/anr/traces.txt
# ------------------------------------------------------------
@@ -474,7 +334,7 @@
endif
-# If they only used the modifier goals (showcommands, etc), we'll actually
+# If they only used the modifier goals (all, etc), we'll actually
# build the default target.
ifeq ($(filter-out $(INTERNAL_MODIFIER_TARGETS),$(MAKECMDGOALS)),)
.PHONY: $(INTERNAL_MODIFIER_TARGETS)
@@ -557,6 +417,8 @@
include $(BUILD_SYSTEM)/pdk_fusion_modules.mk
endif # PDK_FUSION_PLATFORM_ZIP
+droid_targets : blueprint_tools
+
endif # dont_bother
endif # ONE_SHOT_MAKEFILE
@@ -724,6 +586,168 @@
deps :=
add-required-deps :=
+################################################################################
+# Link type checking
+#
+# ALL_LINK_TYPES contains a list of all link type prefixes (generally one per
+# module, but APKs can "link" to both java and native code). The link type
+# prefix consists of all the information needed by intermediates-dir-for:
+#
+# LINK_TYPE:TARGET:_:2ND:STATIC_LIBRARIES:libfoo
+#
+# 1: LINK_TYPE literal
+# 2: prefix
+# - TARGET
+# - HOST
+# - HOST_CROSS
+# - AUX
+# 3: Whether to use the common intermediates directory or not
+# - _
+# - COMMON
+# 4: Whether it's the second arch or not
+# - _
+# - 2ND_
+# 5: Module Class
+# - STATIC_LIBRARIES
+# - SHARED_LIBRARIES
+# - ...
+# 6: Module Name
+#
+# Fields are then stored under that prefix, joined by a period and the field name:
+# - TYPE: the link types for this module
+# - MAKEFILE: Where this module was defined
+# - BUILT: The built module location
+# - DEPS: the link type prefixes for the module's dependencies
+# - ALLOWED: the link types to allow in this module's dependencies
+# - WARN: the link types to warn about in this module's dependencies
+#
+# All of the dependency link types not listed in ALLOWED or WARN will become
+# errors.
+################################################################################
+
+link_type_error :=
+
+define link-type-prefix
+$(word 2,$(subst :,$(space),$(1)))
+endef
+define link-type-common
+$(patsubst _,,$(word 3,$(subst :,$(space),$(1))))
+endef
+define link-type-2ndarchprefix
+$(patsubst _,,$(word 4,$(subst :,$(space),$(1))))
+endef
+define link-type-class
+$(word 5,$(subst :,$(space),$(1)))
+endef
+define link-type-name
+$(word 6,$(subst :,$(space),$(1)))
+endef
+define link-type-os
+$(strip $(eval _p := $(link-type-prefix))\
+ $(if $(filter HOST HOST_CROSS,$(_p)),\
+ $($(_p)_OS),\
+ $(if $(filter AUX,$(_p)),AUX,android)))
+endef
+define link-type-arch
+$($(link-type-prefix)_$(link-type-2ndarchprefix)ARCH)
+endef
+define link-type-name-variant
+$(link-type-name) ($(link-type-class) $(link-type-os)-$(link-type-arch))
+endef
+
+# $(1): the prefix of the module doing the linking
+# $(2): the prefix of the linked module
+define link-type-warning
+$(shell $(call echo-warning,$($(1).MAKEFILE),"$(call link-type-name,$(1)) ($($(1).TYPE)) should not link against $(call link-type-name,$(2)) ($(3))"))
+endef
+
+# $(1): the prefix of the module doing the linking
+# $(2): the prefix of the linked module
+define link-type-error
+$(shell $(call echo-error,$($(1).MAKEFILE),"$(call link-type-name,$(1)) ($($(1).TYPE)) can not link against $(call link-type-name,$(2)) ($(3))"))\
+$(eval link_type_error := true)
+endef
+
+link-type-missing :=
+ifneq ($(ALLOW_MISSING_DEPENDENCIES),true)
+ # Print an error message if the linked-to module is missing
+ # $(1): the prefix of the module doing the linking
+ # $(2): the prefix of the missing module
+ define link-type-missing
+ $(shell $(call echo-error,$($(1).MAKEFILE),"$(call link-type-name-variant,$(1)) missing $(call link-type-name-variant,$(2))"))\
+ $(eval available_variants := $(filter %:$(call link-type-name,$(2)),$(ALL_LINK_TYPES)))\
+ $(if $(available_variants),\
+ $(info Available variants:)\
+ $(foreach v,$(available_variants),$(info $(space)$(space)$(call link-type-name-variant,$(v)))))\
+ $(info You can set ALLOW_MISSING_DEPENDENCIES=true in your environment if this is intentional, but that may defer real problems until later in the build.)\
+ $(eval link_type_error := true)
+ endef
+else
+ define link-type-missing
+ $(eval $$(1).MISSING := true)
+ endef
+endif
+
+# Verify that $(1) can link against $(2)
+# Both $(1) and $(2) are the link type prefix defined above
+define verify-link-type
+$(foreach t,$($(2).TYPE),\
+ $(if $(filter-out $($(1).ALLOWED),$(t)),\
+ $(if $(filter $(t),$($(1).WARN)),\
+ $(call link-type-warning,$(1),$(2),$(t)),\
+ $(call link-type-error,$(1),$(2),$(t)))))
+endef
+
+# TODO: Verify all branches/configs have reasonable warnings/errors, and remove
+# these overrides
+link-type-missing = $(eval $$(1).MISSING := true)
+verify-link-type = $(eval $$(1).MISSING := true)
+
+$(foreach lt,$(ALL_LINK_TYPES),\
+ $(foreach d,$($(lt).DEPS),\
+ $(if $($(d).TYPE),\
+ $(call verify-link-type,$(lt),$(d)),\
+ $(call link-type-missing,$(lt),$(d)))))
+
+ifdef link_type_error
+ $(error exiting from previous errors)
+endif
+
+# The intermediate filename for link type rules
+#
+# APPS are special -- they have up to three different rules:
+# 1. The COMMON rule for Java libraries
+# 2. The jni_link_type rule for embedded native code
+# 3. The 2ND_jni_link_type for the second architecture native code
+define link-type-file
+$(call intermediates-dir-for,$(link-type-class),$(link-type-name),$(filter AUX HOST HOST_CROSS,$(link-type-prefix)),$(link-type-common),$(link-type-2ndarchprefix),$(filter HOST_CROSS,$(link-type-prefix)))/$(if $(filter APPS,$(link-type-class)),$(if $(link-type-common),,$(link-type-2ndarchprefix)jni_))link_type
+endef
+
+# Write out the file-based link_type rules for the ALLOW_MISSING_DEPENDENCIES
+# case. We always need to write the file for mm to work, but only need to
+# check it if we weren't able to check it when reading the Android.mk files.
+define link-type-file-rule
+my_link_type_deps := $(foreach l,$($(1).DEPS),$(call link-type-file,$(l)))
+my_link_type_file := $(call link-type-file,$(1))
+$($(1).BUILT): | $$(my_link_type_file)
+$$(my_link_type_file): PRIVATE_DEPS := $$(my_link_type_deps)
+ifeq ($($(1).MISSING),true)
+$$(my_link_type_file): $(CHECK_LINK_TYPE)
+endif
+$$(my_link_type_file): $$(my_link_type_deps)
+ @echo Check module type: $$@
+ $$(hide) mkdir -p $$(dir $$@) && rm -f $$@
+ifeq ($($(1).MISSING),true)
+ $$(hide) $(CHECK_LINK_TYPE) --makefile $($(1).MAKEFILE) --module $(link-type-name) \
+ --type "$($(1).TYPE)" $(addprefix --allowed ,$($(1).ALLOWED)) \
+ $(addprefix --warn ,$($(1).WARN)) $$(PRIVATE_DEPS)
+endif
+ $$(hide) echo "$($(1).TYPE)" >$$@
+endef
+
+$(foreach lt,$(ALL_LINK_TYPES),\
+ $(eval $(call link-type-file-rule,$(lt))))
+
# -------------------------------------------------------------------
# Figure out our module sets.
#
@@ -1106,16 +1130,6 @@
.PHONY: findbugs
findbugs: $(INTERNAL_FINDBUGS_HTML_TARGET) $(INTERNAL_FINDBUGS_XML_TARGET)
-.PHONY: clean
-clean:
- @rm -rf $(OUT_DIR)/*
- @echo "Entire build directory removed."
-
-.PHONY: clobber
-clobber: clean
-
-# The rules for dataclean and installclean are defined in cleanbuild.mk.
-
#xxx scrape this from ALL_MODULE_NAME_TAGS
.PHONY: modules
modules:
@@ -1123,10 +1137,6 @@
@echo "$(call module-names-for-tag-list,$(ALL_MODULE_TAGS))" | \
tr -s ' ' '\n' | sort -u | $(COLUMN)
-.PHONY: showcommands
-showcommands:
- @echo >/dev/null
-
.PHONY: nothing
nothing:
@echo Successfully read the makefiles.
@@ -1138,7 +1148,4 @@
ndk: $(SOONG_OUT_DIR)/ndk.timestamp
.PHONY: ndk
-.PHONY: all_link_types
-all_link_types:
-
endif # KATI
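For orientation, the link-type accessor macros defined in the main.mk hunk above split a prefix on ':'; with a hypothetical second-arch shared-library entry they expand roughly as follows.

# Hypothetical expansions (libfoo is an invented module name):
#   $(call link-type-prefix,LINK_TYPE:TARGET:_:2ND_:SHARED_LIBRARIES:libfoo)        -> TARGET
#   $(call link-type-common,LINK_TYPE:TARGET:_:2ND_:SHARED_LIBRARIES:libfoo)        -> (empty)
#   $(call link-type-2ndarchprefix,LINK_TYPE:TARGET:_:2ND_:SHARED_LIBRARIES:libfoo) -> 2ND_
#   $(call link-type-class,LINK_TYPE:TARGET:_:2ND_:SHARED_LIBRARIES:libfoo)         -> SHARED_LIBRARIES
#   $(call link-type-name,LINK_TYPE:TARGET:_:2ND_:SHARED_LIBRARIES:libfoo)          -> libfoo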
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index f456b8b..ca2dcee 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -7,18 +7,14 @@
KATI_OUTPUT_PATTERNS := $(OUT_DIR)/build%.ninja $(OUT_DIR)/ninja%.sh
# Modifier goals we don't need to pass to Ninja.
-NINJA_EXCLUDE_GOALS := showcommands all dist
-.PHONY : $(NINJA_EXCLUDE_GOALS)
+NINJA_EXCLUDE_GOALS := all dist APP-% PRODUCT-%
# A list of goals which affect parsing of makefiles and we need to pass to Kati.
PARSE_TIME_MAKE_GOALS := \
$(PARSE_TIME_MAKE_GOALS) \
$(dont_bother_goals) \
all \
- APP-% \
- DUMP_% \
ECLIPSE-% \
- PRODUCT-% \
AUX-% \
boottarball-nodeps \
brillo_tests \
@@ -67,5 +63,8 @@
$(sort $(ORIGINAL_MAKECMDGOALS) $(MAKECMDGOALS)))
# Goals we need to pass to Ninja.
NINJA_GOALS := $(filter-out $(NINJA_EXCLUDE_GOALS), $(ANDROID_GOALS))
+ifndef NINJA_GOALS
+ NINJA_GOALS := droid
+endif
# Goals we need to pass to Kati.
-KATI_GOALS := $(filter $(PARSE_TIME_MAKE_GOALS), $(ANDROID_GOALS))
+KATI_GOALS := $(filter $(PARSE_TIME_MAKE_GOALS), $(ANDROID_GOALS))
diff --git a/core/package.mk b/core/package.mk
index 4fe058d..f3713fc 100644
--- a/core/package.mk
+++ b/core/package.mk
@@ -4,13 +4,15 @@
$(call record-module-type,PACKAGE)
-ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
-LOCAL_MULTILIB := first
-endif
-
my_prefix := TARGET_
include $(BUILD_SYSTEM)/multilib.mk
+ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
+ ifneq ($(TARGET_SUPPORTS_64_BIT_APPS)|$(my_module_multilib),|64)
+ my_module_multilib := first
+ endif
+endif
+
ifeq ($(TARGET_SUPPORTS_32_BIT_APPS)|$(TARGET_SUPPORTS_64_BIT_APPS),true|true)
# packages default to building for either architecture,
# the preferred if its supported, otherwise the non-preferred.
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 242203b..4003aaf 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -88,6 +88,7 @@
LOCAL_RESOURCE_DIR := $(LOCAL_PATH)/res
else
need_compile_res := true
+ LOCAL_RESOURCE_DIR := $(foreach d,$(LOCAL_RESOURCE_DIR),$(call clean-path,$(d)))
endif
package_resource_overlays := $(strip \
@@ -374,7 +375,7 @@
ifdef LOCAL_PACKAGE_SPLITS
my_apk_split_configs := $(LOCAL_PACKAGE_SPLITS)
my_split_suffixes := $(subst $(comma),_,$(my_apk_split_configs))
-built_apk_splits := $(foreach s,$(my_split_suffixes),$(built_module_path)/package_$(s).apk)
+built_apk_splits := $(foreach s,$(my_split_suffixes),$(intermediates)/package_$(s).apk)
installed_apk_splits := $(foreach s,$(my_split_suffixes),$(my_module_path)/$(LOCAL_MODULE)_$(s).apk)
endif
@@ -654,7 +655,7 @@
# That way the build system will rerun the aapt after the user changes the splitting parameters.
$(built_apk_splits): PRIVATE_PRIVATE_KEY := $(private_key)
$(built_apk_splits): PRIVATE_CERTIFICATE := $(certificate)
-$(built_apk_splits) : $(built_module_path)/%.apk : $(LOCAL_BUILT_MODULE)
+$(built_apk_splits) : $(intermediates)/%.apk : $(LOCAL_BUILT_MODULE)
$(hide) if [ ! -f $@ ]; then \
echo 'No $@ generated, check your apk splitting parameters.' 1>&2; \
rm $<; exit 1; \
@@ -662,14 +663,14 @@
$(sign-package)
# Rules to install the splits
-$(installed_apk_splits) : $(my_module_path)/$(LOCAL_MODULE)_%.apk : $(built_module_path)/package_%.apk
+$(installed_apk_splits) : $(my_module_path)/$(LOCAL_MODULE)_%.apk : $(intermediates)/package_%.apk
@echo "Install: $@"
$(copy-file-to-new-target)
# Register the additional built and installed files.
ALL_MODULES.$(my_register_name).INSTALLED += $(installed_apk_splits)
ALL_MODULES.$(my_register_name).BUILT_INSTALLED += \
- $(foreach s,$(my_split_suffixes),$(built_module_path)/package_$(s).apk:$(my_module_path)/$(LOCAL_MODULE)_$(s).apk)
+ $(foreach s,$(my_split_suffixes),$(intermediates)/package_$(s).apk:$(my_module_path)/$(LOCAL_MODULE)_$(s).apk)
# Make sure to install the splits when you run "make <module_name>".
$(my_all_targets): $(installed_apk_splits)
@@ -679,7 +680,7 @@
$(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
$(eval my_compat_dist_$(suite) := $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
$(foreach s,$(my_split_suffixes),\
- $(built_module_path)/package_$(s).apk:$(dir)/$(LOCAL_MODULE)_$(s).apk))))
+ $(intermediates)/package_$(s).apk:$(dir)/$(LOCAL_MODULE)_$(s).apk))))
$(call create-suite-dependencies)
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index 8a5470e..c12084f 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -148,21 +148,20 @@
endif
export_cflags :=
-my_link_type := $(intermediates)/link_type
ifdef LOCAL_SDK_VERSION
-$(my_link_type): PRIVATE_LINK_TYPE := native:ndk
+my_link_type := native:ndk
else ifdef LOCAL_USE_VNDK
-$(my_link_type): PRIVATE_LINK_TYPE := native:vendor
+my_link_type := native:vendor
else
-$(my_link_type): PRIVATE_LINK_TYPE := native:platform
+my_link_type := native:platform
endif
-$(eval $(call link-type-partitions,$(my_link_type)))
-$(my_link_type):
- @echo Check module type: $@
- $(hide) mkdir -p $(dir $@) && rm -f $@
- $(hide) echo "$(PRIVATE_LINK_TYPE)" >$@
-$(LOCAL_BUILT_MODULE) : | $(export_includes) $(my_link_type)
+# TODO: check dependencies of prebuilt files
+my_link_deps :=
+
+my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+my_common :=
+include $(BUILD_SYSTEM)/link_type.mk
endif # prebuilt_module_is_a_library
# The real dependency will be added after all Android.mks are loaded and the install paths
@@ -371,7 +370,7 @@
## Install split apks.
ifdef LOCAL_PACKAGE_SPLITS
# LOCAL_PACKAGE_SPLITS is a list of apks to be installed.
-built_apk_splits := $(addprefix $(built_module_path)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
+built_apk_splits := $(addprefix $(intermediates)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
installed_apk_splits := $(addprefix $(my_module_path)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
# Rules to sign the split apks.
@@ -384,19 +383,19 @@
$(built_apk_splits) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem
$(built_apk_splits) : PRIVATE_PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
$(built_apk_splits) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
-$(built_apk_splits) : $(built_module_path)/%.apk : $(my_src_dir)/%.apk
+$(built_apk_splits) : $(intermediates)/%.apk : $(my_src_dir)/%.apk
$(copy-file-to-new-target)
$(sign-package)
# Rules to install the split apks.
-$(installed_apk_splits) : $(my_module_path)/%.apk : $(built_module_path)/%.apk
+$(installed_apk_splits) : $(my_module_path)/%.apk : $(intermediates)/%.apk
@echo "Install: $@"
$(copy-file-to-new-target)
# Register the additional built and installed files.
ALL_MODULES.$(my_register_name).INSTALLED += $(installed_apk_splits)
ALL_MODULES.$(my_register_name).BUILT_INSTALLED += \
- $(foreach s,$(LOCAL_PACKAGE_SPLITS),$(built_module_path)/$(notdir $(s)):$(my_module_path)/$(notdir $(s)))
+ $(foreach s,$(LOCAL_PACKAGE_SPLITS),$(intermediates)/$(notdir $(s)):$(my_module_path)/$(notdir $(s)))
# Make sure to install the splits when you run "make <module_name>".
$(my_all_targets): $(installed_apk_splits)
@@ -471,20 +470,20 @@
$(common_classes_jar) $(common_classes_pre_proguard_jar) $(common_javalib_jar): PRIVATE_MODULE := $(LOCAL_MODULE)
$(common_classes_jar) $(common_classes_pre_proguard_jar) $(common_javalib_jar): PRIVATE_PREFIX := $(my_prefix)
-my_link_type := $(intermediates.COMMON)/link_type
ifeq ($(LOCAL_SDK_VERSION),system_current)
-$(my_link_type): PRIVATE_LINK_TYPE := java:system
+my_link_type := java:system
else ifneq ($(LOCAL_SDK_VERSION),)
-$(my_link_type): PRIVATE_LINK_TYPE := java:sdk
+my_link_type := java:sdk
else
-$(my_link_type): PRIVATE_LINK_TYPE := java:platform
+my_link_type := java:platform
endif
-$(eval $(call link-type-partitions,$(my_link_type)))
-$(my_link_type):
- @echo Check module type: $@
- $(hide) mkdir -p $(dir $@) && rm -f $@
- $(hide) echo "$(PRIVATE_LINK_TYPE)" >$@
-$(LOCAL_BUILT_MODULE): $(my_link_type)
+
+# TODO: check dependencies of prebuilt files
+my_link_deps :=
+
+my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+my_common := COMMON
+include $(BUILD_SYSTEM)/link_type.mk
ifeq ($(prebuilt_module_is_dex_javalib),true)
# For prebuilt shared Java library we don't have classes.jar.
@@ -499,7 +498,7 @@
my_src_jar := $(intermediates.COMMON)/aar/classes.jar
$(my_src_jar) : $(my_src_aar)
- $(hide) rm -rf $(dir $@) && mkdir -p $(dir $@)
+ $(hide) rm -rf $(dir $@) && mkdir -p $(dir $@) $(dir $@)/res
$(hide) unzip -qo -d $(dir $@) $<
# Make sure the extracted classes.jar has a new timestamp.
$(hide) touch $@
@@ -519,12 +518,34 @@
ifdef LOCAL_USE_AAPT2
ifneq ($(my_src_aar),)
+LOCAL_SDK_RES_VERSION:=$(strip $(LOCAL_SDK_RES_VERSION))
+ifeq ($(LOCAL_SDK_RES_VERSION),)
+ LOCAL_SDK_RES_VERSION:=$(LOCAL_SDK_VERSION)
+endif
+
+framework_res_package_export :=
+framework_res_package_export_deps :=
+# Please refer to package.mk
+ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
+ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
+framework_res_package_export := \
+ $(HISTORICAL_SDK_VERSIONS_ROOT)/$(LOCAL_SDK_RES_VERSION)/android.jar
+framework_res_package_export_deps := $(framework_res_package_export)
+else
+framework_res_package_export := \
+ $(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk
+framework_res_package_export_deps := \
+ $(dir $(framework_res_package_export))src/R.stamp
+endif
+endif
+
my_res_package := $(intermediates.COMMON)/package-res.apk
# We needed only very few PRIVATE variables and aapt2.mk input variables. Reset the unnecessary ones.
$(my_res_package): PRIVATE_AAPT2_CFLAGS :=
+$(my_res_package): PRIVATE_AAPT_FLAGS := --static-lib --no-static-lib-packages
$(my_res_package): PRIVATE_ANDROID_MANIFEST := $(intermediates.COMMON)/aar/AndroidManifest.xml
-$(my_res_package): PRIVATE_AAPT_INCLUDES :=
+$(my_res_package): PRIVATE_AAPT_INCLUDES := $(framework_res_package_export)
$(my_res_package): PRIVATE_SOURCE_INTERMEDIATES_DIR :=
$(my_res_package): PRIVATE_PROGUARD_OPTIONS_FILE :=
$(my_res_package): PRIVATE_DEFAULT_APP_TARGET_SDK :=
@@ -532,11 +553,12 @@
$(my_res_package): PRIVATE_PRODUCT_AAPT_CONFIG :=
$(my_res_package): PRIVATE_PRODUCT_AAPT_PREF_CONFIG :=
$(my_res_package): PRIVATE_TARGET_AAPT_CHARACTERISTICS :=
+$(my_res_package) : $(framework_res_package_export_deps)
full_android_manifest :=
my_res_resources :=
my_overlay_resources :=
-my_compiled_res_base_dir :=
+my_compiled_res_base_dir := $(intermediates.COMMON)/flat-res
R_file_stamp :=
proguard_options_file :=
my_generated_res_dirs := $(intermediates.COMMON)/aar/res
diff --git a/core/product_config.mk b/core/product_config.mk
index e069ff1..3623aa6 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -88,6 +88,7 @@
# Provide "PRODUCT-<prodname>-<goal>" targets, which lets you build
# a particular configuration without needing to set up the environment.
#
+ifndef KATI
product_goals := $(strip $(filter PRODUCT-%,$(MAKECMDGOALS)))
ifdef product_goals
# Scrape the product and build names out of the goal,
@@ -113,54 +114,42 @@
$(error "tests" has been deprecated as a build variant. Use it as a build goal instead.)
endif
- # The build server wants to do make PRODUCT-dream-installclean
- # which really means TARGET_PRODUCT=dream make installclean.
+ # The build server wants to do make PRODUCT-dream-sdk
+ # which really means TARGET_PRODUCT=dream make sdk.
ifneq ($(filter-out $(INTERNAL_VALID_VARIANTS),$(TARGET_BUILD_VARIANT)),)
- MAKECMDGOALS := $(MAKECMDGOALS) $(TARGET_BUILD_VARIANT)
+ override MAKECMDGOALS := $(MAKECMDGOALS) $(TARGET_BUILD_VARIANT)
TARGET_BUILD_VARIANT := userdebug
default_goal_substitution :=
else
- default_goal_substitution := $(DEFAULT_GOAL)
+ default_goal_substitution := droid
endif
# Replace the PRODUCT-* goal with the build goal that it refers to.
# Note that this will ensure that it appears in the same relative
# position, in case it matters.
- #
- # Note that modifying this will not affect the goals that make will
- # attempt to build, but it's important because we inspect this value
- # in certain situations (like for "make sdk").
- #
- MAKECMDGOALS := $(patsubst $(goal_name),$(default_goal_substitution),$(MAKECMDGOALS))
-
- # Define a rule for the PRODUCT-* goal, and make it depend on the
- # patched-up command-line goals as well as any other goals that we
- # want to force.
- #
-.PHONY: $(goal_name)
-$(goal_name): $(MAKECMDGOALS)
+ override MAKECMDGOALS := $(patsubst $(goal_name),$(default_goal_substitution),$(MAKECMDGOALS))
endif
+endif # !KATI
# else: Use the value set in the environment or buildspec.mk.
# ---------------------------------------------------------------
# Provide "APP-<appname>" targets, which lets you build
# an unbundled app.
#
+ifndef KATI
unbundled_goals := $(strip $(filter APP-%,$(MAKECMDGOALS)))
ifdef unbundled_goals
ifneq ($(words $(unbundled_goals)),1)
$(error Only one APP-* goal may be specified; saw "$(unbundled_goals)")
endif
TARGET_BUILD_APPS := $(strip $(subst -, ,$(patsubst APP-%,%,$(unbundled_goals))))
- ifneq ($(filter $(DEFAULT_GOAL),$(MAKECMDGOALS)),)
- MAKECMDGOALS := $(patsubst $(unbundled_goals),,$(MAKECMDGOALS))
+ ifneq ($(filter droid,$(MAKECMDGOALS)),)
+ override MAKECMDGOALS := $(patsubst $(unbundled_goals),,$(MAKECMDGOALS))
else
- MAKECMDGOALS := $(patsubst $(unbundled_goals),$(DEFAULT_GOAL),$(MAKECMDGOALS))
+ override MAKECMDGOALS := $(patsubst $(unbundled_goals),droid,$(MAKECMDGOALS))
endif
-
-.PHONY: $(unbundled_goals)
-$(unbundled_goals): $(MAKECMDGOALS)
endif # unbundled_goals
+endif
# Default to building dalvikvm on hosts that support it...
ifeq ($(HOST_OS),linux)
diff --git a/core/soong_config.mk b/core/soong_config.mk
index a075c96..e21083d 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -13,6 +13,17 @@
endif
endif
+# Converts a list to a JSON list.
+# $1: List separator.
+# $2: List.
+_json_list = [$(if $(2),"$(subst $(1),"$(comma)",$(2))")]
+
+# Converts a space-separated list to a JSON list.
+json_list = $(call _json_list,$(space),$(1))
+
+# Converts a comma-separated list to a JSON list.
+csv_to_json_list = $(call _json_list,$(comma),$(1))
+
# Create soong.variables with copies of makefile settings. Runs every build,
# but only updates soong.variables if it changes
SOONG_VARIABLES_TMP := $(SOONG_VARIABLES).$$$$
@@ -23,13 +34,14 @@
echo ' "Make_suffix": "-$(TARGET_PRODUCT)",'; \
echo ''; \
echo ' "Platform_sdk_version": $(PLATFORM_SDK_VERSION),'; \
+ echo ' "Platform_version_all_codenames": $(call csv_to_json_list,$(PLATFORM_VERSION_ALL_CODENAMES)),'; \
echo ' "Unbundled_build": $(if $(TARGET_BUILD_APPS),true,false),'; \
echo ' "Brillo": $(if $(BRILLO),true,false),'; \
echo ' "Malloc_not_svelte": $(if $(filter true,$(MALLOC_SVELTE)),false,true),'; \
- echo ' "Allow_missing_dependencies": $(if $(TARGET_BUILD_APPS)$(filter true,$(SOONG_ALLOW_MISSING_DEPENDENCIES)),true,false),'; \
- echo ' "SanitizeHost": [$(if $(SANITIZE_HOST),"$(subst $(space),"$(comma)",$(SANITIZE_HOST))")],'; \
- echo ' "SanitizeDevice": [$(if $(SANITIZE_TARGET),"$(subst $(space),"$(comma)",$(SANITIZE_TARGET))")],'; \
- echo ' "SanitizeDeviceArch": [$(if $(SANITIZE_TARGET_ARCH),"$(subst $(space),"$(comma)",$(SANITIZE_TARGET_ARCH))")],'; \
+ echo ' "Allow_missing_dependencies": $(if $(ALLOW_MISSING_DEPENDENCIES),true,false),'; \
+ echo ' "SanitizeHost": $(call json_list,$(SANITIZE_HOST)),'; \
+ echo ' "SanitizeDevice": $(call json_list,$(SANITIZE_TARGET)),'; \
+ echo ' "SanitizeDeviceArch": $(call json_list,$(SANITIZE_TARGET_ARCH)),'; \
echo ' "HostStaticBinaries": $(if $(strip $(BUILD_HOST_static)),true,false),'; \
echo ' "Binder32bit": $(if $(BINDER32BIT),true,false),'; \
echo ' "DevicePrefer32BitExecutables": $(if $(filter true,$(TARGET_PREFER_32_BIT_EXECUTABLES)),true,false),'; \
@@ -42,8 +54,8 @@
echo ' "TidyChecks": "$(WITH_TIDY_CHECKS)",'; \
echo ''; \
echo ' "NativeCoverage": $(if $(filter true,$(NATIVE_COVERAGE)),true,false),'; \
- echo ' "CoveragePaths": [$(if $(COVERAGE_PATHS),"$(subst $(space),"$(comma)",$(subst $(comma),$(space),$(COVERAGE_PATHS)))")],'; \
- echo ' "CoverageExcludePaths": [$(if $(COVERAGE_EXCLUDE_PATHS),"$(subst $(space),"$(comma)",$(subst $(comma),$(space),$(COVERAGE_EXCLUDE_PATHS)))")],'; \
+ echo ' "CoveragePaths": $(call csv_to_json_list,$(COVERAGE_PATHS)),'; \
+ echo ' "CoverageExcludePaths": $(call csv_to_json_list,$(COVERAGE_EXCLUDE_PATHS)),'; \
echo ''; \
echo ' "DeviceName": "$(TARGET_DEVICE)",'; \
echo ' "DeviceArch": "$(TARGET_ARCH)",'; \
@@ -65,9 +77,11 @@
echo ' "CrossHostArch": "$(HOST_CROSS_ARCH)",'; \
echo ' "CrossHostSecondaryArch": "$(HOST_CROSS_2ND_ARCH)",'; \
echo ' "Safestack": $(if $(filter true,$(USE_SAFESTACK)),true,false),'; \
- echo ' "EnableCFI": $(if $(filter true,$(ENABLE_CFI)),true,false),'; \
- echo ' "Treble": $(if $(filter true,$(PRODUCT_FULL_TREBLE)),true,false),'; \
+ echo ' "EnableCFI": $(if $(filter false,$(ENABLE_CFI)),false,true),'; \
+ echo ' "Device_uses_hwc2": $(if $(filter true,$(TARGET_USES_HWC2)),true,false),'; \
echo ' "Override_rs_driver": "$(OVERRIDE_RS_DRIVER)",'; \
+ echo ' "Treble": $(if $(filter true,$(PRODUCT_FULL_TREBLE)),true,false),'; \
+ echo ' "Pdk": $(if $(filter true,$(TARGET_BUILD_PDK)),true,false),'; \
echo ''; \
echo ' "ArtUseReadBarrier": $(if $(filter false,$(PRODUCT_ART_USE_READ_BARRIER)),false,true),'; \
echo ''; \
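For reference, a few illustrative expansions of the new JSON helpers (sample values invented):

#   $(call json_list,address thread)        -> ["address","thread"]
#   $(call json_list,)                      -> []
#   my_paths := frameworks/av,frameworks/native
#   $(call csv_to_json_list,$(my_paths))    -> ["frameworks/av","frameworks/native"]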
diff --git a/core/static_java_library.mk b/core/static_java_library.mk
index 69196f4..6452fa8 100644
--- a/core/static_java_library.mk
+++ b/core/static_java_library.mk
@@ -39,6 +39,7 @@
# A static Java library needs to explicily set LOCAL_RESOURCE_DIR.
ifdef LOCAL_RESOURCE_DIR
need_compile_res := true
+LOCAL_RESOURCE_DIR := $(foreach d,$(LOCAL_RESOURCE_DIR),$(call clean-path,$(d)))
endif
ifdef LOCAL_USE_AAPT2
ifneq ($(LOCAL_STATIC_ANDROID_LIBRARIES),)
@@ -186,9 +187,9 @@
# if we have custom proguarding done use the proguarded classes jar instead of the normal classes jar
ifeq ($(filter custom,$(LOCAL_PROGUARD_ENABLED)),custom)
-aar_classes_jar = $(full_classes_jar)
+aar_classes_jar = $(full_classes_proguard_jar)
else
-aar_classes_jar = $(full_classes_pre_proguard_jar)
+aar_classes_jar = $(full_classes_jar)
endif
# Rule to build AAR, archive including classes.jar, resource, etc.
diff --git a/core/tasks/build_custom_images.mk b/core/tasks/build_custom_images.mk
index 8ebf89b..0750217 100644
--- a/core/tasks/build_custom_images.mk
+++ b/core/tasks/build_custom_images.mk
@@ -37,6 +37,14 @@
# - CUSTOM_IMAGE_SELINUX, set to "true" if the image supports selinux.
# - CUSTOM_IMAGE_SUPPORT_VERITY, set to "true" if the product supports verity.
# - CUSTOM_IMAGE_VERITY_BLOCK_DEVICE
+# - CUSTOM_IMAGE_AVB_HASH_ENABLE, set to "true" to add an AVB HASH footer.
+# - CUSTOM_IMAGE_AVB_ADD_HASH_FOOTER_ARGS, additional args for the AVB HASH footer.
+# - CUSTOM_IMAGE_AVB_HASHTREE_ENABLE, set to "true" to add an AVB HASHTREE
+# footer.
+# - CUSTOM_IMAGE_AVB_ADD_HASHTREE_FOOTER_ARGS, additional args for the AVB
+# HASHTREE footer.
+# - CUSTOM_IMAGE_AVB_KEY_PATH, custom AVB signing key.
+# - CUSTOM_IMAGE_AVB_ALGORITHM, custom AVB signing algorithm.
#
# To build all those images, run "make custom_images".
@@ -54,6 +62,12 @@
CUSTOM_IMAGE_SELINUX \
CUSTOM_IMAGE_SUPPORT_VERITY \
CUSTOM_IMAGE_VERITY_BLOCK_DEVICE \
+ CUSTOM_IMAGE_AVB_HASH_ENABLE \
+ CUSTOM_IMAGE_AVB_ADD_HASH_FOOTER_ARGS \
+ CUSTOM_IMAGE_AVB_HASHTREE_ENABLE \
+ CUSTOM_IMAGE_AVB_ADD_HASHTREE_FOOTER_ARGS \
+ CUSTOM_IMAGE_AVB_KEY_PATH \
+ CUSTOM_IMAGE_AVB_ALGORITHM \
# We don't expect product makefile to inherit/override PRODUCT_CUSTOM_IMAGE_MAKEFILES,
# so we don't put it in the _product_var_list.
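A hedged sketch of a product's custom-image makefile using the new AVB hooks; every value below is a placeholder. Only one of the two *_ENABLE flags may be set to true (the check added in build_custom_image.mk errors out otherwise), and if CUSTOM_IMAGE_AVB_KEY_PATH is omitted, the default INTERNAL_AVB_SIGNING_ARGS are used.

# Hypothetical custom image definition (all values are illustrative only).
CUSTOM_IMAGE_MOUNT_POINT := oem
CUSTOM_IMAGE_PARTITION_SIZE := 67108864
CUSTOM_IMAGE_AVB_HASHTREE_ENABLE := true
CUSTOM_IMAGE_AVB_ADD_HASHTREE_FOOTER_ARGS := --hash_algorithm sha256
CUSTOM_IMAGE_AVB_KEY_PATH := external/avb/test/data/testkey_rsa4096.pem
CUSTOM_IMAGE_AVB_ALGORITHM := SHA256_RSA4096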
diff --git a/core/tasks/device-tests.mk b/core/tasks/device-tests.mk
index 731937f..b1b936a 100644
--- a/core/tasks/device-tests.mk
+++ b/core/tasks/device-tests.mk
@@ -17,11 +17,13 @@
device-tests-zip := $(PRODUCT_OUT)/device-tests.zip
$(device-tests-zip): $(COMPATIBILITY.device-tests.FILES) $(SOONG_ZIP)
- echo $(COMPATIBILITY.device-tests.FILES) > $@.list
+ echo $(sort $(COMPATIBILITY.device-tests.FILES)) > $@.list
sed -i -e 's/\s\+/\n/g' $@.list
grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
- $(hide) $(SOONG_ZIP) -d -o $@ -C $(HOST_OUT) -l $@-host.list -C $(PRODUCT_OUT) -l $@-target.list
+ $(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
device-tests: $(device-tests-zip)
$(call dist-for-goals, device-tests, $(device-tests-zip))
+
+tests: device-tests
diff --git a/core/tasks/general-tests.mk b/core/tasks/general-tests.mk
index e02faa7..763dd51 100644
--- a/core/tasks/general-tests.mk
+++ b/core/tasks/general-tests.mk
@@ -16,11 +16,11 @@
general-tests-zip := $(PRODUCT_OUT)/general-tests.zip
$(general-tests-zip): $(COMPATIBILITY.general-tests.FILES) $(SOONG_ZIP)
- echo $(COMPATIBILITY.general-tests.FILES) > $@.list
+ echo $(sort $(COMPATIBILITY.general-tests.FILES)) > $@.list
sed -i -e 's/\s\+/\n/g' $@.list
grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
- $(hide) $(SOONG_ZIP) -d -o $@ -C $(HOST_OUT) -l $@-host.list -C $(PRODUCT_OUT) -l $@-target.list
+ $(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
general-tests: $(general-tests-zip)
$(call dist-for-goals, general-tests, $(general-tests-zip))
diff --git a/core/tasks/tools/build_custom_image.mk b/core/tasks/tools/build_custom_image.mk
index f0db476..8c098d6 100644
--- a/core/tasks/tools/build_custom_image.mk
+++ b/core/tasks/tools/build_custom_image.mk
@@ -62,6 +62,14 @@
$(eval my_image_copy_files += $(src))\
$(eval my_copy_pairs += $(src):$(my_staging_dir)/$(word 2,$(pair))))
+ifndef CUSTOM_IMAGE_AVB_KEY_PATH
+# If key path isn't specified, use the default signing args.
+my_avb_signing_args := $(INTERNAL_AVB_SIGNING_ARGS)
+else
+my_avb_signing_args := \
+ --algorithm $(CUSTOM_IMAGE_AVB_ALGORITHM) --key $(CUSTOM_IMAGE_AVB_KEY_PATH)
+endif
+
$(my_built_custom_image): PRIVATE_INTERMEDIATES := $(intermediates)
$(my_built_custom_image): PRIVATE_MOUNT_POINT := $(CUSTOM_IMAGE_MOUNT_POINT)
$(my_built_custom_image): PRIVATE_PARTITION_SIZE := $(CUSTOM_IMAGE_PARTITION_SIZE)
@@ -74,6 +82,17 @@
$(my_built_custom_image): PRIVATE_VERITY_KEY := $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY)
$(my_built_custom_image): PRIVATE_VERITY_BLOCK_DEVICE := $(CUSTOM_IMAGE_VERITY_BLOCK_DEVICE)
$(my_built_custom_image): PRIVATE_DICT_FILE := $(CUSTOM_IMAGE_DICT_FILE)
+$(my_built_custom_image): PRIVATE_AVB_AVBTOOL := $(AVBTOOL)
+$(my_built_custom_image): PRIVATE_AVB_SIGNING_ARGS := $(my_avb_signing_args)
+$(my_built_custom_image): PRIVATE_AVB_HASH_ENABLE := $(CUSTOM_IMAGE_AVB_HASH_ENABLE)
+$(my_built_custom_image): PRIVATE_AVB_ADD_HASH_FOOTER_ARGS := $(CUSTOM_IMAGE_AVB_ADD_HASH_FOOTER_ARGS)
+$(my_built_custom_image): PRIVATE_AVB_HASHTREE_ENABLE := $(CUSTOM_IMAGE_AVB_HASHTREE_ENABLE)
+$(my_built_custom_image): PRIVATE_AVB_ADD_HASHTREE_FOOTER_ARGS := $(CUSTOM_IMAGE_AVB_ADD_HASHTREE_FOOTER_ARGS)
+ifeq (true,$(filter true, $(CUSTOM_IMAGE_AVB_HASH_ENABLE) $(CUSTOM_IMAGE_AVB_HASHTREE_ENABLE)))
+ $(my_built_custom_image): $(AVBTOOL)
+else ifneq (,$(filter true, $(CUSTOM_IMAGE_AVB_HASH_ENABLE) $(CUSTOM_IMAGE_AVB_HASHTREE_ENABLE)))
+ $(error Cannot set both CUSTOM_IMAGE_AVB_HASH_ENABLE and CUSTOM_IMAGE_AVB_HASHTREE_ENABLE to true)
+endif
$(my_built_custom_image): $(INTERNAL_USERIMAGES_DEPS) $(my_built_modules) $(my_image_copy_files) \
$(CUSTOM_IMAGE_DICT_FILE)
@echo "Build image $@"
@@ -88,6 +107,7 @@
# Generate the dict.
$(hide) echo "# For all accepted properties, see BuildImage() in tools/releasetools/build_image.py" > $(PRIVATE_INTERMEDIATES)/image_info.txt
$(hide) echo "mount_point=$(PRIVATE_MOUNT_POINT)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
+ $(hide) echo "partition_name=$(PRIVATE_MOUNT_POINT)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
$(hide) echo "fs_type=$(PRIVATE_FILE_SYSTEM_TYPE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
$(hide) echo "partition_size=$(PRIVATE_PARTITION_SIZE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
$(hide) echo "ext_mkuserimg=$(notdir $(MKEXTUSERIMG))" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
@@ -97,6 +117,14 @@
echo "verity_key=$(PRIVATE_VERITY_KEY)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
echo "verity_signer_cmd=$(VERITY_SIGNER)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
echo "verity_block_device=$(PRIVATE_VERITY_BLOCK_DEVICE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
+ $(hide) echo "avb_avbtool=$(PRIVATE_AVB_AVBTOOL)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
+ $(hide) echo "avb_signing_args=$(PRIVATE_AVB_SIGNING_ARGS)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
+ $(if $(PRIVATE_AVB_HASH_ENABLE),\
+ $(hide) echo "avb_hash_enable=$(PRIVATE_AVB_HASH_ENABLE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
+ echo "avb_add_hash_footer_args=$(PRIVATE_AVB_ADD_HASH_FOOTER_ARGS)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
+ $(if $(PRIVATE_AVB_HASHTREE_ENABLE),\
+ $(hide) echo "avb_hashtree_enable=$(PRIVATE_AVB_HASHTREE_ENABLE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
+ echo "avb_add_hashtree_footer_args=$(PRIVATE_AVB_ADD_HASHTREE_FOOTER_ARGS)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
$(if $(PRIVATE_DICT_FILE),\
$(hide) echo "# Properties from $(PRIVATE_DICT_FILE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
cat $(PRIVATE_DICT_FILE) >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
diff --git a/core/tasks/tools/package-modules.mk b/core/tasks/tools/package-modules.mk
index 4dde9fd..63fab63 100644
--- a/core/tasks/tools/package-modules.mk
+++ b/core/tasks/tools/package-modules.mk
@@ -25,6 +25,12 @@
$(eval my_modules_and_deps += $(_explicitly_required))\
)
+# Ignore unknown installed files on partial builds
+my_missing_files :=
+ifneq ($(ALLOW_MISSING_DEPENDENCIES),true)
+my_missing_files = $(shell $(call echo-warning,$(my_makefile),$(my_package_name): Unknown installed file for module '$(1)'))
+endif
+
# Iterate over modules' built files and installed files;
# Calculate the dest files in the output zip file.
@@ -34,7 +40,7 @@
$(eval _built_files := $(strip $(ALL_MODULES.$(m).BUILT_INSTALLED)\
$(ALL_MODULES.$(m)$(TARGET_2ND_ARCH_MODULE_SUFFIX).BUILT_INSTALLED)))\
$(if $(_pickup_files)$(_built_files),,\
- $(shell $(call echo-warning,$(my_makefile),$(my_package_name): Unknown installed file for module '$(m)')))\
+ $(call my_missing_files,$(m)))\
$(eval my_pickup_files += $(_pickup_files))\
$(foreach i, $(_built_files),\
$(eval bui_ins := $(subst :,$(space),$(i)))\
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 5066522..0a9f483 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -40,7 +40,7 @@
DEFAULT_PLATFORM_VERSION := OPR1
MIN_PLATFORM_VERSION := OPR1
-MAX_PLATFORM_VERSION := OPR1
+MAX_PLATFORM_VERSION := PPR1
ALLOWED_VERSIONS := $(call allowed-platform-versions,\
$(MIN_PLATFORM_VERSION),\
@@ -49,6 +49,12 @@
ifndef TARGET_PLATFORM_VERSION
TARGET_PLATFORM_VERSION := $(DEFAULT_PLATFORM_VERSION)
+else ifeq ($(TARGET_PLATFORM_VERSION),OPR1)
+ # HACK: lunch currently sets TARGET_PLATFORM_VERSION to
+ # DEFAULT_PLATFORM_VERSION, which causes unnecessary pain
+ # when the old DEFAULT_PLATFORM_VERSION becomes invalid.
+ # For now, silently upgrade OPR1 to the current default.
+ TARGET_PLATFORM_VERSION := $(DEFAULT_PLATFORM_VERSION)
endif
ifeq (,$(filter $(ALLOWED_VERSIONS), $(TARGET_PLATFORM_VERSION)))
@@ -74,10 +80,12 @@
# please add that PLATFORM_VERSION to the following text file:
# cts/tests/tests/os/assets/platform_versions.txt
PLATFORM_VERSION.OPR1 := 8.0.0
+PLATFORM_VERSION.PPR1 := P
# This is the current development code-name, if the build is not a final
# release build. If this is a final release build, it is simply "REL".
-PLATFORM_VERSION_CODENAME.OPR1 := REL
+PLATFORM_VERSION_CODENAME.OPR1 := O
+PLATFORM_VERSION_CODENAME.PPR1 := P
ifndef PLATFORM_VERSION
PLATFORM_VERSION := $(PLATFORM_VERSION.$(TARGET_PLATFORM_VERSION))
@@ -126,7 +134,22 @@
# This is all of the development codenames that are active. Should be either
# the same as PLATFORM_VERSION_CODENAME or a comma-separated list of additional
# codenames after PLATFORM_VERSION_CODENAME.
- PLATFORM_VERSION_ALL_CODENAMES := $(PLATFORM_VERSION_CODENAME)
+ PLATFORM_VERSION_ALL_CODENAMES :=
+
+ # Build a list of all possible code names. Avoid duplicates, and stop when we
+ # reach a codename that matches PLATFORM_VERSION_CODENAME (anything beyond
+  # that is not included in our build).
+ _versions_in_target := \
+ $(call find_and_earlier,$(ALL_VERSIONS),$(TARGET_PLATFORM_VERSION))
+ $(foreach version,$(_versions_in_target),\
+ $(eval _codename := $(PLATFORM_VERSION_CODENAME.$(version)))\
+ $(if $(filter $(_codename),$(PLATFORM_VERSION_ALL_CODENAMES)),,\
+ $(eval PLATFORM_VERSION_ALL_CODENAMES += $(_codename))))
+
+ # And convert from space separated to comma separated.
+ PLATFORM_VERSION_ALL_CODENAMES := \
+ $(subst $(space),$(comma),$(strip $(PLATFORM_VERSION_ALL_CODENAMES)))
+
endif
ifeq (REL,$(PLATFORM_VERSION_CODENAME))
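A worked expansion of the codename loop above, assuming (as in this file) that only OPR1 and PPR1 define codenames (O and P respectively):

#   TARGET_PLATFORM_VERSION := PPR1  ->  _versions_in_target ends with "OPR1 PPR1"
#                                        PLATFORM_VERSION_ALL_CODENAMES := O,P
#   TARGET_PLATFORM_VERSION := OPR1  ->  PLATFORM_VERSION_ALL_CODENAMES := O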
diff --git a/envsetup.sh b/envsetup.sh
index ec6c960..df4d106 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -610,7 +610,11 @@
export TARGET_PRODUCT=$(get_build_var TARGET_PRODUCT)
export TARGET_BUILD_VARIANT=$(get_build_var TARGET_BUILD_VARIANT)
- export TARGET_PLATFORM_VERSION=$(get_build_var TARGET_PLATFORM_VERSION)
+ if [ -n "$version" ]; then
+ export TARGET_PLATFORM_VERSION=$(get_build_var TARGET_PLATFORM_VERSION)
+ else
+ unset TARGET_PLATFORM_VERSION
+ fi
export TARGET_BUILD_TYPE=release
echo
@@ -1239,7 +1243,7 @@
Darwin)
function mgrep()
{
- find -E . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o -type f -iregex '.*/(Makefile|Makefile\..*|.*\.make|.*\.mak|.*\.mk|.*\.bp)' \
+ find -E . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o \( -iregex '.*/(Makefile|Makefile\..*|.*\.make|.*\.mak|.*\.mk|.*\.bp)' -o -regex '(.*/)?soong/[^/]*.go' \) -type f \
-exec grep --color -n "$@" {} +
}
@@ -1253,7 +1257,7 @@
*)
function mgrep()
{
- find . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o -regextype posix-egrep -iregex '(.*\/Makefile|.*\/Makefile\..*|.*\.make|.*\.mak|.*\.mk|.*\.bp)' -type f \
+ find . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o \( -regextype posix-egrep -iregex '(.*\/Makefile|.*\/Makefile\..*|.*\.make|.*\.mak|.*\.mk|.*\.bp)' -o -regextype posix-extended -regex '(.*/)?soong/[^/]*.go' \) -type f \
-exec grep --color -n "$@" {} +
}
diff --git a/target/product/aosp_arm64_a.mk b/target/product/aosp_arm64_a.mk
index 535b3a4..0b0ba61 100644
--- a/target/product/aosp_arm64_a.mk
+++ b/target/product/aosp_arm64_a.mk
@@ -16,10 +16,90 @@
# PRODUCT_PROPERTY_OVERRIDES cannot be used here because sysprops will be at
# /vendor/[build|default].prop when build split is on. In order to have sysprops
-# on the generic system image, place them in build/make/target/board/generic_arm64_a/
+# on the generic system image, place them in build/make/target/board/generic_arm_nonab/
# system.prop.
-include build/make/target/product/treble_common.mk
+PRODUCT_COPY_FILES := \
+ device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml
+
+#split selinux policy
+PRODUCT_FULL_TREBLE_OVERRIDE := true
+
+# Some of these HAL interface libraries are added automatically by dependencies
+# from the framework. However, we list them all here to be explicit and to
+# prevent possible mistakes.
+PRODUCT_PACKAGES := \
+ android.dvr.composer@1.0 \
+ android.hardware.audio@2.0 \
+ android.hardware.audio.common@2.0 \
+ android.hardware.audio.common@2.0-util \
+ android.hardware.audio.effect@2.0 \
+ android.hardware.biometrics.fingerprint@2.1 \
+ android.hardware.bluetooth@1.0 \
+ android.hardware.boot@1.0 \
+ android.hardware.broadcastradio@1.0 \
+ android.hardware.broadcastradio@1.1 \
+ android.hardware.camera.common@1.0 \
+ android.hardware.camera.device@1.0 \
+ android.hardware.camera.device@3.2 \
+ android.hardware.camera.provider@2.4 \
+ android.hardware.configstore@1.0 \
+ android.hardware.contexthub@1.0 \
+ android.hardware.drm@1.0 \
+ android.hardware.gatekeeper@1.0 \
+ android.hardware.gnss@1.0 \
+ android.hardware.graphics.allocator@2.0 \
+ android.hardware.graphics.common@1.0 \
+ android.hardware.graphics.composer@2.1 \
+ android.hardware.graphics.mapper@2.0 \
+ android.hardware.ir@1.0 \
+ android.hardware.keymaster@3.0 \
+ android.hardware.light@2.0 \
+ android.hardware.media@1.0 \
+ android.hardware.media.omx@1.0 \
+ android.hardware.media.omx@1.0-utils \
+ android.hardware.memtrack@1.0 \
+ android.hardware.nfc@1.0 \
+ android.hardware.oemlock@1.0 \
+ android.hardware.power@1.0 \
+ android.hardware.radio@1.0 \
+ android.hardware.radio.deprecated@1.0 \
+ android.hardware.sensors@1.0 \
+ android.hardware.soundtrigger@2.0 \
+ android.hardware.thermal@1.0 \
+ android.hardware.tv.cec@1.0 \
+ android.hardware.tv.input@1.0 \
+ android.hardware.usb@1.0 \
+ android.hardware.vibrator@1.0 \
+ android.hardware.vr@1.0 \
+ android.hardware.weaver@1.0 \
+ android.hardware.wifi@1.0 \
+ android.hardware.wifi.supplicant@1.0 \
+ android.hidl.allocator@1.0 \
+ android.hidl.base@1.0 \
+ android.hidl.manager@1.0 \
+ android.hidl.memory@1.0 \
+
+PRODUCT_PACKAGES += \
+ libdynamic_sensor_ext \
+ libaudioroute \
+ libxml2 \
+ libtinyalsa \
+ libtinycompress \
+ cplay \
+ libion \
+
+# WiFi
+# Note: the Wifi HAL pieces (android.hardware.wifi@1.0-service, wpa_supplicant,
+# and wpa_supplicant.conf) are not here; they live in vendor.img.
+PRODUCT_PACKAGES += \
+ libwpa_client \
+ hostapd \
+ hostapd_cli \
+ wificond \
+ wifilogd \
+
+PRODUCT_SYSTEM_VERITY_PARTITION := /dev/block/bootdevice/by-name/system
$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
diff --git a/target/product/aosp_arm64_ab.mk b/target/product/aosp_arm64_ab.mk
index 442ac25..63952d0 100644
--- a/target/product/aosp_arm64_ab.mk
+++ b/target/product/aosp_arm64_ab.mk
@@ -19,7 +19,86 @@
# on the generic system image, place them in build/make/target/board/generic_arm64_ab/
# system.prop.
-include build/make/target/product/treble_common.mk
+PRODUCT_COPY_FILES := \
+ device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml
+
+#split selinux policy
+PRODUCT_FULL_TREBLE_OVERRIDE := true
+
+# Some of the HAL interface libraries are automatically added by dependencies
+# from the framework. However, we list them all here to make this explicit and
+# prevent possible mistakes.
+PRODUCT_PACKAGES := \
+ android.frameworks.vr.composer@1.0 \
+ android.hardware.audio@2.0 \
+ android.hardware.audio.common@2.0 \
+ android.hardware.audio.common@2.0-util \
+ android.hardware.audio.effect@2.0 \
+ android.hardware.biometrics.fingerprint@2.1 \
+ android.hardware.bluetooth@1.0 \
+ android.hardware.boot@1.0 \
+ android.hardware.broadcastradio@1.0 \
+ android.hardware.broadcastradio@1.1 \
+ android.hardware.camera.common@1.0 \
+ android.hardware.camera.device@1.0 \
+ android.hardware.camera.device@3.2 \
+ android.hardware.camera.provider@2.4 \
+ android.hardware.configstore@1.0 \
+ android.hardware.contexthub@1.0 \
+ android.hardware.drm@1.0 \
+ android.hardware.gatekeeper@1.0 \
+ android.hardware.gnss@1.0 \
+ android.hardware.graphics.allocator@2.0 \
+ android.hardware.graphics.common@1.0 \
+ android.hardware.graphics.composer@2.1 \
+ android.hardware.graphics.mapper@2.0 \
+ android.hardware.ir@1.0 \
+ android.hardware.keymaster@3.0 \
+ android.hardware.light@2.0 \
+ android.hardware.media@1.0 \
+ android.hardware.media.omx@1.0 \
+ android.hardware.media.omx@1.0-utils \
+ android.hardware.memtrack@1.0 \
+ android.hardware.nfc@1.0 \
+ android.hardware.power@1.0 \
+ android.hardware.radio@1.0 \
+ android.hardware.radio.deprecated@1.0 \
+ android.hardware.sensors@1.0 \
+ android.hardware.soundtrigger@2.0 \
+ android.hardware.thermal@1.0 \
+ android.hardware.tv.cec@1.0 \
+ android.hardware.tv.input@1.0 \
+ android.hardware.usb@1.0 \
+ android.hardware.vibrator@1.0 \
+ android.hardware.vr@1.0 \
+ android.hardware.wifi@1.0 \
+ android.hardware.wifi.supplicant@1.0 \
+ android.hidl.allocator@1.0 \
+ android.hidl.memory@1.0 \
+
+PRODUCT_PACKAGES += \
+ libdynamic_sensor_ext \
+ libaudioroute \
+ libxml2 \
+ libtinyalsa \
+ libtinycompress \
+ cplay \
+ libion \
+
+# WiFi
+# Note: the Wifi HAL pieces (android.hardware.wifi@1.0-service, wpa_supplicant,
+# and wpa_supplicant.conf) are not here; they live in vendor.img.
+PRODUCT_PACKAGES += \
+ libwpa_client \
+ hostapd \
+ hostapd_cli \
+ wificond \
+ wifilogd \
+
+# TODO(jiyong) move ims to vendor partition
+#PRODUCT_PACKAGES += ims
+
+PRODUCT_SYSTEM_VERITY_PARTITION := /dev/block/bootdevice/by-name/system
AB_OTA_UPDATER := true
AB_OTA_PARTITIONS := system
diff --git a/target/product/aosp_arm_a.mk b/target/product/aosp_arm_a.mk
index c3188e0..83db402 100644
--- a/target/product/aosp_arm_a.mk
+++ b/target/product/aosp_arm_a.mk
@@ -19,7 +19,88 @@
# on the generic system image, place them in build/make/target/board/generic_arm_a/
# system.prop.
-include build/make/target/product/treble_common.mk
+PRODUCT_COPY_FILES := \
+ device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml
+
+#split selinux policy
+PRODUCT_FULL_TREBLE_OVERRIDE := true
+
+# Some of the HAL interface libraries are automatically added by dependencies
+# from the framework. However, we list them all here to make this explicit and
+# prevent possible mistakes.
+PRODUCT_PACKAGES := \
+ android.dvr.composer@1.0 \
+ android.hardware.audio@2.0 \
+ android.hardware.audio.common@2.0 \
+ android.hardware.audio.common@2.0-util \
+ android.hardware.audio.effect@2.0 \
+ android.hardware.biometrics.fingerprint@2.1 \
+ android.hardware.bluetooth@1.0 \
+ android.hardware.boot@1.0 \
+ android.hardware.broadcastradio@1.0 \
+ android.hardware.broadcastradio@1.1 \
+ android.hardware.camera.common@1.0 \
+ android.hardware.camera.device@1.0 \
+ android.hardware.camera.device@3.2 \
+ android.hardware.camera.provider@2.4 \
+ android.hardware.configstore@1.0 \
+ android.hardware.contexthub@1.0 \
+ android.hardware.drm@1.0 \
+ android.hardware.gatekeeper@1.0 \
+ android.hardware.gnss@1.0 \
+ android.hardware.graphics.allocator@2.0 \
+ android.hardware.graphics.common@1.0 \
+ android.hardware.graphics.composer@2.1 \
+ android.hardware.graphics.mapper@2.0 \
+ android.hardware.ir@1.0 \
+ android.hardware.keymaster@3.0 \
+ android.hardware.light@2.0 \
+ android.hardware.media@1.0 \
+ android.hardware.media.omx@1.0 \
+ android.hardware.media.omx@1.0-utils \
+ android.hardware.memtrack@1.0 \
+ android.hardware.nfc@1.0 \
+ android.hardware.oemlock@1.0 \
+ android.hardware.power@1.0 \
+ android.hardware.radio@1.0 \
+ android.hardware.radio.deprecated@1.0 \
+ android.hardware.sensors@1.0 \
+ android.hardware.soundtrigger@2.0 \
+ android.hardware.thermal@1.0 \
+ android.hardware.tv.cec@1.0 \
+ android.hardware.tv.input@1.0 \
+ android.hardware.usb@1.0 \
+ android.hardware.usb@1.1 \
+ android.hardware.vibrator@1.0 \
+ android.hardware.vr@1.0 \
+ android.hardware.weaver@1.0 \
+ android.hardware.wifi@1.0 \
+ android.hardware.wifi.supplicant@1.0 \
+ android.hidl.allocator@1.0 \
+ android.hidl.base@1.0 \
+ android.hidl.manager@1.0 \
+ android.hidl.memory@1.0 \
+
+PRODUCT_PACKAGES += \
+ libdynamic_sensor_ext \
+ libaudioroute \
+ libxml2 \
+ libtinyalsa \
+ libtinycompress \
+ cplay \
+ libion \
+
+# WiFi
+# Note: the Wifi HAL pieces (android.hardware.wifi@1.0-service, wpa_supplicant,
+# and wpa_supplicant.conf) are not here; they live in vendor.img.
+PRODUCT_PACKAGES += \
+ libwpa_client \
+ hostapd \
+ hostapd_cli \
+ wificond \
+ wifilogd \
+
+PRODUCT_SYSTEM_VERITY_PARTITION := /dev/block/bootdevice/by-name/system
$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/verity.mk)
diff --git a/target/product/base.mk b/target/product/base.mk
index 89a2aaa..ad4c133 100644
--- a/target/product/base.mk
+++ b/target/product/base.mk
@@ -147,7 +147,8 @@
# Packages included only for eng or userdebug builds, previously debug tagged
PRODUCT_PACKAGES_DEBUG := \
- perfprofd
+ perfprofd \
+ sqlite3
PRODUCT_COPY_FILES := $(call add-to-product-copy-files-if-exists,\
frameworks/base/preloaded-classes:system/etc/preloaded-classes)
diff --git a/target/product/product_launched_with_n_mr1.mk b/target/product/product_launched_with_n_mr1.mk
new file mode 100644
index 0000000..65d4d3f
--- /dev/null
+++ b/target/product/product_launched_with_n_mr1.mk
@@ -0,0 +1,2 @@
+# PRODUCT_SHIPPING_API_LEVEL indicates the first API level the device was commercially launched with.
+PRODUCT_SHIPPING_API_LEVEL := 25
diff --git a/target/product/treble_common.mk b/target/product/treble_common.mk
deleted file mode 100644
index 92876ef..0000000
--- a/target/product/treble_common.mk
+++ /dev/null
@@ -1,183 +0,0 @@
-#
-# Copyright (C) 2017 The Android Open-Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Split selinux policy
-PRODUCT_FULL_TREBLE_OVERRIDE := true
-
-# HAL interfaces:
-# Some of HAL interface libraries are automatically added by the dependencies
-# from the framework. However, we list them all here to make it explicit and
-# prevent possible mistake.
-PRODUCT_PACKAGES := \
- android.frameworks.schedulerservice@1.0 \
- android.frameworks.sensorservice@1.0 \
- android.frameworks.vr.composer@1.0 \
- android.hardware.audio@2.0 \
- android.hardware.audio.common@2.0 \
- android.hardware.audio.common@2.0-util \
- android.hardware.audio.effect@2.0 \
- android.hardware.biometrics.fingerprint@2.1 \
- android.hardware.bluetooth@1.0 \
- android.hardware.boot@1.0 \
- android.hardware.broadcastradio@1.0 \
- android.hardware.broadcastradio@1.1 \
- android.hardware.camera.common@1.0 \
- android.hardware.camera.device@1.0 \
- android.hardware.camera.device@3.2 \
- android.hardware.camera.provider@2.4 \
- android.hardware.configstore-utils \
- android.hardware.configstore@1.0 \
- android.hardware.contexthub@1.0 \
- android.hardware.drm@1.0 \
- android.hardware.dumpstate@1.0 \
- android.hardware.gatekeeper@1.0 \
- android.hardware.gnss@1.0 \
- android.hardware.graphics.allocator@2.0 \
- android.hardware.graphics.bufferqueue@1.0 \
- android.hardware.graphics.common@1.0 \
- android.hardware.graphics.composer@2.1 \
- android.hardware.graphics.mapper@2.0 \
- android.hardware.health@1.0 \
- android.hardware.ir@1.0 \
- android.hardware.keymaster@3.0 \
- android.hardware.light@2.0 \
- android.hardware.media@1.0 \
- android.hardware.media.omx@1.0 \
- android.hardware.memtrack@1.0 \
- android.hardware.nfc@1.0 \
- android.hardware.oemlock@1.0 \
- android.hardware.power@1.0 \
- android.hardware.radio@1.0 \
- android.hardware.radio.deprecated@1.0 \
- android.hardware.sensors@1.0 \
- android.hardware.soundtrigger@2.0 \
- android.hardware.thermal@1.0 \
- android.hardware.tv.cec@1.0 \
- android.hardware.tv.input@1.0 \
- android.hardware.usb@1.0 \
- android.hardware.usb@1.1 \
- android.hardware.vibrator@1.0 \
- android.hardware.vr@1.0 \
- android.hardware.weaver@1.0 \
- android.hardware.wifi@1.0 \
- android.hardware.wifi.supplicant@1.0 \
- android.hidl.allocator@1.0 \
- android.hidl.base@1.0 \
- android.hidl.manager@1.0 \
- android.hidl.memory@1.0 \
- android.hidl.token@1.0 \
- android.system.wifi.keystore@1.0 \
-
-# VNDK:
-# Some VNDK shared objects are automatically included indirectly.
-# We list them all here to make it explicit and prevent possible mistakes.
-# An example of one such mistake was libcurl, which is included in A/B
-# devices because of update_engine, but not in non-A/B devices.
-PRODUCT_PACKAGES += \
- libaudioroute \
- libaudioutils \
- libbinder \
- libcamera_metadata \
- libcap \
- libcrypto \
- libcrypto_utils \
- libcups \
- libcurl \
- libdiskconfig \
- libdumpstateutil \
- libevent \
- libexif \
- libexpat \
- libfmq \
- libgatekeeper \
- libgui \
- libhardware_legacy \
- libhidlmemory \
- libicui18n \
- libicuuc \
- libjpeg \
- libkeymaster1 \
- libkeymaster_messages \
- libldacBT_abr \
- libldacBT_enc \
- liblz4 \
- libmdnssd \
- libmemtrack \
- libmemunreachable \
- libmetricslogger \
- libminijail \
- libnetutils \
- libnl \
- libopus \
- libpagemap \
- libpcap \
- libpcre2 \
- libpcrecpp \
- libpdfium \
- libpiex \
- libpower \
- libprocessgroup \
- libprocinfo \
- libprotobuf-cpp-full \
- libprotobuf-cpp-lite \
- libradio_metadata \
- libsoftkeymasterdevice \
- libsonic \
- libsonivox \
- libspeexresampler \
- libsqlite \
- libssl \
- libsuspend \
- libsysutils \
- libtinyalsa \
- libtinyxml2 \
- libui \
- libusbhost \
- libvixl-arm \
- libvixl-arm64 \
- libvorbisidec \
- libwebrtc_audio_preprocessing \
- libxml2 \
- libziparchive \
-
-# VNDK-SP:
-PRODUCT_PACKAGES += \
- vndk-sp \
-
-PRODUCT_SYSTEM_VERITY_PARTITION := /dev/block/bootdevice/by-name/system
-
-# Wifi:
-# Wifi HAL (android.hardware.wifi@1.0-service, wpa_supplicant,
-# and wpa_supplicant.conf) is not here. They are in vendor.img
-PRODUCT_PACKAGES += \
- wificond \
-
-# Audio:
-USE_XML_AUDIO_POLICY_CONF := 1
-# The following policy XML files are used as fallback for
-# vendors/devices not using XML to configure audio policy.
-PRODUCT_COPY_FILES += \
- frameworks/av/services/audiopolicy/config/audio_policy_configuration.xml:system/etc/audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/a2dp_audio_policy_configuration.xml:system/etc/a2dp_audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/usb_audio_policy_configuration.xml:system/etc/usb_audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/r_submix_audio_policy_configuration.xml:system/etc/r_submix_audio_policy_configuration.xml \
- frameworks/av/services/audiopolicy/config/audio_policy_volumes.xml:system/etc/audio_policy_volumes.xml \
- frameworks/av/services/audiopolicy/config/default_volume_tables.xml:system/etc/default_volume_tables.xml \
-
-# May need to review why the followings are needed in generic system image.
-PRODUCT_COPY_FILES := \
- device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml
-
diff --git a/tests/envsetup_tests.sh b/tests/envsetup_tests.sh
index 4aae255..abdcd56 100755
--- a/tests/envsetup_tests.sh
+++ b/tests/envsetup_tests.sh
@@ -19,8 +19,9 @@
valid_version=PPR1
# lunch tests
-check_lunch "aosp_arm64" "aosp_arm64" "eng" "$default_version"
-check_lunch "aosp_arm64-userdebug" "aosp_arm64" "userdebug" "$default_version"
+check_lunch "aosp_arm64" "aosp_arm64" "eng" ""
+check_lunch "aosp_arm64-userdebug" "aosp_arm64" "userdebug" ""
+check_lunch "aosp_arm64-userdebug-$default_version" "aosp_arm64" "userdebug" "$default_version"
check_lunch "aosp_arm64-userdebug-$valid_version" "aosp_arm64" "userdebug" "$valid_version"
check_lunch "abc" "" "" ""
check_lunch "aosp_arm64-abc" "" "" ""
diff --git a/tools/checkowners.py b/tools/checkowners.py
index 8f450e7..b874955 100755
--- a/tools/checkowners.py
+++ b/tools/checkowners.py
@@ -5,6 +5,7 @@
import argparse
import re
import sys
+import urllib
import urllib2
parser = argparse.ArgumentParser(description='Check OWNERS file syntax')
@@ -29,7 +30,8 @@
def find_address(address):
if address not in checked_addresses:
- request = gerrit_server + '/accounts/?suggest&q=' + address
+ request = (gerrit_server + '/accounts/?n=1&o=ALL_EMAILS&q=email:'
+ + urllib.quote(address))
echo('Checking email address: ' + address)
result = urllib2.urlopen(request).read()
expected = '"email": "' + address + '"'
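For clarity, a minimal sketch of the request the updated find_address() now builds (Python 2, matching the script's urllib/urllib2 usage; the server URL and address below are hypothetical):

    import urllib

    gerrit_server = 'https://android-review.googlesource.com'  # example value
    address = 'someone+owners@example.com'                      # example value
    request = (gerrit_server + '/accounts/?n=1&o=ALL_EMAILS&q=email:'
               + urllib.quote(address))
    print(request)
    # .../accounts/?n=1&o=ALL_EMAILS&q=email:someone%2Bowners%40example.com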
diff --git a/tools/fs_config/Android.mk b/tools/fs_config/Android.mk
index 65f8a08..ab7f92d 100644
--- a/tools/fs_config/Android.mk
+++ b/tools/fs_config/Android.mk
@@ -113,6 +113,11 @@
include $(BUILD_HOST_EXECUTABLE)
fs_config_generate_bin := $(LOCAL_INSTALLED_MODULE)
+# List of all supported vendor, oem and odm partitions
+fs_config_generate_extra_partition_list := $(strip \
+ $(if $(BOARD_USES_VENDORIMAGE)$(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE),vendor) \
+ $(if $(BOARD_USES_OEMIMAGE)$(BOARD_OEMIMAGE_FILE_SYSTEM_TYPE),oem) \
+ $(if $(BOARD_USES_ODMIMAGE)$(BOARD_ODMIMAGE_FILE_SYSTEM_TYPE),odm))
##################################
# Generate the system/etc/fs_config_dirs binary file for the target
@@ -121,10 +126,13 @@
LOCAL_MODULE := fs_config_dirs
LOCAL_MODULE_CLASS := ETC
+LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),$(LOCAL_MODULE)_$(t))
include $(BUILD_SYSTEM)/base_rules.mk
$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
@mkdir -p $(dir $@)
- $< -D -o $@
+ $< -D $(if $(fs_config_generate_extra_partition_list), \
+ -P '$(subst $(space),$(comma),$(addprefix -,$(fs_config_generate_extra_partition_list)))') \
+ -o $@
##################################
# Generate the system/etc/fs_config_files binary file for the target
@@ -133,10 +141,112 @@
LOCAL_MODULE := fs_config_files
LOCAL_MODULE_CLASS := ETC
+LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),$(LOCAL_MODULE)_$(t))
include $(BUILD_SYSTEM)/base_rules.mk
$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
@mkdir -p $(dir $@)
- $< -F -o $@
+ $< -F $(if $(fs_config_generate_extra_partition_list), \
+ -P '$(subst $(space),$(comma),$(addprefix -,$(fs_config_generate_extra_partition_list)))') \
+ -o $@
+
+ifneq ($(filter vendor,$(fs_config_generate_extra_partition_list)),)
+##################################
+# Generate the vendor/etc/fs_config_dirs binary file for the target
+# Add fs_config_dirs or fs_config_dirs_vendor to PRODUCT_PACKAGES in
+# the device make file to enable.
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_dirs_vendor
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
+LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+ @mkdir -p $(dir $@)
+ $< -D -P vendor -o $@
+
+##################################
+# Generate the vendor/etc/fs_config_files binary file for the target
+# Add fs_config_files or fs_config_files_vendor to PRODUCT_PACKAGES in
+# the device make file to enable
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_files_vendor
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_files
+LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+ @mkdir -p $(dir $@)
+ $< -F -P vendor -o $@
+
+endif
+
+ifneq ($(filter oem,$(fs_config_generate_extra_partition_list)),)
+##################################
+# Generate the oem/etc/fs_config_dirs binary file for the target
+# Add fs_config_dirs or fs_config_dirs_oem to PRODUCT_PACKAGES in
+# the device make file to enable
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_dirs_oem
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
+LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+ @mkdir -p $(dir $@)
+ $< -D -P oem -o $@
+
+##################################
+# Generate the oem/etc/fs_config_files binary file for the target
+# Add fs_config_files or fs_config_files_oem to PRODUCT_PACKAGES in
+# the device make file to enable
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_files_oem
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_files
+LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+ @mkdir -p $(dir $@)
+ $< -F -P oem -o $@
+
+endif
+
+ifneq ($(filter odm,$(fs_config_generate_extra_partition_list)),)
+##################################
+# Generate the odm/etc/fs_config_dirs binary file for the target
+# Add fs_config_dirs or fs_config_dirs_odm to PRODUCT_PACKAGES in
+# the device make file to enable
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_dirs_odm
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
+LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+ @mkdir -p $(dir $@)
+ $< -D -P odm -o $@
+
+##################################
+# Generate the odm/etc/fs_config_files binary file for the target
+# Add fs_config_files or fs_config_files_odm to PRODUCT_PACKAGES in
+# the device make file to enable
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := fs_config_files_odm
+LOCAL_MODULE_CLASS := ETC
+LOCAL_INSTALLED_MODULE_STEM := fs_config_files
+LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
+include $(BUILD_SYSTEM)/base_rules.mk
+$(LOCAL_BUILT_MODULE): $(fs_config_generate_bin)
+ @mkdir -p $(dir $@)
+ $< -F -P odm -o $@
+
+endif
# The newer passwd/group targets are only generated if you
# use the new TARGET_FS_CONFIG_GEN method.
@@ -195,3 +305,37 @@
my_fs_config_h :=
fs_config_generate_bin :=
my_gen_oem_aid :=
+fs_config_generate_extra_partition_list :=
+
+# -----------------------------------------------------------------------------
+# Unit tests.
+# -----------------------------------------------------------------------------
+
+test_c_flags := \
+ -fstack-protector-all \
+ -g \
+ -Wall \
+ -Wextra \
+ -Werror \
+ -fno-builtin \
+ -DANDROID_FILESYSTEM_CONFIG='"android_filesystem_config_test_data.h"'
+
+##################################
+# test executable
+include $(CLEAR_VARS)
+LOCAL_MODULE := fs_config_generate_test
+LOCAL_SRC_FILES := fs_config_generate.c
+LOCAL_SHARED_LIBRARIES := libcutils
+LOCAL_CFLAGS := $(test_c_flags)
+LOCAL_MODULE_RELATIVE_PATH := fs_config-unit-tests
+LOCAL_GTEST := false
+include $(BUILD_HOST_NATIVE_TEST)
+
+##################################
+# gTest tool
+include $(CLEAR_VARS)
+LOCAL_MODULE := fs_config-unit-tests
+LOCAL_CFLAGS += $(test_c_flags) -DHOST
+LOCAL_SHARED_LIBRARIES := liblog libcutils libbase
+LOCAL_SRC_FILES := fs_config_test.cpp
+include $(BUILD_HOST_NATIVE_TEST)
diff --git a/tools/fs_config/README b/tools/fs_config/README
index 9919131..5af407f 100644
--- a/tools/fs_config/README
+++ b/tools/fs_config/README
@@ -156,9 +156,28 @@
${OUT} directory are used in the final stages when building the filesystem
images to set the file and directory properties.
+For systems with separate partition images, such as vendor or oem,
+fs_config_generate can be instructed to filter the file references so that
+each partition's entries land in its own etc/fs_config_dirs or
+etc/fs_config_files location. To blacklist a partition's data, provide a
+comma separated list of partition names, each prefixed with a minus sign. To
+whitelist a partition's data, provide the partition name on its own.
+
+For example:
+- For system.img, but not vendor, oem or odm file references:
+ -P -vendor,-oem,-odm
+  This ensures the results contain only content associated with the system
+  partition, blacklisting the vendor, oem and odm content.
+- For vendor.img file references: -P vendor
+- For oem.img file references: -P oem
+- For odm.img file references: -P odm
+
fs_config_generate --help reports:
Generate binary content for fs_config_dirs (-D) and fs_config_files (-F)
-from device-specific android_filesystem_config.h override
+from device-specific android_filesystem_config.h override. Filter based
+on a comma separated partition list (-P) whitelist or prefixed by a
+minus blacklist. Partitions are identified as path references to
+<partition>/ or system/<partition>/
-Usage: fs_config_generate -D|-F [-o output-file]
+Usage: fs_config_generate -D|-F [-P list] [-o output-file]
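A hedged Python sketch of the filter decision described above; the authoritative implementation is the C code added to fs_config_generate.c later in this change, and the entry prefixes and partition lists below are only examples:

    def keep_entry(prefix, partitions):
        """partitions: comma separated names; a leading '-' marks a blacklist entry."""
        if not partitions:
            return True
        keep = False
        all_blacklist_but_no_match = True
        for name in partitions.split(','):
            blacklist = name.startswith('-')
            name = name.lstrip('-').rstrip('/')
            if not blacklist:
                all_blacklist_but_no_match = False
            # An entry belongs to a partition if its path starts with
            # <partition>/ or system/<partition>/.
            if (prefix.startswith(name + '/') or
                    prefix.startswith('system/' + name + '/')):
                all_blacklist_but_no_match = False
                keep = not blacklist
                break
        return keep or all_blacklist_but_no_match

    print(keep_entry('vendor/etc/fs_config_dirs', '-vendor,-oem,-odm'))  # False
    print(keep_entry('system/etc/fs_config_dirs', '-vendor,-oem,-odm'))  # True
    print(keep_entry('vendor/etc/fs_config_dirs', 'vendor'))             # True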
diff --git a/tools/fs_config/android_filesystem_config_test_data.h b/tools/fs_config/android_filesystem_config_test_data.h
new file mode 100644
index 0000000..07bc8e5
--- /dev/null
+++ b/tools/fs_config/android_filesystem_config_test_data.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <private/android_filesystem_config.h>
+
+/* Test Data */
+
+#undef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS
+#undef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_FILES
+
+static const struct fs_path_config android_device_dirs[] = {
+ {00555, AID_ROOT, AID_SYSTEM, 0, "system/etc"},
+ {00555, AID_ROOT, AID_SYSTEM, 0, "vendor/etc"},
+ {00555, AID_ROOT, AID_SYSTEM, 0, "oem/etc"},
+ {00555, AID_ROOT, AID_SYSTEM, 0, "odm/etc"},
+ {00755, AID_SYSTEM, AID_ROOT, 0, "system/oem/etc"},
+ {00755, AID_SYSTEM, AID_ROOT, 0, "system/odm/etc"},
+ {00755, AID_SYSTEM, AID_ROOT, 0, "system/vendor/etc"},
+ {00755, AID_SYSTEM, AID_ROOT, 0, "data/misc"},
+ {00755, AID_SYSTEM, AID_ROOT, 0, "oem/data/misc"},
+ {00755, AID_SYSTEM, AID_ROOT, 0, "odm/data/misc"},
+ {00755, AID_SYSTEM, AID_ROOT, 0, "vendor/data/misc"},
+ {00555, AID_SYSTEM, AID_ROOT, 0, "etc"},
+};
+
+static const struct fs_path_config android_device_files[] = {
+ {00444, AID_ROOT, AID_SYSTEM, 0, "system/etc/fs_config_dirs"},
+ {00444, AID_ROOT, AID_SYSTEM, 0, "vendor/etc/fs_config_dirs"},
+ {00444, AID_ROOT, AID_SYSTEM, 0, "oem/etc/fs_config_dirs"},
+ {00444, AID_ROOT, AID_SYSTEM, 0, "odm/etc/fs_config_dirs"},
+ {00444, AID_ROOT, AID_SYSTEM, 0, "system/etc/fs_config_files"},
+ {00444, AID_ROOT, AID_SYSTEM, 0, "vendor/etc/fs_config_files"},
+ {00444, AID_ROOT, AID_SYSTEM, 0, "oem/etc/fs_config_files"},
+ {00444, AID_ROOT, AID_SYSTEM, 0, "odm/etc/fs_config_files"},
+ {00644, AID_SYSTEM, AID_ROOT, 0, "system/vendor/etc/fs_config_dirs"},
+ {00644, AID_SYSTEM, AID_ROOT, 0, "system/oem/etc/fs_config_dirs"},
+ {00644, AID_SYSTEM, AID_ROOT, 0, "system/odm/etc/fs_config_dirs"},
+ {00644, AID_SYSTEM, AID_ROOT, 0, "system/vendor/etc/fs_config_files"},
+ {00644, AID_SYSTEM, AID_ROOT, 0, "system/oem/etc/fs_config_files"},
+ {00644, AID_SYSTEM, AID_ROOT, 0, "system/odm/etc/fs_config_files"},
+ {00644, AID_SYSTEM, AID_ROOT, 0, "etc/fs_config_files"},
+ {00666, AID_ROOT, AID_SYSTEM, 0, "data/misc/oem"},
+};
diff --git a/tools/fs_config/fs_config_generate.c b/tools/fs_config/fs_config_generate.c
index c06213f..cb7ff9d 100644
--- a/tools/fs_config/fs_config_generate.c
+++ b/tools/fs_config/fs_config_generate.c
@@ -14,9 +14,11 @@
* limitations under the License.
*/
+#include <ctype.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
+#include <string.h>
#include <unistd.h>
#include <private/android_filesystem_config.h>
@@ -28,38 +30,57 @@
* the binary format used in the /system/etc/fs_config_dirs and
* the /system/etc/fs_config_files to be used by the runtimes.
*/
+#ifdef ANDROID_FILESYSTEM_CONFIG
+#include ANDROID_FILESYSTEM_CONFIG
+#else
#include "android_filesystem_config.h"
+#endif
#ifdef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS
- static const struct fs_path_config android_device_dirs[] = {
-};
+static const struct fs_path_config android_device_dirs[] = { };
#endif
#ifdef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_FILES
static const struct fs_path_config android_device_files[] = {
#ifdef NO_ANDROID_FILESYSTEM_CONFIG_DEVICE_DIRS
- { 0, AID_ROOT, AID_ROOT, 0, "system/etc/fs_config_dirs" },
+ {0000, AID_ROOT, AID_ROOT, 0, "system/etc/fs_config_dirs"},
+ {0000, AID_ROOT, AID_ROOT, 0, "vendor/etc/fs_config_dirs"},
+ {0000, AID_ROOT, AID_ROOT, 0, "oem/etc/fs_config_dirs"},
+ {0000, AID_ROOT, AID_ROOT, 0, "odm/etc/fs_config_dirs"},
#endif
- { 0, AID_ROOT, AID_ROOT, 0, "system/etc/fs_config_files" },
+ {0000, AID_ROOT, AID_ROOT, 0, "system/etc/fs_config_files"},
+ {0000, AID_ROOT, AID_ROOT, 0, "vendor/etc/fs_config_files"},
+ {0000, AID_ROOT, AID_ROOT, 0, "oem/etc/fs_config_files"},
+ {0000, AID_ROOT, AID_ROOT, 0, "odm/etc/fs_config_files"},
};
#endif
static void usage() {
fprintf(stderr,
"Generate binary content for fs_config_dirs (-D) and fs_config_files (-F)\n"
- "from device-specific android_filesystem_config.h override\n\n"
- "Usage: fs_config_generate -D|-F [-o output-file]\n");
+ "from device-specific android_filesystem_config.h override. Filter based\n"
+ "on a comma separated partition list (-P) whitelist or prefixed by a\n"
+ "minus blacklist. Partitions are identified as path references to\n"
+ "<partition>/ or system/<partition>/\n\n"
+ "Usage: fs_config_generate -D|-F [-P list] [-o output-file]\n");
}
-int main(int argc, char** argv) {
- const struct fs_path_config *pc;
- const struct fs_path_config *end;
- bool dir = false, file = false;
- FILE *fp = stdout;
- int opt;
+/* If tool switches to C++, use android-base/macros.h array_size() */
+#ifndef ARRAY_SIZE /* popular macro */
+#define ARRAY_SIZE(x) (sizeof(x) / sizeof((x)[0]))
+#endif
- while((opt = getopt(argc, argv, "DFho:")) != -1) {
- switch(opt) {
+int main(int argc, char** argv) {
+ const struct fs_path_config* pc;
+ const struct fs_path_config* end;
+ bool dir = false, file = false;
+ const char* partitions = NULL;
+ FILE* fp = stdout;
+ int opt;
+ static const char optstring[] = "DFP:ho:";
+
+ while ((opt = getopt(argc, argv, optstring)) != -1) {
+ switch (opt) {
case 'D':
if (file) {
fprintf(stderr, "Must specify only -D or -F\n");
@@ -76,6 +97,30 @@
}
file = true;
break;
+ case 'P':
+ if (partitions) {
+ fprintf(stderr, "Specify only one partition list\n");
+ usage();
+ exit(EXIT_FAILURE);
+ }
+ while (*optarg && isspace(*optarg)) ++optarg;
+ if (!optarg[0]) {
+ fprintf(stderr, "Partition list empty\n");
+ usage();
+ exit(EXIT_FAILURE);
+ }
+ if (!optarg[1]) {
+ fprintf(stderr, "Partition list too short \"%s\"\n", optarg);
+ usage();
+ exit(EXIT_FAILURE);
+ }
+ if ((optarg[0] == '-') && strchr(optstring, optarg[1]) && !optarg[2]) {
+ fprintf(stderr, "Partition list is a flag \"%s\"\n", optarg);
+ usage();
+ exit(EXIT_FAILURE);
+ }
+ partitions = optarg;
+ break;
case 'o':
if (fp != stdout) {
fprintf(stderr, "Specify only one output file\n");
@@ -97,6 +142,12 @@
}
}
+ if (optind < argc) {
+ fprintf(stderr, "Unknown non-argument \"%s\"\n", argv[optind]);
+ usage();
+ exit(EXIT_FAILURE);
+ }
+
if (!file && !dir) {
fprintf(stderr, "Must specify either -F or -D\n");
usage();
@@ -105,19 +156,64 @@
if (dir) {
pc = android_device_dirs;
- end = &android_device_dirs[sizeof(android_device_dirs) / sizeof(android_device_dirs[0])];
+ end = &android_device_dirs[ARRAY_SIZE(android_device_dirs)];
} else {
pc = android_device_files;
- end = &android_device_files[sizeof(android_device_files) / sizeof(android_device_files[0])];
+ end = &android_device_files[ARRAY_SIZE(android_device_files)];
}
- for(; (pc < end) && pc->prefix; pc++) {
+ for (; (pc < end) && pc->prefix; pc++) {
+ bool submit;
char buffer[512];
ssize_t len = fs_config_generate(buffer, sizeof(buffer), pc);
if (len < 0) {
fprintf(stderr, "Entry too large\n");
exit(EXIT_FAILURE);
}
- if (fwrite(buffer, 1, len, fp) != (size_t)len) {
+ submit = true;
+ if (partitions) {
+ char* partitions_copy = strdup(partitions);
+ char* arg = partitions_copy;
+ char* sv = NULL; /* Do not leave uninitialized, NULL is known safe. */
+ /* Deal with case all iterated partitions are blacklists with no match */
+ bool all_blacklist_but_no_match = true;
+ submit = false;
+
+ if (!partitions_copy) {
+ fprintf(stderr, "Failed to allocate a copy of %s\n", partitions);
+ exit(EXIT_FAILURE);
+ }
+ /* iterate through (officially) comma separated list of partitions */
+ while (!!(arg = strtok_r(arg, ",:; \t\n\r\f", &sv))) {
+ static const char system[] = "system/";
+ size_t plen;
+ bool blacklist = false;
+ if (*arg == '-') {
+ blacklist = true;
+ ++arg;
+ } else {
+ all_blacklist_but_no_match = false;
+ }
+ plen = strlen(arg);
+ /* deal with evil callers */
+ while (arg[plen - 1] == '/') {
+ --plen;
+ }
+ /* check if we have <partition>/ or /system/<partition>/ */
+ if ((!strncmp(pc->prefix, arg, plen) && (pc->prefix[plen] == '/')) ||
+ (!strncmp(pc->prefix, system, strlen(system)) &&
+ !strncmp(pc->prefix + strlen(system), arg, plen) &&
+ (pc->prefix[strlen(system) + plen] == '/'))) {
+ all_blacklist_but_no_match = false;
+ /* we have a match !!! */
+ if (!blacklist) submit = true;
+ break;
+ }
+ arg = NULL;
+ }
+ free(partitions_copy);
+ if (all_blacklist_but_no_match) submit = true;
+ }
+ if (submit && (fwrite(buffer, 1, len, fp) != (size_t)len)) {
fprintf(stderr, "Write failure\n");
exit(EXIT_FAILURE);
}
diff --git a/tools/fs_config/fs_config_generator.py b/tools/fs_config/fs_config_generator.py
index 2cf2fd8..c8d1dd3 100755
--- a/tools/fs_config/fs_config_generator.py
+++ b/tools/fs_config/fs_config_generator.py
@@ -709,7 +709,7 @@
int(cap, 0)
tmp.append('(' + cap + ')')
except ValueError:
- tmp.append('(1ULL << CAP_' + cap.upper() + ')')
+ tmp.append('CAP_MASK_LONG(CAP_' + cap.upper() + ')')
caps = tmp
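Taken out of context, the loop around the changed line behaves like the following standalone sketch (the capability names are made up; CAP_MASK_LONG is the macro the generated header now references instead of an open-coded shift):

    caps = ['0x4', 'net_admin', 'sys_admin']   # example input
    tmp = []
    for cap in caps:
        try:
            int(cap, 0)                        # numeric mask: keep verbatim
            tmp.append('(' + cap + ')')
        except ValueError:                     # named capability: emit the macro
            tmp.append('CAP_MASK_LONG(CAP_' + cap.upper() + ')')
    print(tmp)
    # ['(0x4)', 'CAP_MASK_LONG(CAP_NET_ADMIN)', 'CAP_MASK_LONG(CAP_SYS_ADMIN)']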
diff --git a/tools/fs_config/fs_config_test.cpp b/tools/fs_config/fs_config_test.cpp
new file mode 100644
index 0000000..f95a4ca
--- /dev/null
+++ b/tools/fs_config/fs_config_test.cpp
@@ -0,0 +1,223 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdio.h>
+#include <sys/cdefs.h>
+
+#include <string>
+#include <vector>
+
+#include <android-base/file.h>
+#include <android-base/macros.h>
+#include <android-base/strings.h>
+#include <android-base/stringprintf.h>
+#include <gtest/gtest.h>
+#include <private/android_filesystem_config.h>
+#include <private/fs_config.h>
+
+#include "android_filesystem_config_test_data.h"
+
+// must run test in the test directory
+const static char fs_config_generate_command[] = "./fs_config_generate_test";
+
+static std::string popenToString(std::string command) {
+ std::string ret;
+
+ FILE* fp = popen(command.c_str(), "r");
+ if (fp) {
+ if (!android::base::ReadFdToString(fileno(fp), &ret)) ret = "";
+ pclose(fp);
+ }
+ return ret;
+}
+
+static void confirm(std::string&& data, const fs_path_config* config,
+ ssize_t num_config) {
+ const struct fs_path_config_from_file* pc =
+ reinterpret_cast<const fs_path_config_from_file*>(data.c_str());
+ size_t len = data.size();
+
+ ASSERT_TRUE(config != NULL);
+ ASSERT_LT(0, num_config);
+
+ while (len > 0) {
+ uint16_t host_len = pc->len;
+ if (host_len > len) break;
+
+ EXPECT_EQ(config->mode, pc->mode);
+ EXPECT_EQ(config->uid, pc->uid);
+ EXPECT_EQ(config->gid, pc->gid);
+ EXPECT_EQ(config->capabilities, pc->capabilities);
+ EXPECT_STREQ(config->prefix, pc->prefix);
+
+ EXPECT_LT(0, num_config);
+ --num_config;
+ if (num_config >= 0) ++config;
+ pc = reinterpret_cast<const fs_path_config_from_file*>(
+ reinterpret_cast<const char*>(pc) + host_len);
+ len -= host_len;
+ }
+ EXPECT_EQ(0, num_config);
+}
+
+/* See local android_filesystem_config.h for test data */
+
+TEST(fs_conf_test, dirs) {
+ confirm(popenToString(
+ android::base::StringPrintf("%s -D", fs_config_generate_command)),
+ android_device_dirs, arraysize(android_device_dirs));
+}
+
+TEST(fs_conf_test, files) {
+ confirm(popenToString(
+ android::base::StringPrintf("%s -F", fs_config_generate_command)),
+ android_device_files, arraysize(android_device_files));
+}
+
+static const char vendor_str[] = "vendor/";
+static const char vendor_alt_str[] = "system/vendor/";
+static const char oem_str[] = "oem/";
+static const char oem_alt_str[] = "system/oem/";
+static const char odm_str[] = "odm/";
+static const char odm_alt_str[] = "system/odm/";
+
+TEST(fs_conf_test, system_dirs) {
+ std::vector<fs_path_config> dirs;
+ const fs_path_config* config = android_device_dirs;
+ for (size_t num = arraysize(android_device_dirs); num; --num) {
+ if (!android::base::StartsWith(config->prefix, vendor_str) &&
+ !android::base::StartsWith(config->prefix, vendor_alt_str) &&
+ !android::base::StartsWith(config->prefix, oem_str) &&
+ !android::base::StartsWith(config->prefix, oem_alt_str) &&
+ !android::base::StartsWith(config->prefix, odm_str) &&
+ !android::base::StartsWith(config->prefix, odm_alt_str)) {
+ dirs.emplace_back(*config);
+ }
+ ++config;
+ }
+ confirm(popenToString(android::base::StringPrintf(
+ "%s -D -P -vendor,-oem,-odm", fs_config_generate_command)),
+ &dirs[0], dirs.size());
+}
+
+TEST(fs_conf_test, vendor_dirs) {
+ std::vector<fs_path_config> dirs;
+ const fs_path_config* config = android_device_dirs;
+ for (size_t num = arraysize(android_device_dirs); num; --num) {
+ if (android::base::StartsWith(config->prefix, vendor_str) ||
+ android::base::StartsWith(config->prefix, vendor_alt_str)) {
+ dirs.emplace_back(*config);
+ }
+ ++config;
+ }
+ confirm(popenToString(android::base::StringPrintf(
+ "%s -D -P vendor", fs_config_generate_command)),
+ &dirs[0], dirs.size());
+}
+
+TEST(fs_conf_test, oem_dirs) {
+ std::vector<fs_path_config> dirs;
+ const fs_path_config* config = android_device_dirs;
+ for (size_t num = arraysize(android_device_dirs); num; --num) {
+ if (android::base::StartsWith(config->prefix, oem_str) ||
+ android::base::StartsWith(config->prefix, oem_alt_str)) {
+ dirs.emplace_back(*config);
+ }
+ ++config;
+ }
+ confirm(popenToString(android::base::StringPrintf(
+ "%s -D -P oem", fs_config_generate_command)),
+ &dirs[0], dirs.size());
+}
+
+TEST(fs_conf_test, odm_dirs) {
+ std::vector<fs_path_config> dirs;
+ const fs_path_config* config = android_device_dirs;
+ for (size_t num = arraysize(android_device_dirs); num; --num) {
+ if (android::base::StartsWith(config->prefix, odm_str) ||
+ android::base::StartsWith(config->prefix, odm_alt_str)) {
+ dirs.emplace_back(*config);
+ }
+ ++config;
+ }
+ confirm(popenToString(android::base::StringPrintf(
+ "%s -D -P odm", fs_config_generate_command)),
+ &dirs[0], dirs.size());
+}
+
+TEST(fs_conf_test, system_files) {
+ std::vector<fs_path_config> files;
+ const fs_path_config* config = android_device_files;
+ for (size_t num = arraysize(android_device_files); num; --num) {
+ if (!android::base::StartsWith(config->prefix, vendor_str) &&
+ !android::base::StartsWith(config->prefix, vendor_alt_str) &&
+ !android::base::StartsWith(config->prefix, oem_str) &&
+ !android::base::StartsWith(config->prefix, oem_alt_str) &&
+ !android::base::StartsWith(config->prefix, odm_str) &&
+ !android::base::StartsWith(config->prefix, odm_alt_str)) {
+ files.emplace_back(*config);
+ }
+ ++config;
+ }
+ confirm(popenToString(android::base::StringPrintf(
+ "%s -F -P -vendor,-oem,-odm", fs_config_generate_command)),
+ &files[0], files.size());
+}
+
+TEST(fs_conf_test, vendor_files) {
+ std::vector<fs_path_config> files;
+ const fs_path_config* config = android_device_files;
+ for (size_t num = arraysize(android_device_files); num; --num) {
+ if (android::base::StartsWith(config->prefix, vendor_str) ||
+ android::base::StartsWith(config->prefix, vendor_alt_str)) {
+ files.emplace_back(*config);
+ }
+ ++config;
+ }
+ confirm(popenToString(android::base::StringPrintf(
+ "%s -F -P vendor", fs_config_generate_command)),
+ &files[0], files.size());
+}
+
+TEST(fs_conf_test, oem_files) {
+ std::vector<fs_path_config> files;
+ const fs_path_config* config = android_device_files;
+ for (size_t num = arraysize(android_device_files); num; --num) {
+ if (android::base::StartsWith(config->prefix, oem_str) ||
+ android::base::StartsWith(config->prefix, oem_alt_str)) {
+ files.emplace_back(*config);
+ }
+ ++config;
+ }
+ confirm(popenToString(android::base::StringPrintf(
+ "%s -F -P oem", fs_config_generate_command)),
+ &files[0], files.size());
+}
+
+TEST(fs_conf_test, odm_files) {
+ std::vector<fs_path_config> files;
+ const fs_path_config* config = android_device_files;
+ for (size_t num = arraysize(android_device_files); num; --num) {
+ if (android::base::StartsWith(config->prefix, odm_str) ||
+ android::base::StartsWith(config->prefix, odm_alt_str)) {
+ files.emplace_back(*config);
+ }
+ ++config;
+ }
+ confirm(popenToString(android::base::StringPrintf(
+ "%s -F -P odm", fs_config_generate_command)),
+ &files[0], files.size());
+}
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 7c3679c..1d8090a 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -69,6 +69,7 @@
OPTIONS.add_missing = False
OPTIONS.rebuild_recovery = False
+OPTIONS.replace_recovery_patch_files_list = []
OPTIONS.replace_verity_public_key = False
OPTIONS.replace_verity_private_key = False
OPTIONS.is_signing = False
@@ -127,6 +128,12 @@
ofile.write(data)
ofile.close()
+ arc_name = "SYSTEM/" + fn
+ if arc_name in output_zip.namelist():
+ OPTIONS.replace_recovery_patch_files_list.append(arc_name)
+ else:
+ common.ZipWrite(output_zip, ofile.name, arc_name)
+
if OPTIONS.rebuild_recovery:
print("Building new recovery patch")
common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink, recovery_img,
@@ -167,6 +174,43 @@
return img.name
+def AddDtbo(output_zip, prefix="IMAGES/"):
+ """Adds the DTBO image.
+
+ Uses the image under prefix if it already exists. Otherwise looks for the
+ image under PREBUILT_IMAGES/, signs it as needed, and returns the image name.
+ """
+
+ img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "dtbo.img")
+ if os.path.exists(img.input_name):
+ print("dtbo.img already exists in %s, no need to rebuild..." % (prefix,))
+ return img.input_name
+
+ dtbo_prebuilt_path = os.path.join(
+ OPTIONS.input_tmp, "PREBUILT_IMAGES", "dtbo.img")
+ assert os.path.exists(dtbo_prebuilt_path)
+ shutil.copy(dtbo_prebuilt_path, img.name)
+
+ # AVB-sign the image as needed.
+ if OPTIONS.info_dict.get("board_avb_enable") == "true":
+ avbtool = os.getenv('AVBTOOL') or OPTIONS.info_dict["avb_avbtool"]
+ part_size = OPTIONS.info_dict["dtbo_size"]
+ # The AVB hash footer will be replaced if already present.
+ cmd = [avbtool, "add_hash_footer", "--image", img.name,
+ "--partition_size", str(part_size), "--partition_name", "dtbo"]
+ cmd.extend(shlex.split(OPTIONS.info_dict["avb_signing_args"]))
+ args = OPTIONS.info_dict.get("board_avb_dtbo_add_hash_footer_args")
+ if args and args.strip():
+ cmd.extend(shlex.split(args))
+ p = common.Run(cmd, stdout=subprocess.PIPE)
+ p.communicate()
+ assert p.returncode == 0, \
+ "avbtool add_hash_footer of %s failed" % (img.name,)
+
+ img.Write()
+ return img.name
+
+
def CreateImage(input_dir, info_dict, what, output_file, block_list=None):
print("creating " + what + ".img...")
@@ -222,9 +266,14 @@
if block_list:
block_list.Write()
+ # Set the 'adjusted_partition_size' that excludes the verity blocks of the
+ # given image. When avb is enabled, this size is the max image size returned
+ # by the avb tool.
is_verity_partition = "verity_block_device" in image_props
- verity_supported = image_props.get("verity") == "true"
- if is_verity_partition and verity_supported:
+ verity_supported = (image_props.get("verity") == "true" or
+ image_props.get("board_avb_enable") == "true")
+ is_avb_enable = image_props.get("avb_hashtree_enable") == "true"
+ if verity_supported and (is_verity_partition or is_avb_enable):
adjusted_blocks_value = image_props.get("partition_size")
if adjusted_blocks_value:
adjusted_blocks_key = what + "_adjusted_partition_size"
@@ -286,20 +335,22 @@
def AddVBMeta(output_zip, boot_img_path, system_img_path, vendor_img_path,
- prefix="IMAGES/"):
+ dtbo_img_path, prefix="IMAGES/"):
"""Create a VBMeta image and store it in output_zip."""
img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "vbmeta.img")
- avbtool = os.getenv('AVBTOOL') or "avbtool"
+ avbtool = os.getenv('AVBTOOL') or OPTIONS.info_dict["avb_avbtool"]
cmd = [avbtool, "make_vbmeta_image",
"--output", img.name,
"--include_descriptors_from_image", boot_img_path,
"--include_descriptors_from_image", system_img_path]
if vendor_img_path is not None:
cmd.extend(["--include_descriptors_from_image", vendor_img_path])
- if OPTIONS.info_dict.get("system_root_image", None) == "true":
+ if dtbo_img_path is not None:
+ cmd.extend(["--include_descriptors_from_image", dtbo_img_path])
+ if OPTIONS.info_dict.get("system_root_image") == "true":
cmd.extend(["--setup_rootfs_from_kernel", system_img_path])
- common.AppendAVBSigningArgs(cmd)
- args = OPTIONS.info_dict.get("board_avb_make_vbmeta_image_args", None)
+ cmd.extend(shlex.split(OPTIONS.info_dict["avb_signing_args"]))
+ args = OPTIONS.info_dict.get("board_avb_make_vbmeta_image_args")
if args and args.strip():
cmd.extend(shlex.split(args))
p = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
@@ -376,6 +427,23 @@
img.Write()
+def ReplaceRecoveryPatchFiles(zip_filename):
+ """Update the related files under SYSTEM/ after rebuilding recovery."""
+
+ cmd = ["zip", "-d", zip_filename] + OPTIONS.replace_recovery_patch_files_list
+ p = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ p.communicate()
+
+ output_zip = zipfile.ZipFile(zip_filename, "a",
+ compression=zipfile.ZIP_DEFLATED,
+ allowZip64=True)
+ for item in OPTIONS.replace_recovery_patch_files_list:
+ file_path = os.path.join(OPTIONS.input_tmp, item)
+ assert os.path.exists(file_path)
+ common.ZipWrite(output_zip, file_path, arcname=item)
+ common.ZipClose(output_zip)
+
+
def AddImagesToTargetFiles(filename):
if os.path.isdir(filename):
OPTIONS.input_tmp = os.path.abspath(filename)
@@ -462,7 +530,7 @@
banner("system")
system_img_path = AddSystem(
- output_zip, recovery_img=recovery_image, boot_img=boot_image)
+ output_zip, recovery_img=recovery_image, boot_img=boot_image)
vendor_img_path = None
if has_vendor:
banner("vendor")
@@ -475,13 +543,21 @@
AddUserdata(output_zip)
banner("cache")
AddCache(output_zip)
- if OPTIONS.info_dict.get("board_bpt_enable", None) == "true":
+
+ if OPTIONS.info_dict.get("board_bpt_enable") == "true":
banner("partition-table")
AddPartitionTable(output_zip)
- if OPTIONS.info_dict.get("board_avb_enable", None) == "true":
+
+ dtbo_img_path = None
+ if OPTIONS.info_dict.get("has_dtbo") == "true":
+ banner("dtbo")
+ dtbo_img_path = AddDtbo(output_zip)
+
+ if OPTIONS.info_dict.get("board_avb_enable") == "true":
banner("vbmeta")
boot_contents = boot_image.WriteToTemp()
- AddVBMeta(output_zip, boot_contents.name, system_img_path, vendor_img_path)
+ AddVBMeta(output_zip, boot_contents.name, system_img_path,
+ vendor_img_path, dtbo_img_path)
# For devices using A/B update, copy over images from RADIO/ and/or
# VENDOR_IMAGES/ to IMAGES/ and make sure we have all the needed
@@ -495,12 +571,14 @@
# partitions (if present), then write this file to target_files package.
care_map_list = []
for line in lines:
- if line.strip() == "system" and OPTIONS.info_dict.get(
- "system_verity_block_device", None) is not None:
+ if line.strip() == "system" and (
+ "system_verity_block_device" in OPTIONS.info_dict or
+ OPTIONS.info_dict.get("system_avb_hashtree_enable") == "true"):
assert os.path.exists(system_img_path)
care_map_list += GetCareMap("system", system_img_path)
- if line.strip() == "vendor" and OPTIONS.info_dict.get(
- "vendor_verity_block_device", None) is not None:
+ if line.strip() == "vendor" and (
+ "vendor_verity_block_device" in OPTIONS.info_dict or
+ OPTIONS.info_dict.get("vendor_avb_hashtree_enable") == "true"):
assert os.path.exists(vendor_img_path)
care_map_list += GetCareMap("vendor", vendor_img_path)
@@ -547,6 +625,9 @@
if output_zip:
common.ZipClose(output_zip)
+ if OPTIONS.replace_recovery_patch_files_list:
+ ReplaceRecoveryPatchFiles(output_zip.filename)
+
def main(argv):
def option_handler(o, a):
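For reference, a minimal sketch of the avbtool command the new AddDtbo() assembles when board_avb_enable is set; the size, key and image path below are placeholders, not values from any real target:

    import shlex

    info_dict = {
        "avb_avbtool": "avbtool",
        "dtbo_size": 8388608,                  # placeholder partition size
        "avb_signing_args": "--key external/avb/test/data/testkey_rsa4096.pem "
                            "--algorithm SHA256_RSA4096",
    }
    cmd = [info_dict["avb_avbtool"], "add_hash_footer",
           "--image", "IMAGES/dtbo.img",
           "--partition_size", str(info_dict["dtbo_size"]),
           "--partition_name", "dtbo"]
    cmd.extend(shlex.split(info_dict["avb_signing_args"]))
    print(" ".join(cmd))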
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index e385866..b8123c0 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -41,10 +41,10 @@
cmd = ['imgdiff', '-z'] if imgdiff else ['bsdiff']
cmd.extend([srcfile, tgtfile, patchfile])
- # Not using common.Run(), which would otherwise dump all the bsdiff/imgdiff
- # commands when OPTIONS.verbose is True - not useful for the case here, since
- # they contain temp filenames only.
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ # Don't dump the bsdiff/imgdiff commands, which are not useful for the case
+ # here, since they contain temp filenames only.
+ p = common.Run(cmd, verbose=False, stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
output, _ = p.communicate()
if p.returncode != 0:
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 3094dca..de75a6b 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -97,18 +97,19 @@
simg = sparse_img.SparseImage(image_file, mode="r+b", build_map=False)
simg.AppendFillChunk(0, blocks)
-def AVBCalcMaxImageSize(avbtool, partition_size, additional_args):
+def AVBCalcMaxImageSize(avbtool, footer_type, partition_size, additional_args):
"""Calculates max image size for a given partition size.
Args:
avbtool: String with path to avbtool.
+ footer_type: 'hash' or 'hashtree' for generating footer.
partition_size: The size of the partition in question.
additional_args: Additional arguments to pass to 'avbtool
add_hashtree_image'.
Returns:
The maximum image size or 0 if an error occurred.
"""
- cmdline = "%s add_hashtree_footer " % avbtool
+ cmdline = "%s add_%s_footer " % (avbtool, footer_type)
cmdline += "--partition_size %d " % partition_size
cmdline += "--calc_max_image_size "
cmdline += additional_args
@@ -118,13 +119,14 @@
else:
return int(output)
-def AVBAddHashtree(image_path, avbtool, partition_size, partition_name,
- signing_args, additional_args):
+def AVBAddFooter(image_path, avbtool, footer_type, partition_size,
+ partition_name, signing_args, additional_args):
"""Adds dm-verity hashtree and AVB metadata to an image.
Args:
image_path: Path to image to modify.
avbtool: String with path to avbtool.
+ footer_type: 'hash' or 'hashtree' for generating footer.
partition_size: The size of the partition in question.
partition_name: The name of the partition - will be embedded in metadata.
signing_args: Arguments for signing the image.
@@ -133,7 +135,7 @@
Returns:
True if the operation succeeded.
"""
- cmdline = "%s add_hashtree_footer " % avbtool
+ cmdline = "%s add_%s_footer " % (avbtool, footer_type)
cmdline += "--partition_size %d " % partition_size
cmdline += "--partition_name %s " % partition_name
cmdline += "--image %s " % image_path
@@ -410,12 +412,19 @@
prop_dict["original_partition_size"] = str(partition_size)
prop_dict["verity_size"] = str(verity_size)
- # Adjust partition size for AVB.
- if prop_dict.get("avb_enable") == "true":
+ # Adjust partition size for AVB hash footer or AVB hashtree footer.
+ avb_footer_type = ''
+ if prop_dict.get("avb_hash_enable") == "true":
+ avb_footer_type = 'hash'
+ elif prop_dict.get("avb_hashtree_enable") == "true":
+ avb_footer_type = 'hashtree'
+
+ if avb_footer_type:
avbtool = prop_dict.get("avb_avbtool")
partition_size = int(prop_dict.get("partition_size"))
- additional_args = prop_dict["avb_add_hashtree_footer_args"]
- max_image_size = AVBCalcMaxImageSize(avbtool, partition_size,
+ # avb_add_hash_footer_args or avb_add_hashtree_footer_args.
+ additional_args = prop_dict["avb_add_" + avb_footer_type + "_footer_args"]
+ max_image_size = AVBCalcMaxImageSize(avbtool, avb_footer_type, partition_size,
additional_args)
if max_image_size == 0:
return False
@@ -561,15 +570,16 @@
if not MakeVerityEnabledImage(out_file, verity_fec_supported, prop_dict):
return False
- # Add AVB hashtree and metadata.
- if "avb_enable" in prop_dict:
+ # Add AVB HASH or HASHTREE footer (metadata).
+ if avb_footer_type:
avbtool = prop_dict.get("avb_avbtool")
original_partition_size = int(prop_dict.get("original_partition_size"))
partition_name = prop_dict["partition_name"]
signing_args = prop_dict["avb_signing_args"]
- additional_args = prop_dict["avb_add_hashtree_footer_args"]
- if not AVBAddHashtree(out_file, avbtool, original_partition_size,
- partition_name, signing_args, additional_args):
+ # avb_add_hash_footer_args or avb_add_hashtree_footer_args
+ additional_args = prop_dict["avb_add_" + avb_footer_type + "_footer_args"]
+ if not AVBAddFooter(out_file, avbtool, avb_footer_type, original_partition_size,
+ partition_name, signing_args, additional_args):
return False
if run_fsck and prop_dict.get("skip_fsck") != "true":
@@ -614,6 +624,7 @@
"verity_key",
"verity_signer_cmd",
"verity_fec",
+ "board_avb_enable",
"avb_signing_args",
"avb_avbtool"
)
@@ -639,7 +650,7 @@
copy_prop("system_squashfs_block_size", "squashfs_block_size")
copy_prop("system_squashfs_disable_4k_align", "squashfs_disable_4k_align")
copy_prop("system_base_fs_file", "base_fs_file")
- copy_prop("system_avb_enable", "avb_enable")
+ copy_prop("system_avb_hashtree_enable", "avb_hashtree_enable")
copy_prop("system_avb_add_hashtree_footer_args",
"avb_add_hashtree_footer_args")
copy_prop("system_extfs_inode_count", "extfs_inode_count")
@@ -656,7 +667,7 @@
copy_prop("system_squashfs_compressor_opt", "squashfs_compressor_opt")
copy_prop("system_squashfs_block_size", "squashfs_block_size")
copy_prop("system_base_fs_file", "base_fs_file")
- copy_prop("system_avb_enable", "avb_enable")
+ copy_prop("system_avb_hashtree_enable", "avb_hashtree_enable")
copy_prop("system_avb_add_hashtree_footer_args",
"avb_add_hashtree_footer_args")
copy_prop("system_extfs_inode_count", "extfs_inode_count")
@@ -681,7 +692,7 @@
copy_prop("vendor_squashfs_block_size", "squashfs_block_size")
copy_prop("vendor_squashfs_disable_4k_align", "squashfs_disable_4k_align")
copy_prop("vendor_base_fs_file", "base_fs_file")
- copy_prop("vendor_avb_enable", "avb_enable")
+ copy_prop("vendor_avb_hashtree_enable", "avb_hashtree_enable")
copy_prop("vendor_avb_add_hashtree_footer_args",
"avb_add_hashtree_footer_args")
copy_prop("vendor_extfs_inode_count", "extfs_inode_count")
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index e200f9f..652fadf 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -107,10 +107,15 @@
pass
-def Run(args, **kwargs):
- """Create and return a subprocess.Popen object, printing the command
- line on the terminal if -v was specified."""
- if OPTIONS.verbose:
+def Run(args, verbose=None, **kwargs):
+ """Create and return a subprocess.Popen object.
+
+ Caller can specify if the command line should be printed. The global
+ OPTIONS.verbose will be used if not specified.
+ """
+ if verbose is None:
+ verbose = OPTIONS.verbose
+ if verbose:
print(" running: ", " ".join(args))
return subprocess.Popen(args, **kwargs)
@@ -340,16 +345,6 @@
print("%-25s = (%s) %s" % (k, type(v).__name__, v))
-def AppendAVBSigningArgs(cmd):
- """Append signing arguments for avbtool."""
- keypath = OPTIONS.info_dict.get("board_avb_key_path", None)
- algorithm = OPTIONS.info_dict.get("board_avb_algorithm", None)
- if not keypath or not algorithm:
- algorithm = "SHA256_RSA4096"
- keypath = "external/avb/test/data/testkey_rsa4096.pem"
- cmd.extend(["--key", keypath, "--algorithm", algorithm])
-
-
def _BuildBootableImage(sourcedir, fs_config_file, info_dict=None,
has_ramdisk=False, two_step_image=False):
"""Build a bootable image from the specified sourcedir.
@@ -486,12 +481,12 @@
# AVB: if enabled, calculate and add hash to boot.img.
if info_dict.get("board_avb_enable", None) == "true":
- avbtool = os.getenv('AVBTOOL') or "avbtool"
- part_size = info_dict.get("boot_size", None)
+ avbtool = os.getenv('AVBTOOL') or info_dict["avb_avbtool"]
+ part_size = info_dict["boot_size"]
cmd = [avbtool, "add_hash_footer", "--image", img.name,
"--partition_size", str(part_size), "--partition_name", "boot"]
- AppendAVBSigningArgs(cmd)
- args = info_dict.get("board_avb_boot_add_hash_footer_args", None)
+ cmd.extend(shlex.split(info_dict["avb_signing_args"]))
+ args = info_dict.get("board_avb_boot_add_hash_footer_args")
if args and args.strip():
cmd.extend(shlex.split(args))
p = Run(cmd, stdout=subprocess.PIPE)
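Usage sketch for the new verbose override on common.Run(), mirroring the blockimgdiff.py call earlier in this change; the command and file names are illustrative, and the snippet assumes tools/releasetools is on sys.path:

    import subprocess

    import common

    # verbose=False suppresses the " running: ..." trace even when the global
    # OPTIONS.verbose flag is set; useful when argv holds only temp filenames.
    p = common.Run(["bsdiff", "src.tmp", "tgt.tmp", "patch.tmp"], verbose=False,
                   stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    output, _ = p.communicate()
    assert p.returncode == 0, output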
diff --git a/tools/releasetools/edify_generator.py b/tools/releasetools/edify_generator.py
index 860853c..0c44faf 100644
--- a/tools/releasetools/edify_generator.py
+++ b/tools/releasetools/edify_generator.py
@@ -230,11 +230,6 @@
p.mount_point, mount_flags))
self.mounts.add(p.mount_point)
- def UnpackPackageDir(self, src, dst):
- """Unpack a given directory from the OTA package into the given
- destination directory."""
- self.script.append('package_extract_dir("%s", "%s");' % (src, dst))
-
def Comment(self, comment):
"""Write a comment into the update script."""
self.script.append("")
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index fd98ad2..4422b53 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -71,63 +71,23 @@
common.Usage(__doc__)
sys.exit(1)
- OPTIONS.input_tmp, input_zip = common.UnzipTemp(args[0])
+ OPTIONS.input_tmp, input_zip = common.UnzipTemp(
+ args[0], ["IMAGES/*", "OTA/*"])
output_zip = zipfile.ZipFile(args[1], "w", compression=zipfile.ZIP_DEFLATED)
CopyInfo(output_zip)
try:
- done = False
images_path = os.path.join(OPTIONS.input_tmp, "IMAGES")
- if os.path.exists(images_path):
- # If this is a new target-files, it already contains the images,
- # and all we have to do is copy them to the output zip.
- images = os.listdir(images_path)
- if images:
- for image in images:
- if bootable_only and image not in ("boot.img", "recovery.img"):
- continue
- if not image.endswith(".img"):
- continue
- if image == "recovery-two-step.img":
- continue
- common.ZipWrite(
- output_zip, os.path.join(images_path, image), image)
- done = True
-
- if not done:
- # We have an old target-files that doesn't already contain the
- # images, so build them.
- import add_img_to_target_files
-
- OPTIONS.info_dict = common.LoadInfoDict(input_zip, OPTIONS.input_tmp)
-
- boot_image = common.GetBootableImage(
- "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
- if boot_image:
- boot_image.AddToZip(output_zip)
-
- if OPTIONS.info_dict.get("no_recovery") != "true":
- recovery_image = common.GetBootableImage(
- "recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
- if recovery_image:
- recovery_image.AddToZip(output_zip)
-
- def banner(s):
- print("\n\n++++ " + s + " ++++\n\n")
-
- if not bootable_only:
- banner("AddSystem")
- add_img_to_target_files.AddSystem(output_zip, prefix="")
- try:
- input_zip.getinfo("VENDOR/")
- banner("AddVendor")
- add_img_to_target_files.AddVendor(output_zip, prefix="")
- except KeyError:
- pass # no vendor partition for this device
- banner("AddUserdata")
- add_img_to_target_files.AddUserdata(output_zip, prefix="")
- banner("AddCache")
- add_img_to_target_files.AddCache(output_zip, prefix="")
+ # A target-files zip must contain the images since Lollipop.
+ assert os.path.exists(images_path)
+ for image in sorted(os.listdir(images_path)):
+ if bootable_only and image not in ("boot.img", "recovery.img"):
+ continue
+ if not image.endswith(".img"):
+ continue
+ if image == "recovery-two-step.img":
+ continue
+ common.ZipWrite(output_zip, os.path.join(images_path, image), image)
finally:
print("cleaning up...")
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 1a7e10e..1c8fe65 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -181,14 +181,14 @@
OPTIONS.payload_signer = None
OPTIONS.payload_signer_args = []
OPTIONS.extracted_input = None
+OPTIONS.key_passwords = []
METADATA_NAME = 'META-INF/com/android/metadata'
UNZIP_PATTERN = ['IMAGES/*', 'META/*']
def SignOutput(temp_zip_name, output_zip_name):
- key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
- pw = key_passwords[OPTIONS.package_key]
+ pw = OPTIONS.key_passwords[OPTIONS.package_key]
common.SignFile(temp_zip_name, output_zip_name, OPTIONS.package_key, pw,
whole_file=True)
@@ -1021,21 +1021,17 @@
# The place where the output from the subprocess should go.
log_file = sys.stdout if OPTIONS.verbose else subprocess.PIPE
- # Setup signing keys.
- if OPTIONS.package_key is None:
- OPTIONS.package_key = OPTIONS.info_dict.get(
- "default_system_dev_certificate",
- "build/target/product/security/testkey")
-
# A/B updater expects a signing key in RSA format. Gets the key ready for
# later use in step 3, unless a payload_signer has been specified.
if OPTIONS.payload_signer is None:
cmd = ["openssl", "pkcs8",
"-in", OPTIONS.package_key + OPTIONS.private_key_suffix,
- "-inform", "DER", "-nocrypt"]
+ "-inform", "DER"]
+ pw = OPTIONS.key_passwords[OPTIONS.package_key]
+ cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
rsa_key = common.MakeTempFile(prefix="key-", suffix=".key")
cmd.extend(["-out", rsa_key])
- p1 = common.Run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
+ p1 = common.Run(cmd, verbose=False, stdout=log_file, stderr=subprocess.STDOUT)
p1.communicate()
assert p1.returncode == 0, "openssl pkcs8 failed"
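The openssl pkcs8 invocation converts the DER-encoded .pk8 signing key into the PEM RSA key that the payload signer expects; passing the password via -passin (and running with verbose=False) keeps it off the console. A sketch of the two resulting command lines, with the default testkey path and a placeholder password:

    # Password-protected key:
    cmd = ["openssl", "pkcs8",
           "-in", "build/target/product/security/testkey.pk8",
           "-inform", "DER",
           "-passin", "pass:<password>",   # pw from OPTIONS.key_passwords
           "-out", "/tmp/key-XXXX.key"]

    # Unprotected key (the previous behavior):
    cmd = ["openssl", "pkcs8",
           "-in", "build/target/product/security/testkey.pk8",
           "-inform", "DER", "-nocrypt",
           "-out", "/tmp/key-XXXX.key"]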
@@ -1171,7 +1167,8 @@
# If dm-verity is supported for the device, copy contents of care_map
# into A/B OTA package.
target_zip = zipfile.ZipFile(target_file, "r")
- if OPTIONS.info_dict.get("verity") == "true":
+ if (OPTIONS.info_dict.get("verity") == "true" or
+ OPTIONS.info_dict.get("board_avb_enable") == "true"):
care_map_path = "META/care_map.txt"
namelist = target_zip.namelist()
if care_map_path in namelist:
@@ -1383,6 +1380,17 @@
ab_update = OPTIONS.info_dict.get("ab_update") == "true"
+ # Use the default key to sign the package if not specified with package_key.
+  # A package key is needed for A/B updates, so always define one when an
+  # A/B update is being created.
+ if not OPTIONS.no_signing or ab_update:
+ if OPTIONS.package_key is None:
+ OPTIONS.package_key = OPTIONS.info_dict.get(
+ "default_system_dev_certificate",
+ "build/target/product/security/testkey")
+ # Get signing keys
+ OPTIONS.key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
+
if ab_update:
if OPTIONS.incremental_source is not None:
OPTIONS.target_info_dict = OPTIONS.info_dict
@@ -1448,13 +1456,6 @@
raise common.ExternalError(
"--- target build has specified no recovery ---")
- # Use the default key to sign the package if not specified with package_key.
- if not OPTIONS.no_signing:
- if OPTIONS.package_key is None:
- OPTIONS.package_key = OPTIONS.info_dict.get(
- "default_system_dev_certificate",
- "build/target/product/security/testkey")
-
# Set up the output zip. Create a temporary zip file if signing is needed.
if OPTIONS.no_signing:
if os.path.exists(args[1]):
diff --git a/tools/releasetools/ota_package_parser.py b/tools/releasetools/ota_package_parser.py
new file mode 100755
index 0000000..331122b
--- /dev/null
+++ b/tools/releasetools/ota_package_parser.py
@@ -0,0 +1,228 @@
+#!/usr/bin/env python
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import logging
+import sys
+import traceback
+import zipfile
+
+from rangelib import RangeSet
+
+class Stash(object):
+ """Build a map to track stashed blocks during update simulation."""
+
+ def __init__(self):
+ self.blocks_stashed = 0
+ self.overlap_blocks_stashed = 0
+ self.max_stash_needed = 0
+ self.current_stash_size = 0
+ self.stash_map = {}
+
+ def StashBlocks(self, SHA1, blocks):
+ if SHA1 in self.stash_map:
+ logging.info("already stashed {}: {}".format(SHA1, blocks))
+ return
+ self.blocks_stashed += blocks.size()
+ self.current_stash_size += blocks.size()
+ self.max_stash_needed = max(self.current_stash_size, self.max_stash_needed)
+ self.stash_map[SHA1] = blocks
+
+ def FreeBlocks(self, SHA1):
+ assert self.stash_map.has_key(SHA1), "stash {} not found".format(SHA1)
+ self.current_stash_size -= self.stash_map[SHA1].size()
+ del self.stash_map[SHA1]
+
+ def HandleOverlapBlocks(self, SHA1, blocks):
+ self.StashBlocks(SHA1, blocks)
+ self.overlap_blocks_stashed += blocks.size()
+ self.FreeBlocks(SHA1)
+
+
+class OtaPackageParser(object):
+ """Parse a block-based OTA package."""
+
+ def __init__(self, package):
+ self.package = package
+ self.new_data_size = 0
+ self.patch_data_size = 0
+ self.block_written = 0
+ self.block_stashed = 0
+
+ @staticmethod
+ def GetSizeString(size):
+ assert size >= 0
+ base = 1024.0
+ if size <= base:
+ return "{} bytes".format(size)
+ for units in ['K', 'M', 'G']:
+ if size <= base * 1024 or units == 'G':
+ return "{:.1f}{}".format(size / base, units)
+ base *= 1024
+
+ def ParseTransferList(self, name):
+ """Simulate the transfer commands and calculate the amout of I/O."""
+
+ logging.info("\nSimulating commands in '{}':".format(name))
+ lines = self.package.read(name).strip().splitlines()
+    assert len(lines) >= 4, "{} is too short; Transfer list expects at least " \
+        "4 lines, it has {}".format(name, len(lines))
+ assert int(lines[0]) >= 3
+ logging.info("(version: {})".format(lines[0]))
+
+ blocks_written = 0
+ my_stash = Stash()
+ for line in lines[4:]:
+ cmd_list = line.strip().split(" ")
+ cmd_name = cmd_list[0]
+ try:
+ if cmd_name == "new" or cmd_name == "zero":
+ assert len(cmd_list) == 2, "command format error: {}".format(line)
+ target_range = RangeSet.parse_raw(cmd_list[1])
+ blocks_written += target_range.size()
+ elif cmd_name == "move":
+ # Example: move <onehash> <tgt_range> <src_blk_count> <src_range>
+ # [<loc_range> <stashed_blocks>]
+ assert len(cmd_list) >= 5, "command format error: {}".format(line)
+ target_range = RangeSet.parse_raw(cmd_list[2])
+ blocks_written += target_range.size()
+ if cmd_list[4] == '-':
+ continue
+ SHA1 = cmd_list[1]
+ source_range = RangeSet.parse_raw(cmd_list[4])
+ if target_range.overlaps(source_range):
+ my_stash.HandleOverlapBlocks(SHA1, source_range)
+ elif cmd_name == "bsdiff" or cmd_name == "imgdiff":
+ # Example: bsdiff <offset> <len> <src_hash> <tgt_hash> <tgt_range>
+ # <src_blk_count> <src_range> [<loc_range> <stashed_blocks>]
+ assert len(cmd_list) >= 8, "command format error: {}".format(line)
+ target_range = RangeSet.parse_raw(cmd_list[5])
+ blocks_written += target_range.size()
+ if cmd_list[7] == '-':
+ continue
+ source_SHA1 = cmd_list[3]
+ source_range = RangeSet.parse_raw(cmd_list[7])
+ if target_range.overlaps(source_range):
+ my_stash.HandleOverlapBlocks(source_SHA1, source_range)
+ elif cmd_name == "stash":
+ assert len(cmd_list) == 3, "command format error: {}".format(line)
+ SHA1 = cmd_list[1]
+ source_range = RangeSet.parse_raw(cmd_list[2])
+ my_stash.StashBlocks(SHA1, source_range)
+ elif cmd_name == "free":
+ assert len(cmd_list) == 2, "command format error: {}".format(line)
+ SHA1 = cmd_list[1]
+ my_stash.FreeBlocks(SHA1)
+ except:
+ logging.error("failed to parse command in: " + line)
+ raise
+
+ self.block_written += blocks_written
+ self.block_stashed += my_stash.blocks_stashed
+
+ logging.info("blocks written: {} (expected: {})".format(
+ blocks_written, lines[1]))
+ logging.info("max blocks stashed simultaneously: {} (expected: {})".
+ format(my_stash.max_stash_needed, lines[3]))
+ logging.info("total blocks stashed: {}".format(my_stash.blocks_stashed))
+ logging.info("blocks stashed implicitly: {}".format(
+ my_stash.overlap_blocks_stashed))
+
+ def PrintDataInfo(self, partition):
+ logging.info("\nReading data info for {} partition:".format(partition))
+ new_data = self.package.getinfo(partition + ".new.dat")
+ patch_data = self.package.getinfo(partition + ".patch.dat")
+ logging.info("{:<40}{:<40}".format(new_data.filename, patch_data.filename))
+ logging.info("{:<40}{:<40}".format(
+ "compress_type: " + str(new_data.compress_type),
+ "compress_type: " + str(patch_data.compress_type)))
+ logging.info("{:<40}{:<40}".format(
+ "compressed_size: " + OtaPackageParser.GetSizeString(
+ new_data.compress_size),
+ "compressed_size: " + OtaPackageParser.GetSizeString(
+ patch_data.compress_size)))
+ logging.info("{:<40}{:<40}".format(
+ "file_size: " + OtaPackageParser.GetSizeString(new_data.file_size),
+ "file_size: " + OtaPackageParser.GetSizeString(patch_data.file_size)))
+
+ self.new_data_size += new_data.file_size
+ self.patch_data_size += patch_data.file_size
+
+ def AnalyzePartition(self, partition):
+ assert partition in ("system", "vendor")
+ assert partition + ".new.dat" in self.package.namelist()
+ assert partition + ".patch.dat" in self.package.namelist()
+ assert partition + ".transfer.list" in self.package.namelist()
+
+ self.PrintDataInfo(partition)
+ self.ParseTransferList(partition + ".transfer.list")
+
+ def PrintMetadata(self):
+ metadata_path = "META-INF/com/android/metadata"
+ logging.info("\nMetadata info:")
+ metadata_info = {}
+ for line in self.package.read(metadata_path).strip().splitlines():
+ index = line.find("=")
+ metadata_info[line[0 : index].strip()] = line[index + 1:].strip()
+ assert metadata_info.get("ota-type") == "BLOCK"
+ assert "pre-device" in metadata_info
+ logging.info("device: {}".format(metadata_info["pre-device"]))
+ if "pre-build" in metadata_info:
+ logging.info("pre-build: {}".format(metadata_info["pre-build"]))
+ assert "post-build" in metadata_info
+ logging.info("post-build: {}".format(metadata_info["post-build"]))
+
+ def Analyze(self):
+ logging.info("Analyzing ota package: " + self.package.filename)
+ self.PrintMetadata()
+ assert "system.new.dat" in self.package.namelist()
+ self.AnalyzePartition("system")
+ if "vendor.new.dat" in self.package.namelist():
+ self.AnalyzePartition("vendor")
+
+    # TODO: Add analysis of other partitions (e.g. bootloader, boot, radio).
+
+ BLOCK_SIZE = 4096
+ logging.info("\nOTA package analyzed:")
+ logging.info("new data size (uncompressed): " +
+ OtaPackageParser.GetSizeString(self.new_data_size))
+ logging.info("patch data size (uncompressed): " +
+ OtaPackageParser.GetSizeString(self.patch_data_size))
+ logging.info("total data written: " +
+ OtaPackageParser.GetSizeString(self.block_written * BLOCK_SIZE))
+ logging.info("total data stashed: " +
+ OtaPackageParser.GetSizeString(self.block_stashed * BLOCK_SIZE))
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(description='Analyze an OTA package.')
+ parser.add_argument("ota_package", help='Path of the OTA package.')
+ args = parser.parse_args(argv)
+
+ logging_format = '%(message)s'
+ logging.basicConfig(level=logging.INFO, format=logging_format)
+
+ try:
+ with zipfile.ZipFile(args.ota_package, 'r') as package:
+ package_parser = OtaPackageParser(package)
+ package_parser.Analyze()
+ except:
+ logging.error("Failed to read " + args.ota_package)
+ traceback.print_exc()
+ sys.exit(1)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
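A minimal usage sketch of the new analyzer, mirroring what main() does when the script is run directly (the package path is illustrative):

    import logging
    import zipfile

    from ota_package_parser import OtaPackageParser

    logging.basicConfig(level=logging.INFO, format='%(message)s')

    # Log metadata, data sizes and the simulated block I/O of a block-based OTA.
    with zipfile.ZipFile("ota-update.zip", "r") as package:
        OtaPackageParser(package).Analyze()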
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 2e0b44d..b9bb4d0 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -92,8 +92,6 @@
import errno
import os
import re
-import shutil
-import stat
import subprocess
import tempfile
import zipfile
@@ -105,6 +103,7 @@
OPTIONS.extra_apks = {}
OPTIONS.key_map = {}
+OPTIONS.rebuild_recovery = False
OPTIONS.replace_ota_keys = False
OPTIONS.replace_verity_public_key = False
OPTIONS.replace_verity_private_key = False
@@ -187,30 +186,8 @@
maxsize = max([len(os.path.basename(i.filename))
for i in input_tf_zip.infolist()
if i.filename.endswith('.apk')])
- rebuild_recovery = False
system_root_image = misc_info.get("system_root_image") == "true"
- # tmpdir will only be used to regenerate the recovery-from-boot patch.
- tmpdir = tempfile.mkdtemp()
- # We're not setting the permissions precisely as in attr, because that work
- # will be handled by mkbootfs (using the values from the canned or the
- # compiled-in fs_config).
- def write_to_temp(fn, attr, data):
- fn = os.path.join(tmpdir, fn)
- if fn.endswith("/"):
- fn = os.path.join(tmpdir, fn)
- os.mkdir(fn)
- else:
- d = os.path.dirname(fn)
- if d and not os.path.exists(d):
- os.makedirs(d)
-
- if stat.S_ISLNK(attr >> 16):
- os.symlink(data, fn)
- else:
- with open(fn, "wb") as f:
- f.write(data)
-
for info in input_tf_zip.infolist():
if info.filename.startswith("IMAGES/"):
continue
@@ -241,21 +218,17 @@
print "rewriting %s:" % (info.filename,)
new_data = RewriteProps(data, misc_info)
common.ZipWriteStr(output_tf_zip, out_info, new_data)
- if info.filename in ("BOOT/RAMDISK/default.prop",
- "ROOT/default.prop",
- "RECOVERY/RAMDISK/default.prop"):
- write_to_temp(info.filename, info.external_attr, new_data)
elif info.filename.endswith("mac_permissions.xml"):
print "rewriting %s with new keys." % (info.filename,)
new_data = ReplaceCerts(data)
common.ZipWriteStr(output_tf_zip, out_info, new_data)
- # Trigger a rebuild of the recovery patch if needed.
+ # Ask add_img_to_target_files to rebuild the recovery patch if needed.
elif info.filename in ("SYSTEM/recovery-from-boot.p",
"SYSTEM/etc/recovery.img",
"SYSTEM/bin/install-recovery.sh"):
- rebuild_recovery = True
+ OPTIONS.rebuild_recovery = True
# Don't copy OTA keys if we're replacing them.
elif (OPTIONS.replace_ota_keys and
@@ -287,31 +260,12 @@
elif info.filename == "META/care_map.txt":
pass
- # Copy BOOT/, RECOVERY/, META/, ROOT/ to rebuild recovery patch. This case
- # must come AFTER other matching rules.
- elif (info.filename.startswith("BOOT/") or
- info.filename.startswith("RECOVERY/") or
- info.filename.startswith("META/") or
- info.filename.startswith("ROOT/") or
- info.filename == "SYSTEM/etc/recovery-resource.dat"):
- write_to_temp(info.filename, info.external_attr, data)
- common.ZipWriteStr(output_tf_zip, out_info, data)
-
# A non-APK file; copy it verbatim.
else:
common.ZipWriteStr(output_tf_zip, out_info, data)
if OPTIONS.replace_ota_keys:
- new_recovery_keys = ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info)
- if new_recovery_keys:
- if system_root_image:
- recovery_keys_location = "BOOT/RAMDISK/res/keys"
- else:
- recovery_keys_location = "RECOVERY/RAMDISK/res/keys"
- # The "new_recovery_keys" has been already written into the output_tf_zip
- # while calling ReplaceOtaKeys(). We're just putting the same copy to
- # tmpdir in case we need to regenerate the recovery-from-boot patch.
- write_to_temp(recovery_keys_location, 0o755 << 16, new_recovery_keys)
+ ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info)
# Replace the keyid string in META/misc_info.txt.
if OPTIONS.replace_verity_private_key:
@@ -325,33 +279,13 @@
dest = "BOOT/RAMDISK/verity_key"
# We are replacing the one in boot image only, since the one under
# recovery won't ever be needed.
- new_data = ReplaceVerityPublicKey(
+ ReplaceVerityPublicKey(
output_tf_zip, dest, OPTIONS.replace_verity_public_key[1])
- write_to_temp(dest, 0o755 << 16, new_data)
# Replace the keyid string in BOOT/cmdline.
if OPTIONS.replace_verity_keyid:
- new_cmdline = ReplaceVerityKeyId(input_tf_zip, output_tf_zip,
- OPTIONS.replace_verity_keyid[1])
- # Writing the new cmdline to tmpdir is redundant as the bootimage
- # gets build in the add_image_to_target_files and rebuild_recovery
- # is not exercised while building the boot image for the A/B
- # path
- write_to_temp("BOOT/cmdline", 0o755 << 16, new_cmdline)
-
- if rebuild_recovery:
- recovery_img = common.GetBootableImage(
- "recovery.img", "recovery.img", tmpdir, "RECOVERY", info_dict=misc_info)
- boot_img = common.GetBootableImage(
- "boot.img", "boot.img", tmpdir, "BOOT", info_dict=misc_info)
-
- def output_sink(fn, data):
- common.ZipWriteStr(output_tf_zip, "SYSTEM/" + fn, data)
-
- common.MakeRecoveryPatch(tmpdir, output_sink, recovery_img, boot_img,
- info_dict=misc_info)
-
- shutil.rmtree(tmpdir)
+ ReplaceVerityKeyId(input_tf_zip, output_tf_zip,
+ OPTIONS.replace_verity_keyid[1])
def ReplaceCerts(data):
@@ -715,7 +649,12 @@
common.ZipClose(output_zip)
# Skip building userdata.img and cache.img when signing the target files.
- new_args = ["--is_signing", args[1]]
+ new_args = ["--is_signing"]
+ # add_img_to_target_files builds the system image from scratch, so the
+ # recovery patch is guaranteed to be regenerated there.
+ if OPTIONS.rebuild_recovery:
+ new_args.append("--rebuild_recovery")
+ new_args.append(args[1])
add_img_to_target_files.main(new_args)
print "done."
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
new file mode 100755
index 0000000..1dd3159
--- /dev/null
+++ b/tools/releasetools/validate_target_files.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Validate a given (signed) target_files.zip.
+
+It performs checks to ensure the integrity of the input zip.
+ - It verifies the consistency between the files in IMAGES/system.img (read
+   via IMAGES/system.map) and those under the unpacked SYSTEM/ folder. The
+   same check also applies to the vendor image if present.
+"""
+
+import common
+import logging
+import os.path
+import sparse_img
+import sys
+
+
+def _GetImage(which, tmpdir):
+ assert which in ('system', 'vendor')
+
+ path = os.path.join(tmpdir, 'IMAGES', which + '.img')
+ mappath = os.path.join(tmpdir, 'IMAGES', which + '.map')
+
+ # Map file must exist (allowed to be empty).
+ assert os.path.exists(path) and os.path.exists(mappath)
+
+ clobbered_blocks = '0'
+ return sparse_img.SparseImage(path, mappath, clobbered_blocks)
+
+
+def ValidateFileConsistency(input_zip, input_tmp):
+ """Compare the files from image files and unpacked folders."""
+
+ def RoundUpTo4K(value):
+ rounded_up = value + 4095
+ return rounded_up - (rounded_up % 4096)
+
+ def CheckAllFiles(which):
+ logging.info('Checking %s image.', which)
+ image = _GetImage(which, input_tmp)
+ prefix = '/' + which
+ for entry in image.file_map:
+ if not entry.startswith(prefix):
+ continue
+
+      # Read the blocks where the file resides. Note that they will contain
+      # the bytes past the file length, which are expected to be padded with
+      # '\0's.
+ ranges = image.file_map[entry]
+ blocks_sha1 = image.RangeSha1(ranges)
+
+      # The filename under the unpacked directory, such as SYSTEM/bin/sh.
+ unpacked_name = os.path.join(
+ input_tmp, which.upper(), entry[(len(prefix) + 1):])
+ with open(unpacked_name) as f:
+ file_data = f.read()
+ file_size = len(file_data)
+ file_size_rounded_up = RoundUpTo4K(file_size)
+ file_data += '\0' * (file_size_rounded_up - file_size)
+ file_sha1 = common.File(entry, file_data).sha1
+
+ assert blocks_sha1 == file_sha1, \
+ 'file: %s, range: %s, blocks_sha1: %s, file_sha1: %s' % (
+ entry, ranges, blocks_sha1, file_sha1)
+
+ logging.info('Validating file consistency.')
+
+ # Verify IMAGES/system.img.
+ CheckAllFiles('system')
+
+ # Verify IMAGES/vendor.img if applicable.
+ if 'VENDOR/' in input_zip.namelist():
+ CheckAllFiles('vendor')
+
+ # Not checking IMAGES/system_other.img since it doesn't have the map file.
+
+
+def main(argv):
+ def option_handler():
+ return True
+
+ args = common.ParseOptions(
+ argv, __doc__, extra_opts="",
+ extra_long_opts=[],
+ extra_option_handler=option_handler)
+
+ if len(args) != 1:
+ common.Usage(__doc__)
+ sys.exit(1)
+
+ logging_format = '%(asctime)s - %(filename)s - %(levelname)-8s: %(message)s'
+ date_format = '%Y/%m/%d %H:%M:%S'
+ logging.basicConfig(level=logging.INFO, format=logging_format,
+ datefmt=date_format)
+
+ logging.info("Unzipping the input target_files.zip: %s", args[0])
+ input_tmp, input_zip = common.UnzipTemp(args[0])
+
+ ValidateFileConsistency(input_zip, input_tmp)
+
+ # TODO: Check if the OTA keys have been properly updated (the ones on /system,
+ # in recovery image).
+
+ # TODO(b/35411009): Verify the contents in /system/bin/install-recovery.sh.
+
+ logging.info("Done.")
+
+
+if __name__ == '__main__':
+ try:
+ main(sys.argv[1:])
+ finally:
+ common.Cleanup()
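The heart of the check is comparing a file's SHA-1 computed from the sparse image blocks with the SHA-1 of the unpacked file padded to the next 4K boundary. A standalone sketch of that padding-and-hash step, using hashlib in place of the common.File and RangeSha1 helpers:

    import hashlib

    def padded_sha1(path, block_size=4096):
        # Hash the file as it appears on the block device: padded with '\0's
        # up to the next block boundary (equivalent to RoundUpTo4K above).
        with open(path, "rb") as f:
            data = f.read()
        padding = (block_size - len(data) % block_size) % block_size
        return hashlib.sha1(data + b"\0" * padding).hexdigest()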
diff --git a/tools/signapk/Android.mk b/tools/signapk/Android.mk
index 4506e2f..051a51d 100644
--- a/tools/signapk/Android.mk
+++ b/tools/signapk/Android.mk
@@ -30,7 +30,6 @@
include $(BUILD_HOST_JAVA_LIBRARY)
ifeq ($(TARGET_BUILD_APPS),)
-ifeq ($(BRILLO),)
# The post-build signing tools need signapk.jar and its shared libraries,
# but we don't need this if we're just doing unbundled apps.
my_dist_files := $(LOCAL_INSTALLED_MODULE) \
@@ -39,4 +38,3 @@
$(call dist-for-goals,droidcore,$(my_dist_files))
my_dist_files :=
endif
-endif
diff --git a/tools/warn.py b/tools/warn.py
index 5be6d9d..44ad368 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -73,14 +73,9 @@
# New dynamic HTML related function to emit data:
# escape_string, strip_escape_string, emit_warning_arrays
# emit_js_data():
-#
-# To emit csv files of warning message counts:
-# flag --gencsv
-# description_for_csv, string_for_csv:
-# count_severity(sev, kind):
-# dump_csv():
import argparse
+import csv
import multiprocessing
import os
import re
@@ -88,6 +83,9 @@
import sys
parser = argparse.ArgumentParser(description='Convert a build log into HTML')
+parser.add_argument('--csvpath',
+ help='Save CSV warning file to the passed absolute path',
+ default=None)
parser.add_argument('--gencsv',
help='Generate a CSV file with number of various warnings',
action='store_true',
@@ -140,6 +138,24 @@
column_headers = [a[1] for a in attributes]
headers = [a[2] for a in attributes]
+
+def tidy_warn_pattern(description, pattern):
+ return {
+ 'category': 'C/C++',
+ 'severity': Severity.TIDY,
+ 'description': 'clang-tidy ' + description,
+ 'patterns': [r'.*: .+\[' + pattern + r'\]$']
+ }
+
+
+def simple_tidy_warn_pattern(description):
+ return tidy_warn_pattern(description, description)
+
+
+def group_tidy_warn_pattern(description):
+ return tidy_warn_pattern(description, description + r'-.+')
+
+
warn_patterns = [
# pylint:disable=line-too-long,g-inconsistent-quotes
{'category': 'C/C++', 'severity': Severity.ANALYZER,
@@ -225,6 +241,7 @@
'description': 'Unused function, variable or label',
'patterns': [r".*: warning: '.+' defined but not used",
r".*: warning: unused function '.+'",
+ r".*: warning: lambda capture .* is not used",
r".*: warning: private field '.+' is not used",
r".*: warning: unused variable '.+'"]},
{'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wunused-value',
@@ -259,6 +276,9 @@
{'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wformat-extra-args',
'description': 'Too many arguments for format string',
'patterns': [r".*: warning: too many arguments for format"]},
+ {'category': 'C/C++', 'severity': Severity.MEDIUM,
+ 'description': 'Too many arguments in call',
+ 'patterns': [r".*: warning: too many arguments in call to "]},
{'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wformat-invalid-specifier',
'description': 'Invalid format specifier',
'patterns': [r".*: warning: invalid .+ specifier '.+'.+format-invalid-specifier"]},
@@ -438,6 +458,9 @@
{'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wswitch-enum',
'description': 'Enum value not handled in switch',
'patterns': [r".*: warning: .*enumeration value.* not handled in switch.+Wswitch"]},
+ {'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wuser-defined-warnings',
+ 'description': 'User defined warnings',
+ 'patterns': [r".*: warning: .* \[-Wuser-defined-warnings\]$"]},
{'category': 'java', 'severity': Severity.MEDIUM, 'option': '-encoding',
'description': 'Java: Non-ascii characters used, but ascii encoding specified',
'patterns': [r".*: warning: unmappable character for encoding ascii"]},
@@ -453,6 +476,9 @@
{'category': 'java', 'severity': Severity.MEDIUM,
'description': '_ used as an identifier',
'patterns': [r".*: warning: '_' used as an identifier"]},
+ {'category': 'java', 'severity': Severity.HIGH,
+ 'description': 'Use of internal proprietary API',
+ 'patterns': [r".*: warning: .* is internal proprietary API and may be removed"]},
# Warnings from Javac
{'category': 'java',
@@ -1384,6 +1410,9 @@
'description': 'Taking address of temporary',
'patterns': [r".*: warning: taking address of temporary"]},
{'category': 'C/C++', 'severity': Severity.MEDIUM,
+ 'description': 'Taking address of packed member',
+ 'patterns': [r".*: warning: taking address of packed member"]},
+ {'category': 'C/C++', 'severity': Severity.MEDIUM,
'description': 'Possible broken line continuation',
'patterns': [r".*: warning: backslash and newline separated by space"]},
{'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wundefined-var-template',
@@ -1527,6 +1556,9 @@
{'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wconversion-null',
'description': 'Converting to non-pointer type from NULL',
'patterns': [r".*: warning: converting to non-pointer type '.+' from NULL"]},
+ {'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wsign-conversion',
+ 'description': 'Implicit sign conversion',
+ 'patterns': [r".*: warning: implicit conversion changes signedness"]},
{'category': 'C/C++', 'severity': Severity.MEDIUM, 'option': '-Wnull-conversion',
'description': 'Converting NULL to non-pointer type',
'patterns': [r".*: warning: implicit conversion of NULL constant to '.+'"]},
@@ -1656,6 +1688,9 @@
{'category': 'C/C++', 'severity': Severity.LOW, 'option': '-Winvalid-pp-token',
'description': 'Invalid pp token',
'patterns': [r".*: warning: missing .+Winvalid-pp-token"]},
+ {'category': 'link', 'severity': Severity.LOW,
+ 'description': 'need glibc to link',
+ 'patterns': [r".*: warning: .* requires at runtime .* glibc .* for linking"]},
{'category': 'C/C++', 'severity': Severity.MEDIUM,
'description': 'Operator new returns NULL',
@@ -1750,6 +1785,15 @@
'description': 'Mismatched class vs struct tags',
'patterns': [r".*: warning: '.+' defined as a .+ here but previously declared as a .+mismatched-tags",
r".*: warning: .+ was previously declared as a .+mismatched-tags"]},
+ {'category': 'FindEmulator', 'severity': Severity.HARMLESS,
+ 'description': 'FindEmulator: No such file or directory',
+ 'patterns': [r".*: warning: FindEmulator: .* No such file or directory"]},
+ {'category': 'google_tests', 'severity': Severity.HARMLESS,
+ 'description': 'google_tests: unknown installed file',
+ 'patterns': [r".*: warning: .*_tests: Unknown installed file for module"]},
+ {'category': 'make', 'severity': Severity.HARMLESS,
+ 'description': 'unusual tags debug eng',
+ 'patterns': [r".*: warning: .*: unusual tags debug eng"]},
# these next ones are to deal with formatting problems resulting from the log being mixed up by 'make -j'
{'category': 'C/C++', 'severity': Severity.SKIP,
@@ -1763,60 +1807,46 @@
'patterns': [r".*: warning: In file included from .+,"]},
# warnings from clang-tidy
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy readability',
- 'patterns': [r".*: .+\[readability-.+\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy c++ core guidelines',
- 'patterns': [r".*: .+\[cppcoreguidelines-.+\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy google-default-arguments',
- 'patterns': [r".*: .+\[google-default-arguments\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy google-runtime-int',
- 'patterns': [r".*: .+\[google-runtime-int\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy google-runtime-operator',
- 'patterns': [r".*: .+\[google-runtime-operator\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy google-runtime-references',
- 'patterns': [r".*: .+\[google-runtime-references\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy google-build',
- 'patterns': [r".*: .+\[google-build-.+\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy google-explicit',
- 'patterns': [r".*: .+\[google-explicit-.+\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy google-readability',
- 'patterns': [r".*: .+\[google-readability-.+\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy google-global',
- 'patterns': [r".*: .+\[google-global-.+\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy google- other',
- 'patterns': [r".*: .+\[google-.+\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy modernize',
- 'patterns': [r".*: .+\[modernize-.+\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy misc',
- 'patterns': [r".*: .+\[misc-.+\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy performance-faster-string-find',
- 'patterns': [r".*: .+\[performance-faster-string-find\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy performance-for-range-copy',
- 'patterns': [r".*: .+\[performance-for-range-copy\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy performance-implicit-cast-in-loop',
- 'patterns': [r".*: .+\[performance-implicit-cast-in-loop\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy performance-unnecessary-copy-initialization',
- 'patterns': [r".*: .+\[performance-unnecessary-copy-initialization\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy performance-unnecessary-value-param',
- 'patterns': [r".*: .+\[performance-unnecessary-value-param\]$"]},
+ group_tidy_warn_pattern('cert'),
+ group_tidy_warn_pattern('clang-diagnostic'),
+ group_tidy_warn_pattern('cppcoreguidelines'),
+ group_tidy_warn_pattern('llvm'),
+ simple_tidy_warn_pattern('google-default-arguments'),
+ simple_tidy_warn_pattern('google-runtime-int'),
+ simple_tidy_warn_pattern('google-runtime-operator'),
+ simple_tidy_warn_pattern('google-runtime-references'),
+ group_tidy_warn_pattern('google-build'),
+ group_tidy_warn_pattern('google-explicit'),
+    group_tidy_warn_pattern('google-readability'),
+    group_tidy_warn_pattern('google-global'),
+ group_tidy_warn_pattern('google'),
+ simple_tidy_warn_pattern('hicpp-explicit-conversions'),
+ simple_tidy_warn_pattern('hicpp-function-size'),
+ simple_tidy_warn_pattern('hicpp-invalid-access-moved'),
+ simple_tidy_warn_pattern('hicpp-member-init'),
+ simple_tidy_warn_pattern('hicpp-delete-operators'),
+ simple_tidy_warn_pattern('hicpp-special-member-functions'),
+ simple_tidy_warn_pattern('hicpp-use-equals-default'),
+ simple_tidy_warn_pattern('hicpp-use-equals-delete'),
+ simple_tidy_warn_pattern('hicpp-no-assembler'),
+ simple_tidy_warn_pattern('hicpp-noexcept-move'),
+ simple_tidy_warn_pattern('hicpp-use-override'),
+ group_tidy_warn_pattern('hicpp'),
+ group_tidy_warn_pattern('modernize'),
+ group_tidy_warn_pattern('misc'),
+ simple_tidy_warn_pattern('performance-faster-string-find'),
+ simple_tidy_warn_pattern('performance-for-range-copy'),
+ simple_tidy_warn_pattern('performance-implicit-cast-in-loop'),
+ simple_tidy_warn_pattern('performance-inefficient-string-concatenation'),
+ simple_tidy_warn_pattern('performance-type-promotion-in-math-fn'),
+ simple_tidy_warn_pattern('performance-unnecessary-copy-initialization'),
+ simple_tidy_warn_pattern('performance-unnecessary-value-param'),
+ group_tidy_warn_pattern('performance'),
+ group_tidy_warn_pattern('readability'),
+
+ # warnings from clang-tidy's clang-analyzer checks
{'category': 'C/C++', 'severity': Severity.ANALYZER,
'description': 'clang-analyzer Unreachable code',
'patterns': [r".*: warning: This statement is never executed.*UnreachableCode"]},
@@ -1857,18 +1887,12 @@
'description': 'clang-analyzer call path problems',
'patterns': [r".*: warning: Call Path : .+"]},
{'category': 'C/C++', 'severity': Severity.ANALYZER,
+ 'description': 'clang-analyzer excessive padding',
+ 'patterns': [r".*: warning: Excessive padding in '.*'"]},
+ {'category': 'C/C++', 'severity': Severity.ANALYZER,
'description': 'clang-analyzer other',
'patterns': [r".*: .+\[clang-analyzer-.+\]$",
r".*: Call Path : .+$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy CERT',
- 'patterns': [r".*: .+\[cert-.+\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-tidy llvm',
- 'patterns': [r".*: .+\[llvm-.+\]$"]},
- {'category': 'C/C++', 'severity': Severity.TIDY,
- 'description': 'clang-diagnostic',
- 'patterns': [r".*: .+\[clang-diagnostic-.+\]$"]},
# catch-all for warnings this script doesn't know about yet
{'category': 'C/C++', 'severity': Severity.UNKNOWN,
@@ -2387,7 +2411,8 @@
def parse_input_file(infile):
- """Parse input file, match warning lines."""
+ """Parse input file, collect parameters and warning lines."""
+ global android_root
global platform_version
global target_product
global target_variant
@@ -2402,7 +2427,7 @@
if warning_pattern.match(line):
line = normalize_warning_line(line)
warning_lines.add(line)
- elif line_counter < 50:
+ elif line_counter < 100:
# save a little bit of time by only doing this for the first few lines
line_counter += 1
m = re.search('(?<=^PLATFORM_VERSION=).*', line)
@@ -2414,6 +2439,9 @@
m = re.search('(?<=^TARGET_BUILD_VARIANT=).*', line)
if m is not None:
target_variant = m.group(0)
+ m = re.search('.* TOP=([^ ]*) .*', line)
+ if m is not None:
+ android_root = m.group(1)
return warning_lines
@@ -2465,10 +2493,11 @@
if (FlagURL == "") return line;
if (FlagSeparator == "") {
return line.replace(ParseLinePattern,
- "<a href='" + FlagURL + "/$1'>$1</a>:$2:$3");
+ "<a target='_blank' href='" + FlagURL + "/$1'>$1</a>:$2:$3");
}
return line.replace(ParseLinePattern,
- "<a href='" + FlagURL + "/$1" + FlagSeparator + "$2'>$1:$2</a>:$3");
+ "<a target='_blank' href='" + FlagURL + "/$1" + FlagSeparator +
+ "$2'>$1:$2</a>:$3");
}
function createArrayOfDictionaries(n) {
var result = [];
@@ -2672,48 +2701,46 @@
return category['description']
-def string_for_csv(s):
- # Only some Java warning desciptions have used quotation marks.
- # TODO(chh): if s has double quote character, s should be quoted.
- if ',' in s:
- # TODO(chh): replace a double quote with two double quotes in s.
- return '"{}"'.format(s)
- return s
-
-
-def count_severity(sev, kind):
+def count_severity(writer, sev, kind):
"""Count warnings of given severity."""
total = 0
for i in warn_patterns:
if i['severity'] == sev and i['members']:
n = len(i['members'])
total += n
- warning = string_for_csv(kind + ': ' + description_for_csv(i))
- print '{},,{}'.format(n, warning)
+ warning = kind + ': ' + description_for_csv(i)
+ writer.writerow([n, '', warning])
# print number of warnings for each project, ordered by project name.
projects = i['projects'].keys()
projects.sort()
for p in projects:
- print '{},{},{}'.format(i['projects'][p], p, warning)
- print '{},,{}'.format(total, kind + ' warnings')
+ writer.writerow([i['projects'][p], p, warning])
+ writer.writerow([total, '', kind + ' warnings'])
+
return total
# dump number of warnings in csv format to stdout
-def dump_csv():
+def dump_csv(writer):
"""Dump number of warnings in csv format to stdout."""
sort_warnings()
total = 0
for s in Severity.range:
- total += count_severity(s, Severity.column_headers[s])
- print '{},,{}'.format(total, 'All warnings')
+ total += count_severity(writer, s, Severity.column_headers[s])
+ writer.writerow([total, '', 'All warnings'])
def main():
warning_lines = parse_input_file(open(args.buildlog, 'r'))
parallel_classify_warnings(warning_lines)
+  # If the user passes a csv path, save the csv output to that file.
+  # If the user also passed --gencsv, write the csv output to stdout as well.
+  # Otherwise (no --gencsv), dump the HTML report to stdout.
+ if args.csvpath:
+ with open(args.csvpath, 'w') as f:
+ dump_csv(csv.writer(f, lineterminator='\n'))
if args.gencsv:
- dump_csv()
+ dump_csv(csv.writer(sys.stdout, lineterminator='\n'))
else:
dump_html()
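The CSV rows written by count_severity() and dump_csv() have the shape [count, project, description], with an empty project meaning "all projects". A small sketch with illustrative numbers; on the command line, --csvpath saves this output to a file, --gencsv prints it to stdout, and otherwise the HTML report is printed.

    import csv
    import sys

    writer = csv.writer(sys.stdout, lineterminator='\n')
    # Per-severity pattern total, per-project breakdown, then the severity total.
    writer.writerow([12, '', 'Medium: Unused function, variable or label'])
    writer.writerow([7, 'frameworks/base', 'Medium: Unused function, variable or label'])
    writer.writerow([12, '', 'Medium warnings'])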