Merge "BOARD_VNDK_VERSION always by default"
diff --git a/Changes.md b/Changes.md
index a03a48c..d834803 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,5 +1,31 @@
# Build System Changes for Android.mk Writers
+## Stop referencing sysprop_library directly from cc modules
+
+For the migration to Bazel, we are no longer mapping sysprop_library targets
+to their generated `cc_library` counterparts when depending on them from a
+cc module. Instead, directly depend on the generated module by prefixing the
+module name with `lib`. For example, depending on the following module:
+
+```
+sysprop_library {
+ name: "foo",
+ srcs: ["foo.sysprop"],
+}
+```
+
+from a module named `bar` can be done like so:
+
+```
+cc_library {
+ name: "bar",
+ srcs: ["bar.cc"],
+ deps: ["libfoo"],
+}
+```
+
+Failure to do this will result in an error about a missing variant.
+
## Gensrcs starts disallowing depfile property
To migrate all gensrcs to Bazel, we are restricting the use of depfile property
@@ -790,6 +816,16 @@
Clang is the default and only supported Android compiler, so there is no reason
for this option to exist.
+### Stop using clang property
+
+Clang has been deleted from Soong. To fix any build errors, remove the clang
+property from affected Android.bp files using bpmodify.
+
+
+```shell
+go run bpmodify.go -w -m=module_name -remove-property=true -property=clang filepath
+```
+
### Other envsetup.sh variables {#other_envsetup_variables}
* ANDROID_TOOLCHAIN
diff --git a/core/Makefile b/core/Makefile
index e724a43..f28935b 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -963,6 +963,8 @@
$(if $(1),--partition_size $(1),--dynamic_partition_size)
endef
+ifndef BOARD_PREBUILT_BOOTIMAGE
+
ifneq ($(strip $(TARGET_NO_KERNEL)),true)
INTERNAL_BOOTIMAGE_ARGS := \
$(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET))
@@ -984,14 +986,8 @@
INTERNAL_BOOTIMAGE_FILES := $(filter-out --%,$(INTERNAL_BOOTIMAGE_ARGS))
-ifeq ($(PRODUCT_SUPPORTS_VERITY),true)
-ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-VERITY_KEYID := veritykeyid=id:`openssl x509 -in $(PRODUCT_VERITY_SIGNING_KEY).x509.pem -text \
- | grep keyid | sed 's/://g' | tr -d '[:space:]' | tr '[:upper:]' '[:lower:]' | sed 's/keyid//g'`
-endif
-endif
-
-INTERNAL_KERNEL_CMDLINE := $(strip $(INTERNAL_KERNEL_CMDLINE) buildvariant=$(TARGET_BUILD_VARIANT) $(VERITY_KEYID))
+# TODO(b/241346584) Remove this when BOARD_BUILD_SYSTEM_ROOT_IMAGE is deprecated
+INTERNAL_KERNEL_CMDLINE := $(strip $(INTERNAL_KERNEL_CMDLINE) buildvariant=$(TARGET_BUILD_VARIANT))
# kernel cmdline/base/pagesize in boot.
# - If using GKI, use GENERIC_KERNEL_CMDLINE. Remove kernel base and pagesize because they are
@@ -1108,30 +1104,7 @@
@echo "make $@: ignoring dependencies"
$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_board_avb_enabled,$(b)))
-else ifeq (true,$(PRODUCT_SUPPORTS_BOOT_SIGNER)) # BOARD_AVB_ENABLE != true
-
-# $1: boot image target
-define build_boot_supports_boot_signer
- $(MKBOOTIMG) --kernel $(call bootimage-to-kernel,$(1)) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1)
- $(BOOT_SIGNER) /boot $@ $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1)
- $(call assert-max-image-size,$(1),$(call get-bootimage-partition-size,$(1),boot))
-endef
-
-$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(BOOT_SIGNER)
- $(call pretty,"Target boot image: $@")
- $(call build_boot_supports_boot_signer,$@)
-
-$(call declare-1p-container,$(INSTALLED_BOOTIMAGE_TARGET),)
-$(call declare-container-license-deps,$(INSTALLED_BOOTIMAGE_TARGET),$(INTERNAL_BOOTIMAGE_FILES),$(PRODUCT_OUT)/:/)
-
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
-
-.PHONY: bootimage-nodeps
-bootimage-nodeps: $(MKBOOTIMG) $(BOOT_SIGNER)
- @echo "make $@: ignoring dependencies"
- $(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_supports_boot_signer,$(b)))
-
-else ifeq (true,$(PRODUCT_SUPPORTS_VBOOT)) # PRODUCT_SUPPORTS_BOOT_SIGNER != true
+else ifeq (true,$(PRODUCT_SUPPORTS_VBOOT)) # BOARD_AVB_ENABLE != true
# $1: boot image target
define build_boot_supports_vboot
@@ -1180,7 +1153,10 @@
endif # BUILDING_BOOT_IMAGE
else # TARGET_NO_KERNEL == "true"
-ifdef BOARD_PREBUILT_BOOTIMAGE
+INSTALLED_BOOTIMAGE_TARGET :=
+endif # TARGET_NO_KERNEL
+
+else # BOARD_PREBUILT_BOOTIMAGE defined
INTERNAL_PREBUILT_BOOTIMAGE := $(BOARD_PREBUILT_BOOTIMAGE)
INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
@@ -1202,10 +1178,8 @@
cp $(INTERNAL_PREBUILT_BOOTIMAGE) $@
endif # BOARD_AVB_ENABLE
-else # BOARD_PREBUILT_BOOTIMAGE not defined
-INSTALLED_BOOTIMAGE_TARGET :=
endif # BOARD_PREBUILT_BOOTIMAGE
-endif # TARGET_NO_KERNEL
+
endif # my_installed_prebuilt_gki_apex not defined
my_apex_extracted_boot_image :=
@@ -1285,10 +1259,6 @@
INSTALLED_FILES_OUTSIDE_IMAGES := $(filter-out $(TARGET_VENDOR_RAMDISK_OUT)/%, $(INSTALLED_FILES_OUTSIDE_IMAGES))
ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
-ifeq ($(PRODUCT_SUPPORTS_VERITY),true)
- $(error vboot 1.0 does not support vendor_boot partition)
-endif
-
INTERNAL_VENDOR_RAMDISK_FILES := $(filter $(TARGET_VENDOR_RAMDISK_OUT)/%, \
$(ALL_DEFAULT_INSTALLED_MODULES))
@@ -1809,13 +1779,6 @@
INTERNAL_USERIMAGES_DEPS += $(MKSQUASHFSUSERIMG)
endif
-ifeq (true,$(PRODUCT_SUPPORTS_VERITY))
-INTERNAL_USERIMAGES_DEPS += $(BUILD_VERITY_METADATA) $(BUILD_VERITY_TREE) $(APPEND2SIMG) $(VERITY_SIGNER)
-ifeq (true,$(PRODUCT_SUPPORTS_VERITY_FEC))
-INTERNAL_USERIMAGES_DEPS += $(FEC)
-endif
-endif
-
ifeq ($(BOARD_AVB_ENABLE),true)
INTERNAL_USERIMAGES_DEPS += $(AVBTOOL)
endif
@@ -1832,14 +1795,6 @@
INTERNAL_USERIMAGES_DEPS += $(SELINUX_FC)
-ifeq (true,$(PRODUCT_USE_DYNAMIC_PARTITIONS))
-
-ifeq ($(PRODUCT_SUPPORTS_VERITY),true)
- $(error vboot 1.0 doesn't support logical partition)
-endif
-
-endif # PRODUCT_USE_DYNAMIC_PARTITIONS
-
# $(1) the partition name (eg system)
# $(2) the image prop file
define add-common-flags-to-image-props
@@ -1949,11 +1904,6 @@
$(if $(BOARD_EXT4_SHARE_DUP_BLOCKS),$(hide) echo "ext4_share_dup_blocks=$(BOARD_EXT4_SHARE_DUP_BLOCKS)" >> $(1))
$(if $(BOARD_FLASH_LOGICAL_BLOCK_SIZE), $(hide) echo "flash_logical_block_size=$(BOARD_FLASH_LOGICAL_BLOCK_SIZE)" >> $(1))
$(if $(BOARD_FLASH_ERASE_BLOCK_SIZE), $(hide) echo "flash_erase_block_size=$(BOARD_FLASH_ERASE_BLOCK_SIZE)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_BOOT_SIGNER),$(hide) echo "boot_signer=$(PRODUCT_SUPPORTS_BOOT_SIGNER)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity=$(PRODUCT_SUPPORTS_VERITY)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_key=$(PRODUCT_VERITY_SIGNING_KEY)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_signer_cmd=$(notdir $(VERITY_SIGNER))" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VERITY_FEC),$(hide) echo "verity_fec=$(PRODUCT_SUPPORTS_VERITY_FEC)" >> $(1))
$(if $(filter eng, $(TARGET_BUILD_VARIANT)),$(hide) echo "verity_disable=true" >> $(1))
$(if $(PRODUCT_SYSTEM_VERITY_PARTITION),$(hide) echo "system_verity_block_device=$(PRODUCT_SYSTEM_VERITY_PARTITION)" >> $(1))
$(if $(PRODUCT_VENDOR_VERITY_PARTITION),$(hide) echo "vendor_verity_block_device=$(PRODUCT_VENDOR_VERITY_PARTITION)" >> $(1))
@@ -2483,12 +2433,6 @@
$(MKBOOTIMG) $(if $(strip $(2)),--kernel $(strip $(2))) $(INTERNAL_RECOVERYIMAGE_ARGS) \
$(INTERNAL_MKBOOTIMG_VERSION_ARGS) \
$(BOARD_RECOVERY_MKBOOTIMG_ARGS) --output $(1))
- $(if $(filter true,$(PRODUCT_SUPPORTS_BOOT_SIGNER)),\
- $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
- $(BOOT_SIGNER) /boot $(1) $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1),\
- $(BOOT_SIGNER) /recovery $(1) $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1)\
- )\
- )
$(if $(filter true,$(PRODUCT_SUPPORTS_VBOOT)), \
$(VBOOT_SIGNER) $(FUTILITY) $(1).unsigned $(PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(1).keyblock $(1))
$(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)), \
@@ -2501,9 +2445,6 @@
endef
recoveryimage-deps := $(MKBOOTIMG) $(recovery_ramdisk) $(recovery_kernel)
-ifeq (true,$(PRODUCT_SUPPORTS_BOOT_SIGNER))
- recoveryimage-deps += $(BOOT_SIGNER)
-endif
ifeq (true,$(PRODUCT_SUPPORTS_VBOOT))
recoveryimage-deps += $(VBOOT_SIGNER)
endif
@@ -6258,7 +6199,7 @@
# The mac build doesn't build dex2oat, so create the zip file only if the build OS is linux.
ifeq ($(BUILD_OS),linux)
ifneq ($(DEX2OAT),)
-dexpreopt_tools_deps := $(DEXPREOPT_GEN_DEPS) $(DEXPREOPT_GEN) $(AAPT2)
+dexpreopt_tools_deps := $(DEXPREOPT_GEN_DEPS) $(DEXPREOPT_GEN)
dexpreopt_tools_deps += $(HOST_OUT_EXECUTABLES)/dexdump
dexpreopt_tools_deps += $(HOST_OUT_EXECUTABLES)/oatdump
DEXPREOPT_TOOLS_ZIP := $(PRODUCT_OUT)/dexpreopt_tools.zip
@@ -6860,8 +6801,6 @@
sdk_atree_files += $(atree_dir)/sdk.atree
endif
-include $(BUILD_SYSTEM)/sdk_font.mk
-
deps := \
$(OUT_DOCS)/offline-sdk-timestamp \
$(SDK_METADATA_FILES) \
@@ -6869,8 +6808,7 @@
$(ATREE_FILES) \
$(sdk_atree_files) \
$(HOST_OUT_EXECUTABLES)/atree \
- $(HOST_OUT_EXECUTABLES)/line_endings \
- $(SDK_FONT_DEPS)
+ $(HOST_OUT_EXECUTABLES)/line_endings
INTERNAL_SDK_TARGET := $(sdk_dir)/$(sdk_name).zip
$(INTERNAL_SDK_TARGET): PRIVATE_NAME := $(sdk_name)
@@ -6893,7 +6831,6 @@
fi; \
done; \
if [ $$FAIL ]; then exit 1; fi
- $(hide) echo $(notdir $(SDK_FONT_DEPS)) | tr " " "\n" > $(SDK_FONT_TEMP)/fontsInSdk.txt
$(hide) ( \
ATREE_STRIP="$(HOST_STRIP) -x" \
$(HOST_OUT_EXECUTABLES)/atree \
@@ -6909,7 +6846,6 @@
-v "TARGET_ARCH=$(TARGET_ARCH)" \
-v "TARGET_CPU_ABI=$(TARGET_CPU_ABI)" \
-v "DLL_EXTENSION=$(HOST_SHLIB_SUFFIX)" \
- -v "FONT_OUT=$(SDK_FONT_TEMP)" \
-o $(PRIVATE_DIR) && \
HOST_OUT_EXECUTABLES=$(HOST_OUT_EXECUTABLES) HOST_OS=$(HOST_OS) \
development/build/tools/sdk_clean.sh $(PRIVATE_DIR) && \
@@ -6975,7 +6911,6 @@
# depended on by each module in soong_cc_prebuilt.mk, where the module will have
# a dependency on each shared library that it needs to be "reinstalled".
FUZZ_SHARED_DEPS := $(call copy-many-files,$(strip $(FUZZ_TARGET_SHARED_DEPS_INSTALL_PAIRS)))
-AFL_FUZZ_SHARED_DEPS := $(call copy-many-files,$(strip $(AFL_FUZZ_TARGET_SHARED_DEPS_INSTALL_PAIRS)))
# -----------------------------------------------------------------
# The rule to build all fuzz targets for C++ and Rust, and package them.
@@ -6991,10 +6926,6 @@
haiku: $(SOONG_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_FUZZ_TARGETS)
$(call dist-for-goals,haiku,$(SOONG_FUZZ_PACKAGING_ARCH_MODULES))
-.PHONY: haiku-afl
-haiku-afl: $(SOONG_AFL_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_AFL_FUZZ_TARGETS)
-$(call dist-for-goals,haiku-afl,$(SOONG_AFL_FUZZ_PACKAGING_ARCH_MODULES))
-
.PHONY: haiku-java
haiku-java: $(SOONG_JAVA_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_JAVA_FUZZ_TARGETS)
$(call dist-for-goals,haiku-java,$(SOONG_JAVA_FUZZ_PACKAGING_ARCH_MODULES))
@@ -7003,6 +6934,10 @@
haiku-rust: $(SOONG_RUST_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_RUST_FUZZ_TARGETS)
$(call dist-for-goals,haiku-rust,$(SOONG_RUST_FUZZ_PACKAGING_ARCH_MODULES))
+# -----------------------------------------------------------------
+# Extract platform fonts used in Layoutlib
+include $(BUILD_SYSTEM)/layoutlib_fonts.mk
+
# -----------------------------------------------------------------
# OS Licensing
diff --git a/core/OWNERS b/core/OWNERS
index 980186c..d48ceab 100644
--- a/core/OWNERS
+++ b/core/OWNERS
@@ -1,6 +1,9 @@
per-file *dex_preopt*.* = ngeoffray@google.com,skvadrik@google.com
per-file verify_uses_libraries.sh = ngeoffray@google.com,skvadrik@google.com
+# For global Proguard rules
+per-file proguard*.flags = jdduke@google.com
+
# For version updates
per-file version_defaults.mk = aseaton@google.com,lubomir@google.com,pscovanner@google.com,bkhalife@google.com,jainne@google.com
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index ca4c606..975194c 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -76,7 +76,9 @@
# are controlled by the MODULE_BUILD_FROM_SOURCE environment variable by
# default.
INDIVIDUALLY_TOGGLEABLE_PREBUILT_MODULES := \
+ bluetooth \
permission \
+ uwb \
wifi \
$(foreach m, $(INDIVIDUALLY_TOGGLEABLE_PREBUILT_MODULES),\
@@ -117,3 +119,11 @@
SYSTEM_OPTIMIZE_JAVA ?= true
endif
$(call add_soong_config_var,ANDROID,SYSTEM_OPTIMIZE_JAVA)
+
+# Check for SupplementalApi module.
+ifeq ($(wildcard packages/modules/SupplementalApi),)
+$(call add_soong_config_var_value,ANDROID,include_nonpublic_framework_api,false)
+else
+$(call add_soong_config_var_value,ANDROID,include_nonpublic_framework_api,true)
+endif
+
diff --git a/core/app_prebuilt_internal.mk b/core/app_prebuilt_internal.mk
index 8d057ac..eb429cd 100644
--- a/core/app_prebuilt_internal.mk
+++ b/core/app_prebuilt_internal.mk
@@ -128,6 +128,9 @@
LOCAL_CERTIFICATE := $(dir $(DEFAULT_SYSTEM_DEV_CERTIFICATE))$(LOCAL_CERTIFICATE)
endif
+ # NOTE(ruperts): Consider moving the logic below out of a conditional,
+ # to avoid the possibility of silently ignoring user settings.
+
PACKAGES.$(LOCAL_MODULE).PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
PACKAGES.$(LOCAL_MODULE).CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
PACKAGES := $(PACKAGES) $(LOCAL_MODULE)
@@ -142,6 +145,8 @@
$(built_module): $(LOCAL_CERTIFICATE_LINEAGE)
$(built_module): PRIVATE_CERTIFICATE_LINEAGE := $(LOCAL_CERTIFICATE_LINEAGE)
+
+ $(built_module): PRIVATE_ROTATION_MIN_SDK_VERSION := $(LOCAL_ROTATION_MIN_SDK_VERSION)
endif
ifneq ($(LOCAL_MODULE_STEM),)
diff --git a/core/board_config.mk b/core/board_config.mk
index d280349..a0c16ca 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -283,6 +283,8 @@
$(if $(filter-out true false,$($(var))), \
$(error Valid values of $(var) are "true", "false", and "". Not "$($(var))")))
+include $(BUILD_SYSTEM)/board_config_wifi.mk
+
# Default *_CPU_VARIANT_RUNTIME to CPU_VARIANT if unspecified.
TARGET_CPU_VARIANT_RUNTIME := $(or $(TARGET_CPU_VARIANT_RUNTIME),$(TARGET_CPU_VARIANT))
TARGET_2ND_CPU_VARIANT_RUNTIME := $(or $(TARGET_2ND_CPU_VARIANT_RUNTIME),$(TARGET_2ND_CPU_VARIANT))
diff --git a/core/board_config_wifi.mk b/core/board_config_wifi.mk
new file mode 100644
index 0000000..ddeb0d7
--- /dev/null
+++ b/core/board_config_wifi.mk
@@ -0,0 +1,77 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# ###############################################################
+# This file adds WIFI variables into soong config namespace (`wifi`)
+# ###############################################################
+
+ifdef BOARD_WLAN_DEVICE
+ $(call soong_config_set,wifi,board_wlan_device,$(BOARD_WLAN_DEVICE))
+endif
+ifdef WIFI_DRIVER_MODULE_PATH
+ $(call soong_config_set,wifi,driver_module_path,$(WIFI_DRIVER_MODULE_PATH))
+endif
+ifdef WIFI_DRIVER_MODULE_ARG
+ $(call soong_config_set,wifi,driver_module_arg,$(WIFI_DRIVER_MODULE_ARG))
+endif
+ifdef WIFI_DRIVER_MODULE_NAME
+ $(call soong_config_set,wifi,driver_module_name,$(WIFI_DRIVER_MODULE_NAME))
+endif
+ifdef WIFI_DRIVER_FW_PATH_STA
+ $(call soong_config_set,wifi,driver_fw_path_sta,$(WIFI_DRIVER_FW_PATH_STA))
+endif
+ifdef WIFI_DRIVER_FW_PATH_AP
+ $(call soong_config_set,wifi,driver_fw_path_ap,$(WIFI_DRIVER_FW_PATH_AP))
+endif
+ifdef WIFI_DRIVER_FW_PATH_P2P
+ $(call soong_config_set,wifi,driver_fw_path_p2p,$(WIFI_DRIVER_FW_PATH_P2P))
+endif
+ifdef WIFI_DRIVER_FW_PATH_PARAM
+ $(call soong_config_set,wifi,driver_fw_path_param,$(WIFI_DRIVER_FW_PATH_PARAM))
+endif
+ifdef WIFI_DRIVER_STATE_CTRL_PARAM
+ $(call soong_config_set,wifi,driver_state_ctrl_param,$(WIFI_DRIVER_STATE_CTRL_PARAM))
+endif
+ifdef WIFI_DRIVER_STATE_ON
+ $(call soong_config_set,wifi,driver_state_on,$(WIFI_DRIVER_STATE_ON))
+endif
+ifdef WIFI_DRIVER_STATE_OFF
+ $(call soong_config_set,wifi,driver_state_off,$(WIFI_DRIVER_STATE_OFF))
+endif
+ifdef WIFI_MULTIPLE_VENDOR_HALS
+ $(call soong_config_set,wifi,multiple_vendor_hals,$(WIFI_MULTIPLE_VENDOR_HALS))
+endif
+ifneq ($(wildcard vendor/google/libraries/GoogleWifiConfigLib),)
+ $(call soong_config_set,wifi,google_wifi_config_lib,true)
+endif
+ifdef WIFI_HAL_INTERFACE_COMBINATIONS
+ $(call soong_config_set,wifi,hal_interface_combinations,$(WIFI_HAL_INTERFACE_COMBINATIONS))
+endif
+ifdef WIFI_HIDL_FEATURE_AWARE
+ $(call soong_config_set,wifi,hidl_feature_aware,true)
+endif
+ifdef WIFI_HIDL_FEATURE_DUAL_INTERFACE
+ $(call soong_config_set,wifi,hidl_feature_dual_interface,true)
+endif
+ifdef WIFI_HIDL_FEATURE_DISABLE_AP
+ $(call soong_config_set,wifi,hidl_feature_disable_ap,true)
+endif
+ifdef WIFI_HIDL_FEATURE_DISABLE_AP_MAC_RANDOMIZATION
+ $(call soong_config_set,wifi,hidl_feature_disable_ap_mac_randomization,true)
+endif
+ifdef WIFI_AVOID_IFACE_RESET_MAC_CHANGE
+ $(call soong_config_set,wifi,avoid_iface_reset_mac_change,true)
+endif
\ No newline at end of file
diff --git a/core/config.mk b/core/config.mk
index 856cb48..9e4b93a 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -161,7 +161,10 @@
$(KATI_obsolete_var PRODUCT_CHECK_ELF_FILES,Use BUILD_BROKEN_PREBUILT_ELF_FILES instead)
$(KATI_obsolete_var ALL_GENERATED_SOURCES,ALL_GENERATED_SOURCES is no longer used)
$(KATI_obsolete_var ALL_ORIGINAL_DYNAMIC_BINARIES,ALL_ORIGINAL_DYNAMIC_BINARIES is no longer used)
-
+$(KATI_obsolete_var PRODUCT_SUPPORTS_VERITY,VB 1.0 and related variables are no longer supported)
+$(KATI_obsolete_var PRODUCT_SUPPORTS_VERITY_FEC,VB 1.0 and related variables are no longer supported)
+$(KATI_obsolete_var PRODUCT_SUPPORTS_BOOT_SIGNER,VB 1.0 and related variables are no longer supported)
+$(KATI_obsolete_var PRODUCT_VERITY_SIGNING_KEY,VB 1.0 and related variables are no longer supported)
# Used to force goals to build. Only use for conditionally defined goals.
.PHONY: FORCE
FORCE:
@@ -629,10 +632,8 @@
VERITY_SIGNER := $(HOST_OUT_EXECUTABLES)/verity_signer
BUILD_VERITY_METADATA := $(HOST_OUT_EXECUTABLES)/build_verity_metadata
BUILD_VERITY_TREE := $(HOST_OUT_EXECUTABLES)/build_verity_tree
-BOOT_SIGNER := $(HOST_OUT_EXECUTABLES)/boot_signer
FUTILITY := $(HOST_OUT_EXECUTABLES)/futility-host
VBOOT_SIGNER := $(HOST_OUT_EXECUTABLES)/vboot_signer
-FEC := $(HOST_OUT_EXECUTABLES)/fec
DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump$(BUILD_EXECUTABLE_SUFFIX)
PROFMAN := $(HOST_OUT_EXECUTABLES)/profman
@@ -787,6 +788,7 @@
else
MAINLINE_SEPOLICY_DEV_CERTIFICATES := $(dir $(DEFAULT_SYSTEM_DEV_CERTIFICATE))
endif
+.KATI_READONLY := MAINLINE_SEPOLICY_DEV_CERTIFICATES
BUILD_NUMBER_FROM_FILE := $$(cat $(SOONG_OUT_DIR)/build_number.txt)
BUILD_DATETIME_FROM_FILE := $$(cat $(BUILD_DATETIME_FILE))
@@ -803,7 +805,7 @@
# is made which breaks compatibility with the previous platform sepolicy version,
# not just on every increase in PLATFORM_SDK_VERSION. The minor version should
# be reset to 0 on every bump of the PLATFORM_SDK_VERSION.
-sepolicy_major_vers := 32
+sepolicy_major_vers := 33
sepolicy_minor_vers := 0
ifneq ($(sepolicy_major_vers), $(PLATFORM_SDK_VERSION))
@@ -957,16 +959,6 @@
$(eval .KATI_READONLY := BOARD_$(group)_PARTITION_LIST) \
)
-# BOARD_*_PARTITION_LIST: a list of the following tokens
-valid_super_partition_list := system vendor product system_ext odm vendor_dlkm odm_dlkm system_dlkm
-$(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)), \
- $(if $(filter-out $(valid_super_partition_list),$(BOARD_$(group)_PARTITION_LIST)), \
- $(error BOARD_$(group)_PARTITION_LIST contains invalid partition name \
- $(filter-out $(valid_super_partition_list),$(BOARD_$(group)_PARTITION_LIST)). \
- Valid names are $(valid_super_partition_list))))
-valid_super_partition_list :=
-
-
# Define BOARD_SUPER_PARTITION_PARTITION_LIST, the sum of all BOARD_*_PARTITION_LIST
ifdef BOARD_SUPER_PARTITION_PARTITION_LIST
$(error BOARD_SUPER_PARTITION_PARTITION_LIST should not be defined, but computed from \
diff --git a/core/definitions.mk b/core/definitions.mk
index cbb1613..d3eab95 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2822,6 +2822,7 @@
$(hide) mv $(1) $(1).unsigned
$(hide) $(JAVA) -Djava.library.path=$$(dirname $(SIGNAPK_JNI_LIBRARY_PATH)) -jar $(SIGNAPK_JAR) \
$(if $(strip $(PRIVATE_CERTIFICATE_LINEAGE)), --lineage $(PRIVATE_CERTIFICATE_LINEAGE)) \
+ $(if $(strip $(PRIVATE_ROTATION_MIN_SDK_VERSION)), --rotation-min-sdk-version $(PRIVATE_ROTATION_MIN_SDK_VERSION)) \
$(PRIVATE_CERTIFICATE) $(PRIVATE_PRIVATE_KEY) \
$(PRIVATE_ADDITIONAL_CERTIFICATES) $(1).unsigned $(1).signed
$(hide) mv $(1).signed $(1)
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 216168b..b303b52 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -245,7 +245,7 @@
$(my_enforced_uses_libraries): PRIVATE_OPTIONAL_USES_LIBRARIES := $(my_optional_uses_libs_args)
$(my_enforced_uses_libraries): PRIVATE_DEXPREOPT_CONFIGS := $(my_dexpreopt_config_args)
$(my_enforced_uses_libraries): PRIVATE_RELAX_CHECK := $(my_relax_check_arg)
- $(my_enforced_uses_libraries): $(AAPT)
+ $(my_enforced_uses_libraries): $(AAPT2)
$(my_enforced_uses_libraries): $(my_verify_script)
$(my_enforced_uses_libraries): $(my_dexpreopt_dep_configs)
$(my_enforced_uses_libraries): $(my_manifest_or_apk)
@@ -254,7 +254,7 @@
$(my_verify_script) \
--enforce-uses-libraries \
--enforce-uses-libraries-status $@ \
- --aapt $(AAPT) \
+ --aapt $(AAPT2) \
$(PRIVATE_USES_LIBRARIES) \
$(PRIVATE_OPTIONAL_USES_LIBRARIES) \
$(PRIVATE_DEXPREOPT_CONFIGS) \
diff --git a/core/layoutlib_fonts.mk b/core/layoutlib_fonts.mk
new file mode 100644
index 0000000..d2a814f
--- /dev/null
+++ b/core/layoutlib_fonts.mk
@@ -0,0 +1,35 @@
+# Fonts for layoutlib
+
+FONT_TEMP := $(call intermediates-dir-for,PACKAGING,fonts,HOST,COMMON)
+
+# The font configuration files - system_fonts.xml, fallback_fonts.xml etc.
+font_config := $(sort $(wildcard frameworks/base/data/fonts/*.xml))
+font_config := $(addprefix $(FONT_TEMP)/, $(notdir $(font_config)))
+
+$(font_config): $(FONT_TEMP)/%.xml: \
+ frameworks/base/data/fonts/%.xml
+ $(hide) mkdir -p $(dir $@)
+ $(hide) cp -vf $< $@
+
+# List of fonts on the device that we want to ship. This is all .ttf, .ttc and .otf fonts.
+fonts_device := $(filter $(TARGET_OUT)/fonts/%.ttf $(TARGET_OUT)/fonts/%.ttc $(TARGET_OUT)/fonts/%.otf, $(INTERNAL_SYSTEMIMAGE_FILES))
+fonts_device := $(addprefix $(FONT_TEMP)/, $(notdir $(fonts_device)))
+
+# TODO: If the font file is a symlink, reuse the font renamed from the symlink
+# target.
+$(fonts_device): $(FONT_TEMP)/%: $(TARGET_OUT)/fonts/%
+ $(hide) mkdir -p $(dir $@)
+ $(hide) cp -vf $< $@
+
+# List of all dependencies - all fonts and configuration files.
+FONT_FILES := $(fonts_device) $(font_config)
+
+.PHONY: layoutlib layoutlib-tests
+layoutlib layoutlib-tests: $(FONT_FILES)
+
+$(call dist-for-goals, layoutlib, $(foreach m,$(FONT_FILES), $(m):layoutlib_native/fonts/$(notdir $(m))))
+
+FONT_TEMP :=
+font_config :=
+fonts_device :=
+FONT_FILES :=
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 8199ad2..c7a173b 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -481,6 +481,8 @@
$(LOCAL_BUILT_MODULE): $(LOCAL_CERTIFICATE_LINEAGE)
$(LOCAL_BUILT_MODULE): PRIVATE_CERTIFICATE_LINEAGE := $(LOCAL_CERTIFICATE_LINEAGE)
+$(LOCAL_BUILT_MODULE): PRIVATE_ROTATION_MIN_SDK_VERSION := $(LOCAL_ROTATION_MIN_SDK_VERSION)
+
# Set a actual_partition_tag (calculated in base_rules.mk) for the package.
PACKAGES.$(LOCAL_PACKAGE_NAME).PARTITION := $(actual_partition_tag)
diff --git a/core/product.mk b/core/product.mk
index 7351313..ee2fa5a 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -136,10 +136,7 @@
# PRODUCT_BOOT_JARS, so that device-specific jars go after common jars.
_product_list_vars += PRODUCT_BOOT_JARS_EXTRA
-_product_single_value_vars += PRODUCT_SUPPORTS_BOOT_SIGNER
_product_single_value_vars += PRODUCT_SUPPORTS_VBOOT
-_product_single_value_vars += PRODUCT_SUPPORTS_VERITY
-_product_single_value_vars += PRODUCT_SUPPORTS_VERITY_FEC
_product_list_vars += PRODUCT_SYSTEM_SERVER_APPS
# List of system_server classpath jars on the platform.
_product_list_vars += PRODUCT_SYSTEM_SERVER_JARS
@@ -168,7 +165,6 @@
_product_list_vars += PRODUCT_LOADED_BY_PRIVILEGED_MODULES
_product_single_value_vars += PRODUCT_VBOOT_SIGNING_KEY
_product_single_value_vars += PRODUCT_VBOOT_SIGNING_SUBKEY
-_product_single_value_vars += PRODUCT_VERITY_SIGNING_KEY
_product_single_value_vars += PRODUCT_SYSTEM_VERITY_PARTITION
_product_single_value_vars += PRODUCT_VENDOR_VERITY_PARTITION
_product_single_value_vars += PRODUCT_PRODUCT_VERITY_PARTITION
diff --git a/core/product_config.mk b/core/product_config.mk
index 540289a..198dde4 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -429,7 +429,7 @@
# Show a warning wall of text if non-compliance-GSI products set this option.
ifdef PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT
- ifeq (,$(filter gsi_arm gsi_arm64 gsi_x86 gsi_x86_64 gsi_car_arm64 gsi_car_x86_64,$(PRODUCT_NAME)))
+ ifeq (,$(filter gsi_arm gsi_arm64 gsi_x86 gsi_x86_64 gsi_car_arm64 gsi_car_x86_64 gsi_tv_arm gsi_tv_arm64,$(PRODUCT_NAME)))
$(warning PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT is set but \
PRODUCT_NAME ($(PRODUCT_NAME)) doesn't look like a GSI for compliance \
testing. This is a special configuration for compliance GSI, so do make \
diff --git a/core/proguard.flags b/core/proguard.flags
index aee5271..53f63d8 100644
--- a/core/proguard.flags
+++ b/core/proguard.flags
@@ -9,10 +9,15 @@
# Add this flag in your package's own configuration if it's needed.
#-flattenpackagehierarchy
-# Keep classes and methods that have the guava @VisibleForTesting annotation
--keep @**.VisibleForTesting class *
--keepclassmembers class * {
-@**.VisibleForTesting *;
+# Keep classes and methods that have @VisibleForTesting annotations, except in
+# intermediate libraries that export those annotations (e.g., androidx, guava).
+# This avoids keeping library-specific test code that isn't actually needed
+# for platform testing.
+# TODO(b/239961360): Migrate away from androidx.annotation.VisibleForTesting
+# and com.google.common.annotations.VisibleForTesting use in platform code.
+-keep @**.VisibleForTesting class !androidx.**,!com.google.common.**,*
+-keepclassmembers class !androidx.**,!com.google.common.**,* {
+ @**.VisibleForTesting *;
}
# Keep rule for members that are needed solely to keep alive downstream weak
diff --git a/core/proguard_basic_keeps.flags b/core/proguard_basic_keeps.flags
index b5d14fa..38feec3 100644
--- a/core/proguard_basic_keeps.flags
+++ b/core/proguard_basic_keeps.flags
@@ -75,6 +75,19 @@
# has a fallback, but again, don't use Futures.getChecked on Android regardless.
-dontwarn java.lang.ClassValue
+# Ignore missing annotation references for various support libraries.
+# While this is not ideal, it should be relatively safe given that
+# 1) runtime-visible annotations will still be kept, and 2) compile-time
+# annotations are stripped by R8 anyway.
+# Note: The ** prefix is used to accommodate jarjar repackaging.
+# TODO(b/242088131): Remove these exemptions after resolving transitive libs
+# dependencies that are provided to R8.
+-dontwarn **android**.annotation*.**
+-dontwarn **com.google.errorprone.annotations.**
+-dontwarn javax.annotation.**
+-dontwarn org.checkerframework.**
+-dontwarn org.jetbrains.annotations.**
+
# Less spammy.
-dontnote
diff --git a/core/rbe.mk b/core/rbe.mk
index 90328d3..8566ff0 100644
--- a/core/rbe.mk
+++ b/core/rbe.mk
@@ -46,12 +46,6 @@
cxx_compare := false
endif
- ifdef RBE_CXX_COMPARE
- cxx_compare := $(RBE_CXX_COMPARE)
- else
- cxx_compare := "false"
- endif
-
ifdef RBE_JAVAC_EXEC_STRATEGY
javac_exec_strategy := $(RBE_JAVAC_EXEC_STRATEGY)
else
diff --git a/core/sdk_font.mk b/core/sdk_font.mk
deleted file mode 100644
index 1742925..0000000
--- a/core/sdk_font.mk
+++ /dev/null
@@ -1,66 +0,0 @@
-###############################################################################
-# Fonts shipped with the SDK need to be renamed for Java to handle them
-# properly. Hence, a special script is used to rename the fonts. We bundle all
-# the fonts that are shipped on a newer non-space-constrained device. However,
-# OpenType fonts used on these devices are not supported by Java. Their
-# replacements are added separately.
-###############################################################################
-
-
-# The script that renames the font.
-sdk_font_rename_script := frameworks/layoutlib/rename_font/build_font_single.py
-
-# Location of the fonttools library that the above script depends on.
-fonttools_lib := external/fonttools/Lib
-
-# A temporary location to store the renamed fonts. atree picks all files in
-# this directory and bundles it with the SDK.
-SDK_FONT_TEMP := $(call intermediates-dir-for,PACKAGING,sdk-fonts,HOST,COMMON)
-
-# The font configuration files - system_fonts.xml, fallback_fonts.xml etc.
-sdk_font_config := $(sort $(wildcard frameworks/base/data/fonts/*.xml))
-sdk_font_config := $(addprefix $(SDK_FONT_TEMP)/standard/, $(notdir $(sdk_font_config)))
-
-$(sdk_font_config): $(SDK_FONT_TEMP)/standard/%.xml: \
- frameworks/base/data/fonts/%.xml
- $(hide) mkdir -p $(dir $@)
- $(hide) cp -vf $< $@
-
-# List of fonts on the device that we want to ship. This is all .ttf fonts.
-sdk_fonts_device := $(filter $(TARGET_OUT)/fonts/%.ttf, $(INTERNAL_SYSTEMIMAGE_FILES))
-sdk_fonts_device := $(addprefix $(SDK_FONT_TEMP)/, $(notdir $(sdk_fonts_device)))
-
-# Macro to rename the font.
-sdk_rename_font = PYTHONPATH=$$PYTHONPATH:$(fonttools_lib) $(sdk_font_rename_script) \
- $1 $2
-
-# TODO: If the font file is a symlink, reuse the font renamed from the symlink
-# target.
-$(sdk_fonts_device): $(SDK_FONT_TEMP)/%.ttf: $(TARGET_OUT)/fonts/%.ttf \
- $(sdk_font_rename_script)
- $(hide) mkdir -p $(dir $@)
- $(hide) $(call sdk_rename_font,$<,$@)
-
-# List of all dependencies - all fonts and configuration files.
-SDK_FONT_DEPS := $(sdk_fonts_device) $(sdk_font_config)
-
-# Define a macro to create rule for addititional fonts that we want to include
-# in the SDK.
-# $1 Output font name
-# $2 Source font path
-define sdk-extra-font-rule
-fontfullname := $$(SDK_FONT_TEMP)/$1
-ifeq ($$(filter $$(fontfullname),$$(sdk_fonts_device)),)
-SDK_FONT_DEPS += $$(fontfullname)
-$$(fontfullname): $2 $$(sdk_font_rename_script)
- $$(hide) mkdir -p $$(dir $$@)
- $$(hide) $$(call sdk_rename_font,$$<,$$@)
-endif
-fontfullname :=
-endef
-
-# These extra fonts are used as a replacement for OpenType fonts.
-$(eval $(call sdk-extra-font-rule,NanumGothic.ttf,external/naver-fonts/NanumGothic.ttf))
-$(eval $(call sdk-extra-font-rule,DroidSansFallback.ttf,frameworks/base/data/fonts/DroidSansFallbackFull.ttf))
-
-sdk-extra-font-rule :=
diff --git a/core/soong_config.mk b/core/soong_config.mk
index c4a40af..28ceebd 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -94,6 +94,7 @@
$(call add_json_list, AAPTPrebuiltDPI, $(PRODUCT_AAPT_PREBUILT_DPI))
$(call add_json_str, DefaultAppCertificate, $(PRODUCT_DEFAULT_DEV_CERTIFICATE))
+$(call add_json_str, MainlineSepolicyDevCertificates, $(MAINLINE_SEPOLICY_DEV_CERTIFICATES))
$(call add_json_str, AppsDefaultVersionName, $(APPS_DEFAULT_VERSION_NAME))
@@ -250,7 +251,7 @@
$(foreach namespace,$(SOONG_CONFIG_NAMESPACES),\
$(call add_json_map, $(namespace))\
$(foreach key,$(SOONG_CONFIG_$(namespace)),\
- $(call add_json_str,$(key),$(SOONG_CONFIG_$(namespace)_$(key))))\
+ $(call add_json_str,$(key),$(subst ",\",$(SOONG_CONFIG_$(namespace)_$(key)))))\
$(call end_json_map))
$(call end_json_map)
diff --git a/core/tasks/build_custom_images.mk b/core/tasks/build_custom_images.mk
index c9b07da..680ad11 100644
--- a/core/tasks/build_custom_images.mk
+++ b/core/tasks/build_custom_images.mk
@@ -62,8 +62,6 @@
CUSTOM_IMAGE_MODULES \
CUSTOM_IMAGE_COPY_FILES \
CUSTOM_IMAGE_SELINUX \
- CUSTOM_IMAGE_SUPPORT_VERITY \
- CUSTOM_IMAGE_SUPPORT_VERITY_FEC \
CUSTOM_IMAGE_VERITY_BLOCK_DEVICE \
CUSTOM_IMAGE_AVB_HASH_ENABLE \
CUSTOM_IMAGE_AVB_ADD_HASH_FOOTER_ARGS \
diff --git a/core/tasks/tools/build_custom_image.mk b/core/tasks/tools/build_custom_image.mk
index f9ae2c1..2626120 100644
--- a/core/tasks/tools/build_custom_image.mk
+++ b/core/tasks/tools/build_custom_image.mk
@@ -91,9 +91,6 @@
$(my_built_custom_image): PRIVATE_COPY_PAIRS := $(my_copy_pairs)
$(my_built_custom_image): PRIVATE_PICKUP_FILES := $(my_pickup_files)
$(my_built_custom_image): PRIVATE_SELINUX := $(CUSTOM_IMAGE_SELINUX)
-$(my_built_custom_image): PRIVATE_SUPPORT_VERITY := $(CUSTOM_IMAGE_SUPPORT_VERITY)
-$(my_built_custom_image): PRIVATE_SUPPORT_VERITY_FEC := $(CUSTOM_IMAGE_SUPPORT_VERITY_FEC)
-$(my_built_custom_image): PRIVATE_VERITY_KEY := $(PRODUCT_VERITY_SIGNING_KEY)
$(my_built_custom_image): PRIVATE_VERITY_BLOCK_DEVICE := $(CUSTOM_IMAGE_VERITY_BLOCK_DEVICE)
$(my_built_custom_image): PRIVATE_DICT_FILE := $(CUSTOM_IMAGE_DICT_FILE)
$(my_built_custom_image): PRIVATE_AVB_AVBTOOL := $(AVBTOOL)
@@ -108,9 +105,6 @@
else ifneq (,$(filter true, $(CUSTOM_IMAGE_AVB_HASH_ENABLE) $(CUSTOM_IMAGE_AVB_HASHTREE_ENABLE)))
$(error Cannot set both CUSTOM_IMAGE_AVB_HASH_ENABLE and CUSTOM_IMAGE_AVB_HASHTREE_ENABLE to true)
endif
-ifeq (true,$(CUSTOM_IMAGE_SUPPORT_VERITY_FEC))
- $(my_built_custom_image): $(FEC)
-endif
$(my_built_custom_image): $(INTERNAL_USERIMAGES_DEPS) $(my_built_modules) $(my_image_copy_files) $(my_custom_image_modules_dep) \
$(CUSTOM_IMAGE_DICT_FILE)
@echo "Build image $@"
@@ -130,13 +124,6 @@
$(hide) echo "partition_size=$(PRIVATE_PARTITION_SIZE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
$(hide) echo "ext_mkuserimg=$(notdir $(MKEXTUSERIMG))" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
$(if $(PRIVATE_SELINUX),$(hide) echo "selinux_fc=$(SELINUX_FC)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
- $(if $(PRIVATE_SUPPORT_VERITY),\
- $(hide) echo "verity=$(PRIVATE_SUPPORT_VERITY)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
- echo "verity_key=$(PRIVATE_VERITY_KEY)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
- echo "verity_signer_cmd=$(VERITY_SIGNER)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
- echo "verity_block_device=$(PRIVATE_VERITY_BLOCK_DEVICE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
- $(if $(PRIVATE_SUPPORT_VERITY_FEC),\
- $(hide) echo "verity_fec=$(PRIVATE_SUPPORT_VERITY_FEC)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
$(if $(filter eng, $(TARGET_BUILD_VARIANT)),$(hide) echo "verity_disable=true" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
$(hide) echo "avb_avbtool=$(PRIVATE_AVB_AVBTOOL)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
$(if $(PRIVATE_AVB_KEY_PATH),\
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index ce25ee2..8c74c72 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -42,24 +42,23 @@
DEFAULT_PLATFORM_VERSION := UP1A
.KATI_READONLY := DEFAULT_PLATFORM_VERSION
-MIN_PLATFORM_VERSION := TP1A
+MIN_PLATFORM_VERSION := UP1A
MAX_PLATFORM_VERSION := UP1A
# The last stable version name of the platform that was released. During
# development, this stays at that previous version, while the codename indicates
# further work based on the previous version.
-PLATFORM_VERSION_LAST_STABLE := 12
+PLATFORM_VERSION_LAST_STABLE := 13
.KATI_READONLY := PLATFORM_VERSION_LAST_STABLE
# These are the current development codenames, if the build is not a final
# release build. If this is a final release build, it is simply "REL".
-PLATFORM_VERSION_CODENAME.TP1A := Tiramisu
PLATFORM_VERSION_CODENAME.UP1A := UpsideDownCake
# This is the user-visible version. In a final release build it should
# be empty to use PLATFORM_VERSION as the user-visible version. For
# a preview release it can be set to a user-friendly value like `12 Preview 1`
-PLATFORM_DISPLAY_VERSION :=
+PLATFORM_DISPLAY_VERSION := 13
ifndef PLATFORM_SDK_VERSION
# This is the canonical definition of the SDK version, which defines
@@ -74,16 +73,16 @@
# When you increment the PLATFORM_SDK_VERSION please ensure you also
# clear out the following text file of all older PLATFORM_VERSION's:
# cts/tests/tests/os/assets/platform_versions.txt
- PLATFORM_SDK_VERSION := 32
+ PLATFORM_SDK_VERSION := 33
endif
.KATI_READONLY := PLATFORM_SDK_VERSION
# This is the sdk extension version of this tree.
-PLATFORM_SDK_EXTENSION_VERSION := 1
+PLATFORM_SDK_EXTENSION_VERSION := 3
.KATI_READONLY := PLATFORM_SDK_EXTENSION_VERSION
# This is the sdk extension version that PLATFORM_SDK_VERSION ships with.
-PLATFORM_BASE_SDK_EXTENSION_VERSION := 1
+PLATFORM_BASE_SDK_EXTENSION_VERSION := 3
.KATI_READONLY := PLATFORM_BASE_SDK_EXTENSION_VERSION
# This are all known codenames.
@@ -104,9 +103,7 @@
# It must be of the form "YYYY-MM-DD" on production devices.
# It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
# If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
- PLATFORM_SECURITY_PATCH := 2022-07-05
+ PLATFORM_SECURITY_PATCH := 2022-08-05
endif
-.KATI_READONLY := PLATFORM_SECURITY_PATCH
include $(BUILD_SYSTEM)/version_util.mk
-
diff --git a/envsetup.sh b/envsetup.sh
index 8856212..5c95479 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -455,10 +455,19 @@
{
local code
local results
+ # Lunch must be run in the topdir, but this way we get a clear error
+ # message instead of a FileNotFound error.
+ local T=$(multitree_gettop)
+ if [ -n "$T" ]; then
+ "$T/build/build/make/orchestrator/core/orchestrator.py" "$@"
+ else
+ _multitree_lunch_error
+ return 1
+ fi
if $(echo "$1" | grep -q '^-') ; then
# Calls starting with a -- argument are passed directly and the function
# returns with the lunch.py exit code.
- build/build/make/orchestrator/core/lunch.py "$@"
+ "${T}/build/build/make/orchestrator/core/lunch.py" "$@"
code=$?
if [[ $code -eq 2 ]] ; then
echo 1>&2
@@ -469,7 +478,7 @@
fi
else
# All other calls go through the --lunch variant of lunch.py
- results=($(build/build/make/orchestrator/core/lunch.py --lunch "$@"))
+ results=($("${T}/build/build/make/orchestrator/core/lunch.py" --lunch "$@"))
code=$?
if [[ $code -eq 2 ]] ; then
echo 1>&2
@@ -790,6 +799,10 @@
set_stuff_for_environment
[[ -n "${ANDROID_QUIET_BUILD:-}" ]] || printconfig
destroy_build_var_cache
+
+ if [[ -n "${CHECK_MU_CONFIG:-}" ]]; then
+ check_mu_config
+ fi
}
unset COMMON_LUNCH_CHOICES_CACHE
@@ -1813,7 +1826,8 @@
function _trigger_build()
(
local -r bc="$1"; shift
- if T="$(gettop)"; then
+ local T=$(gettop)
+ if [ -n "$T" ]; then
_wrap_build "$T/build/soong/soong_ui.bash" --build-mode --${bc} --dir="$(pwd)" "$@"
else
>&2 echo "Couldn't locate the top of the tree. Try setting TOP."
@@ -1873,8 +1887,9 @@
function multitree_build()
{
- if T="$(multitree_gettop)"; then
- "$T/build/build/orchestrator/core/orchestrator.py" "$@"
+ local T=$(multitree_gettop)
+ if [ -n "$T" ]; then
+ "$T/build/build/make/orchestrator/core/orchestrator.py" "$@"
else
_multitree_lunch_error
return 1
diff --git a/orchestrator/README b/orchestrator/README
deleted file mode 100644
index 9a1e302..0000000
--- a/orchestrator/README
+++ /dev/null
@@ -1,8 +0,0 @@
-DEMO
-
-from the root of the workspace
-
-multitree_lunch build/build/make/orchestrator/test_workspace/combo.mcombo eng
-
-rm -rf out && multitree_build && echo "==== Files ====" && find out -type f
-
diff --git a/orchestrator/core/api_assembly.py b/orchestrator/core/api_assembly.py
deleted file mode 100644
index d7abef7..0000000
--- a/orchestrator/core/api_assembly.py
+++ /dev/null
@@ -1,156 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import json
-import os
-import sys
-
-import api_assembly_cc
-import ninja_tools
-
-
-ContributionData = collections.namedtuple("ContributionData", ("inner_tree", "json_data"))
-
-def assemble_apis(context, inner_trees):
- # Find all of the contributions from the inner tree
- contribution_files_dict = inner_trees.for_each_tree(api_contribution_files_for_inner_tree)
-
- # Load and validate the contribution files
- # TODO: Check timestamps and skip unnecessary work
- contributions = []
- for tree_key, filenames in contribution_files_dict.items():
- for filename in filenames:
- json_data = load_contribution_file(context, filename)
- if not json_data:
- continue
- # TODO: Validate the configs, especially that the domains match what we asked for
- # from the lunch config.
- contributions.append(ContributionData(inner_trees.get(tree_key), json_data))
-
- # Group contributions by language and API surface
- stub_libraries = collate_contributions(contributions)
-
- # Initialize the ninja file writer
- with open(context.out.api_ninja_file(), "w") as ninja_file:
- ninja = ninja_tools.Ninja(context, ninja_file)
-
- # Initialize the build file writer
- build_file = BuildFile() # TODO: parameters?
-
- # Iterate through all of the stub libraries and generate rules to assemble them
- # and Android.bp/BUILD files to make those available to inner trees.
- # TODO: Parallelize? Skip unnecessary work?
- for stub_library in stub_libraries:
- STUB_LANGUAGE_HANDLERS[stub_library.language](context, ninja, build_file, stub_library)
-
- # TODO: Handle host_executables separately or as a StubLibrary language?
-
- # Finish writing the ninja file
- ninja.write()
-
-
-def api_contribution_files_for_inner_tree(tree_key, inner_tree, cookie):
- "Scan an inner_tree's out dir for the api contribution files."
- directory = inner_tree.out.api_contributions_dir()
- result = []
- with os.scandir(directory) as it:
- for dirent in it:
- if not dirent.is_file():
- break
- if dirent.name.endswith(".json"):
- result.append(os.path.join(directory, dirent.name))
- return result
-
-
-def load_contribution_file(context, filename):
- "Load and return the API contribution at filename. On error report error and return None."
- with open(filename) as f:
- try:
- return json.load(f)
- except json.decoder.JSONDecodeError as ex:
- # TODO: Error reporting
- context.errors.error(ex.msg, filename, ex.lineno, ex.colno)
- raise ex
-
-
-class StubLibraryContribution(object):
- def __init__(self, inner_tree, api_domain, library_contribution):
- self.inner_tree = inner_tree
- self.api_domain = api_domain
- self.library_contribution = library_contribution
-
-
-class StubLibrary(object):
- def __init__(self, language, api_surface, api_surface_version, name):
- self.language = language
- self.api_surface = api_surface
- self.api_surface_version = api_surface_version
- self.name = name
- self.contributions = []
-
- def add_contribution(self, contrib):
- self.contributions.append(contrib)
-
-
-def collate_contributions(contributions):
- """Take the list of parsed API contribution files, and group targets by API Surface, version,
- language and library name, and return a StubLibrary object for each of those.
- """
- grouped = {}
- for contribution in contributions:
- for language in STUB_LANGUAGE_HANDLERS.keys():
- for library in contribution.json_data.get(language, []):
- key = (language, contribution.json_data["name"],
- contribution.json_data["version"], library["name"])
- stub_library = grouped.get(key)
- if not stub_library:
- stub_library = StubLibrary(language, contribution.json_data["name"],
- contribution.json_data["version"], library["name"])
- grouped[key] = stub_library
- stub_library.add_contribution(StubLibraryContribution(contribution.inner_tree,
- contribution.json_data["api_domain"], library))
- return list(grouped.values())
-
-
-def assemble_java_api_library(context, ninja, build_file, stub_library):
- print("assembling java_api_library %s-%s %s from:" % (stub_library.api_surface,
- stub_library.api_surface_version, stub_library.name))
- for contrib in stub_library.contributions:
- print(" %s %s" % (contrib.api_domain, contrib.library_contribution["api"]))
- # TODO: Implement me
-
-
-def assemble_resource_api_library(context, ninja, build_file, stub_library):
- print("assembling resource_api_library %s-%s %s from:" % (stub_library.api_surface,
- stub_library.api_surface_version, stub_library.name))
- for contrib in stub_library.contributions:
- print(" %s %s" % (contrib.api_domain, contrib.library_contribution["api"]))
- # TODO: Implement me
-
-
-STUB_LANGUAGE_HANDLERS = {
- "cc_libraries": api_assembly_cc.assemble_cc_api_library,
- "java_libraries": assemble_java_api_library,
- "resource_libraries": assemble_resource_api_library,
-}
-
-
-class BuildFile(object):
- "Abstract generator for Android.bp files and BUILD files."
- pass
-
-
diff --git a/orchestrator/core/api_assembly_cc.py b/orchestrator/core/api_assembly_cc.py
deleted file mode 100644
index ca9b2a4..0000000
--- a/orchestrator/core/api_assembly_cc.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-
-def assemble_cc_api_library(context, ninja, build_file, stub_library):
- staging_dir = context.out.api_library_dir(stub_library.api_surface,
- stub_library.api_surface_version, stub_library.name)
- work_dir = context.out.api_library_work_dir(stub_library.api_surface,
- stub_library.api_surface_version, stub_library.name)
-
- # Generate rules to copy headers
- includes = []
- include_dir = os.path.join(staging_dir, "include")
- for contrib in stub_library.contributions:
- for headers in contrib.library_contribution["headers"]:
- root = headers["root"]
- for file in headers["files"]:
- # TODO: Deal with collisions of the same name from multiple contributions
- include = os.path.join(include_dir, file)
- ninja.add_copy_file(include, os.path.join(contrib.inner_tree.root, root, file))
- includes.append(include)
-
- # Generate rule to run ndkstubgen
-
-
- # Generate rule to compile stubs to library
-
- # Generate phony rule to build the library
- # TODO: This name probably conflictgs with something
- ninja.add_phony("-".join((stub_library.api_surface, str(stub_library.api_surface_version),
- stub_library.name)), includes)
-
- # Generate build files
-
diff --git a/orchestrator/core/api_domain.py b/orchestrator/core/api_domain.py
deleted file mode 100644
index bb7306c..0000000
--- a/orchestrator/core/api_domain.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-class ApiDomain(object):
- def __init__(self, name, tree, product):
- # Product will be null for modules
- self.name = name
- self.tree = tree
- self.product = product
-
- def __str__(self):
- return "ApiDomain(name=\"%s\" tree.root=\"%s\" product=%s)" % (
- self.name, self.tree.root,
- "None" if self.product is None else "\"%s\"" % self.product)
-
diff --git a/orchestrator/core/api_export.py b/orchestrator/core/api_export.py
deleted file mode 100644
index 2f26b02..0000000
--- a/orchestrator/core/api_export.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-def export_apis_from_tree(tree_key, inner_tree, cookie):
- inner_tree.invoke(["export_api_contributions"])
-
-
diff --git a/orchestrator/core/final_packaging.py b/orchestrator/core/final_packaging.py
deleted file mode 100644
index 03fe890..0000000
--- a/orchestrator/core/final_packaging.py
+++ /dev/null
@@ -1,117 +0,0 @@
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import os
-import sys
-
-import ninja_tools
-import ninja_syntax # Has to be after ninja_tools because of the path hack
-
-def final_packaging(context, inner_trees):
- """Pull together all of the previously defined rules into the final build stems."""
-
- with open(context.out.outer_ninja_file(), "w") as ninja_file:
- ninja = ninja_tools.Ninja(context, ninja_file)
-
- # Add the api surfaces file
- ninja.add_subninja(ninja_syntax.Subninja(context.out.api_ninja_file(), chDir=None))
-
- # For each inner tree
- for tree in inner_trees.keys():
- # TODO: Verify that inner_tree.ninja was generated
-
- # Read and verify file
- build_targets = read_build_targets_json(context, tree)
- if not build_targets:
- continue
-
- # Generate the ninja and build files for this inner tree
- generate_cross_domain_build_rules(context, ninja, tree, build_targets)
-
- # Finish writing the ninja file
- ninja.write()
-
-
-def read_build_targets_json(context, tree):
- """Read and validate the build_targets.json file for the given tree."""
- try:
- f = open(tree.out.build_targets_file())
- except FileNotFoundError:
- # It's allowed not to have any artifacts (e.g. if a tree is a light tree with only APIs)
- return None
-
- data = None
- with f:
- try:
- data = json.load(f)
- except json.decoder.JSONDecodeError as ex:
- sys.stderr.write("Error parsing file: %s\n" % tree.out.build_targets_file())
- # TODO: Error reporting
- raise ex
-
- # TODO: Better error handling
- # TODO: Validate json schema
- return data
-
-
-def generate_cross_domain_build_rules(context, ninja, tree, build_targets):
- "Generate the ninja and build files for the inner tree."
- # Include the inner tree's inner_tree.ninja
- ninja.add_subninja(ninja_syntax.Subninja(tree.out.main_ninja_file(), chDir=tree.root))
-
- # Generate module rules and files
- for module in build_targets.get("modules", []):
- generate_shared_module(context, ninja, tree, module)
-
- # Generate staging rules
- staging_dir = context.out.staging_dir()
- for staged in build_targets.get("staging", []):
- # TODO: Enforce that dest isn't in disallowed subdir of out or absolute
- dest = staged["dest"]
- dest = os.path.join(staging_dir, dest)
- if "src" in staged and "obj" in staged:
- context.errors.error("Can't have both \"src\" and \"obj\" tags in \"staging\" entry."
- ) # TODO: Filename and line if possible
- if "src" in staged:
- ninja.add_copy_file(dest, os.path.join(tree.root, staged["src"]))
- elif "obj" in staged:
- ninja.add_copy_file(dest, os.path.join(tree.out.root(), staged["obj"]))
- ninja.add_global_phony("staging", [dest])
-
- # Generate dist rules
- dist_dir = context.out.dist_dir()
- for disted in build_targets.get("dist", []):
- # TODO: Enforce that dest absolute
- dest = disted["dest"]
- dest = os.path.join(dist_dir, dest)
- ninja.add_copy_file(dest, os.path.join(tree.root, disted["src"]))
- ninja.add_global_phony("dist", [dest])
-
-
-def generate_shared_module(context, ninja, tree, module):
- """Generate ninja rules for the given build_targets.json defined module."""
- module_name = module["name"]
- module_type = module["type"]
- share_dir = context.out.module_share_dir(module_type, module_name)
- src_file = os.path.join(tree.root, module["file"])
-
- if module_type == "apex":
- ninja.add_copy_file(os.path.join(share_dir, module_name + ".apex"), src_file)
- # TODO: Generate build file
-
- else:
- # TODO: Better error handling
- raise Exception("Invalid module type: %s" % module)
diff --git a/orchestrator/core/inner_tree.py b/orchestrator/core/inner_tree.py
deleted file mode 100644
index d348ee7..0000000
--- a/orchestrator/core/inner_tree.py
+++ /dev/null
@@ -1,193 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import subprocess
-import sys
-import textwrap
-
-class InnerTreeKey(object):
- """Trees are identified uniquely by their root and the TARGET_PRODUCT they will use to build.
- If a single tree uses two different prdoucts, then we won't make assumptions about
- them sharing _anything_.
- TODO: This is true for soong. It's more likely that bazel could do analysis for two
- products at the same time in a single tree, so there's an optimization there to do
- eventually."""
- def __init__(self, root, product):
- self.root = root
- self.product = product
-
- def __str__(self):
- return "TreeKey(root=%s product=%s)" % (enquote(self.root), enquote(self.product))
-
- def __hash__(self):
- return hash((self.root, self.product))
-
- def _cmp(self, other):
- if self.root < other.root:
- return -1
- if self.root > other.root:
- return 1
- if self.product == other.product:
- return 0
- if self.product is None:
- return -1
- if other.product is None:
- return 1
- if self.product < other.product:
- return -1
- return 1
-
- def __eq__(self, other):
- return self._cmp(other) == 0
-
- def __ne__(self, other):
- return self._cmp(other) != 0
-
- def __lt__(self, other):
- return self._cmp(other) < 0
-
- def __le__(self, other):
- return self._cmp(other) <= 0
-
- def __gt__(self, other):
- return self._cmp(other) > 0
-
- def __ge__(self, other):
- return self._cmp(other) >= 0
-
-
-class InnerTree(object):
- def __init__(self, context, root, product):
- """Initialize with the inner tree root (relative to the workspace root)"""
- self.root = root
- self.product = product
- self.domains = {}
- # TODO: Base directory on OUT_DIR
- out_root = context.out.inner_tree_dir(root)
- if product:
- out_root += "_" + product
- else:
- out_root += "_unbundled"
- self.out = OutDirLayout(out_root)
-
- def __str__(self):
- return "InnerTree(root=%s product=%s domains=[%s])" % (enquote(self.root),
- enquote(self.product),
- " ".join([enquote(d) for d in sorted(self.domains.keys())]))
-
- def invoke(self, args):
- """Call the inner tree command for this inner tree. Exits on failure."""
- # TODO: Build time tracing
-
- # Validate that there is a .inner_build command to run at the root of the tree
- # so we can print a good error message
- inner_build_tool = os.path.join(self.root, ".inner_build")
- if not os.access(inner_build_tool, os.X_OK):
- sys.stderr.write(("Unable to execute %s. Is there an inner tree or lunch combo"
- + " misconfiguration?\n") % inner_build_tool)
- sys.exit(1)
-
- # TODO: This is where we should set up the shared trees
-
- # Build the command
- cmd = [inner_build_tool, "--out_dir", self.out.root()]
- for domain_name in sorted(self.domains.keys()):
- cmd.append("--api_domain")
- cmd.append(domain_name)
- cmd += args
-
- # Run the command
- process = subprocess.run(cmd, shell=False)
-
- # TODO: Probably want better handling of inner tree failures
- if process.returncode:
- sys.stderr.write("Build error in inner tree: %s\nstopping multitree build.\n"
- % self.root)
- sys.exit(1)
-
-
-class InnerTrees(object):
- def __init__(self, trees, domains):
- self.trees = trees
- self.domains = domains
-
- def __str__(self):
- "Return a debugging dump of this object"
- return textwrap.dedent("""\
- InnerTrees {
- trees: [
- %(trees)s
- ]
- domains: [
- %(domains)s
- ]
- }""" % {
- "trees": "\n ".join(sorted([str(t) for t in self.trees.values()])),
- "domains": "\n ".join(sorted([str(d) for d in self.domains.values()])),
- })
-
-
- def for_each_tree(self, func, cookie=None):
- """Call func for each of the inner trees once for each product that will be built in it.
-
- The calls will be in a stable order.
-
- Return a map of the InnerTreeKey to any results returned from func().
- """
- result = {}
- for key in sorted(self.trees.keys()):
- result[key] = func(key, self.trees[key], cookie)
- return result
-
-
- def get(self, tree_key):
- """Get an inner tree for tree_key"""
- return self.trees.get(tree_key)
-
- def keys(self):
- "Get the keys for the inner trees in name order."
- return [self.trees[k] for k in sorted(self.trees.keys())]
-
-
-class OutDirLayout(object):
- """Encapsulates the logic about the layout of the inner tree out directories.
- See also context.OutDir for outer tree out dir contents."""
-
- def __init__(self, root):
- "Initialize with the root of the OUT_DIR for the inner tree."
- self._root = root
-
- def root(self):
- return self._root
-
- def tree_info_file(self):
- return os.path.join(self._root, "tree_info.json")
-
- def api_contributions_dir(self):
- return os.path.join(self._root, "api_contributions")
-
- def build_targets_file(self):
- return os.path.join(self._root, "build_targets.json")
-
- def main_ninja_file(self):
- return os.path.join(self._root, "inner_tree.ninja")
-
-
-def enquote(s):
- return "None" if s is None else "\"%s\"" % s
-
-
diff --git a/orchestrator/core/interrogate.py b/orchestrator/core/interrogate.py
deleted file mode 100644
index 9fe769e..0000000
--- a/orchestrator/core/interrogate.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import os
-
-def interrogate_tree(tree_key, inner_tree, cookie):
- inner_tree.invoke(["describe"])
-
- info_json_filename = inner_tree.out.tree_info_file()
-
- # TODO: Error handling
- with open(info_json_filename) as f:
- info_json = json.load(f)
-
- # TODO: Check orchestrator protocol
-
diff --git a/orchestrator/core/lunch.py b/orchestrator/core/lunch.py
deleted file mode 100755
index 70a2d1d..0000000
--- a/orchestrator/core/lunch.py
+++ /dev/null
@@ -1,414 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import glob
-import json
-import os
-import sys
-
-EXIT_STATUS_OK = 0
-EXIT_STATUS_ERROR = 1
-EXIT_STATUS_NEED_HELP = 2
-
-
-def find_dirs(path, name, ttl=6):
- """Search at most ttl directories deep inside path for a directory called name
- and yield directories that match."""
- # The dance with subdirs is so that we recurse in sorted order.
- subdirs = []
- with os.scandir(path) as it:
- for dirent in sorted(it, key=lambda x: x.name):
- try:
- if dirent.is_dir():
- if dirent.name == name:
- yield os.path.join(path, dirent.name)
- elif ttl > 0:
- subdirs.append(dirent.name)
- except OSError:
- # Consume filesystem errors, e.g. too many links, permission etc.
- pass
- for subdir in subdirs:
- yield from find_dirs(os.path.join(path, subdir), name, ttl-1)
-
-
-def walk_paths(path, matcher, ttl=10):
- """Do a traversal of all files under path yielding each file that matches
- matcher."""
- # First look for files, then recurse into directories as needed.
- # The dance with subdirs is so that we recurse in sorted order.
- subdirs = []
- with os.scandir(path) as it:
- for dirent in sorted(it, key=lambda x: x.name):
- try:
- if dirent.is_file():
- if matcher(dirent.name):
- yield os.path.join(path, dirent.name)
- if dirent.is_dir():
- if ttl > 0:
- subdirs.append(dirent.name)
- except OSError:
- # Consume filesystem errors, e.g. too many links, permission etc.
- pass
- for subdir in sorted(subdirs):
- yield from walk_paths(os.path.join(path, subdir), matcher, ttl-1)
-
-
-def find_file(path, filename):
- """Return a file called filename inside path, no more than ttl levels deep.
-
- Directories are searched alphabetically.
- """
- for f in walk_paths(path, lambda x: x == filename):
- return f
-
-# TODO: When orchestrator is in its own git project remove the "build" and "make" here
-class LunchContext(object):
- """Mockable container for lunch"""
- def __init__(self, workspace_root, orchestrator_path_prefix_components=["build", "build", "make"]):
- self.workspace_root = workspace_root
- self.orchestrator_path_prefix_components = orchestrator_path_prefix_components
-
-def find_config_dirs(context):
- """Find the configuration files in the well known locations inside workspace_root
-
- <workspace_root>/<orchestrator>/<path>/<prefix>/orchestrator/multitree_combos
- (AOSP devices, such as cuttlefish)
-
- <workspace_root>/vendor/**/multitree_combos
- (specific to a vendor and not open sourced)
-
- <workspace_root>/device/**/multitree_combos
- (specific to a vendor and are open sourced)
-
- Directories are returned specifically in this order, so that aosp can't be
- overridden, but vendor overrides device.
- """
- # TODO: This is not looking in inner trees correctly.
-
- yield os.path.join(context.workspace_root, *context.orchestrator_path_prefix_components, "orchestrator/multitree_combos")
-
- dirs = ["vendor", "device"]
- for d in dirs:
- yield from find_dirs(os.path.join(context.workspace_root, d), "multitree_combos")
-
-
-def find_named_config(context, shortname):
- """Find the config with the given shortname inside context.workspace_root.
-
- Config directories are searched in the order described in find_config_dirs,
- and inside those directories, alphabetically."""
- filename = shortname + ".mcombo"
- for config_dir in find_config_dirs(context):
- found = find_file(config_dir, filename)
- if found:
- return found
- return None
-
-
-def parse_product_variant(s):
- """Split a PRODUCT-VARIANT name, or return None if it doesn't match that pattern."""
- split = s.split("-")
- if len(split) != 2:
- return None
- return split
-
-
-def choose_config_from_args(context, args):
- """Return the config file we should use for the given argument,
- or null if there's no file that matches that."""
- if len(args) == 1:
- # Prefer PRODUCT-VARIANT syntax so if there happens to be a matching
- # file we don't match that.
- pv = parse_product_variant(args[0])
- if pv:
- config = find_named_config(context, pv[0])
- if config:
- return (config, pv[1])
- return None, None
- # Look for a specifically named file
- if os.path.isfile(args[0]):
- return (args[0], args[1] if len(args) > 1 else None)
- # That file didn't exist, return that we didn't find it.
- return None, None
-
-
-class ConfigException(Exception):
- ERROR_IDENTIFY = "identify"
- ERROR_PARSE = "parse"
- ERROR_CYCLE = "cycle"
- ERROR_VALIDATE = "validate"
-
- def __init__(self, kind, message, locations=[], line=0):
- """Error thrown when loading and parsing configurations.
-
- Args:
- message: Error message to display to user
- locations: List of filenames of the include history. The 0 index one
- the location where the actual error occurred
- """
- if len(locations):
- s = locations[0]
- if line:
- s += ":"
- s += str(line)
- s += ": "
- else:
- s = ""
- s += message
- if len(locations):
- for loc in locations[1:]:
- s += "\n included from %s" % loc
- super().__init__(s)
- self.kind = kind
- self.message = message
- self.locations = locations
- self.line = line
-
-
-def load_config(filename):
- """Load a config, including processing the inherits fields.
-
- Raises:
- ConfigException on errors
- """
- def load_and_merge(fn, visited):
- with open(fn) as f:
- try:
- contents = json.load(f)
- except json.decoder.JSONDecodeError as ex:
- if True:
- raise ConfigException(ConfigException.ERROR_PARSE, ex.msg, visited, ex.lineno)
- else:
- sys.stderr.write("exception %s" % ex.__dict__)
- raise ex
- # Merge all the parents into one data, with first-wins policy
- inherited_data = {}
- for parent in contents.get("inherits", []):
- if parent in visited:
- raise ConfigException(ConfigException.ERROR_CYCLE, "Cycle detected in inherits",
- visited)
- deep_merge(inherited_data, load_and_merge(parent, [parent,] + visited))
- # Then merge inherited_data into contents, but what's already there will win.
- deep_merge(contents, inherited_data)
- contents.pop("inherits", None)
- return contents
- return load_and_merge(filename, [filename,])
-
-
-def deep_merge(merged, addition):
- """Merge all fields of addition into merged. Pre-existing fields win."""
- for k, v in addition.items():
- if k in merged:
- if isinstance(v, dict) and isinstance(merged[k], dict):
- deep_merge(merged[k], v)
- else:
- merged[k] = v
-
-
-def make_config_header(config_file, config, variant):
- def make_table(rows):
- maxcols = max([len(row) for row in rows])
- widths = [0] * maxcols
- for row in rows:
- for i in range(len(row)):
- widths[i] = max(widths[i], len(row[i]))
- text = []
- for row in rows:
- rowtext = []
- for i in range(len(row)):
- cell = row[i]
- rowtext.append(str(cell))
- rowtext.append(" " * (widths[i] - len(cell)))
- rowtext.append(" ")
- text.append("".join(rowtext))
- return "\n".join(text)
-
- trees = [("Component", "Path", "Product"),
- ("---------", "----", "-------")]
- entry = config.get("system", None)
- def add_config_tuple(trees, entry, name):
- if entry:
- trees.append((name, entry.get("tree"), entry.get("product", "")))
- add_config_tuple(trees, config.get("system"), "system")
- add_config_tuple(trees, config.get("vendor"), "vendor")
- for k, v in config.get("modules", {}).items():
- add_config_tuple(trees, v, k)
-
- return """========================================
-TARGET_BUILD_COMBO=%(TARGET_BUILD_COMBO)s
-TARGET_BUILD_VARIANT=%(TARGET_BUILD_VARIANT)s
-
-%(trees)s
-========================================\n""" % {
- "TARGET_BUILD_COMBO": config_file,
- "TARGET_BUILD_VARIANT": variant,
- "trees": make_table(trees),
- }
-
-
-def do_lunch(args):
- """Handle the lunch command."""
- # Check that we're at the top of a multitree workspace by seeing if this script exists.
- if not os.path.exists("build/build/make/orchestrator/core/lunch.py"):
- sys.stderr.write("ERROR: lunch.py must be run from the root of a multi-tree workspace\n")
- return EXIT_STATUS_ERROR
-
- # Choose the config file
- config_file, variant = choose_config_from_args(".", args)
-
- if config_file == None:
- sys.stderr.write("Can't find lunch combo file for: %s\n" % " ".join(args))
- return EXIT_STATUS_NEED_HELP
- if variant == None:
- sys.stderr.write("Can't find variant for: %s\n" % " ".join(args))
- return EXIT_STATUS_NEED_HELP
-
- # Parse the config file
- try:
- config = load_config(config_file)
- except ConfigException as ex:
- sys.stderr.write(str(ex))
- return EXIT_STATUS_ERROR
-
- # Fail if the lunchable bit isn't set, because this isn't a usable config
- if not config.get("lunchable", False):
- sys.stderr.write("%s: Lunch config file (or inherited files) does not have the 'lunchable'"
- % config_file)
- sys.stderr.write(" flag set, which means it is probably not a complete lunch spec.\n")
-
- # All the validation has passed, so print the name of the file and the variant
- sys.stdout.write("%s\n" % config_file)
- sys.stdout.write("%s\n" % variant)
-
- # Write confirmation message to stderr
- sys.stderr.write(make_config_header(config_file, config, variant))
-
- return EXIT_STATUS_OK
-
-
-def find_all_combo_files(context):
- """Find all .mcombo files in the prescribed locations in the tree."""
- for dir in find_config_dirs(context):
- for file in walk_paths(dir, lambda x: x.endswith(".mcombo")):
- yield file
-
-
-def is_file_lunchable(config_file):
- """Parse config_file, flatten the inheritance, and return whether it can be
- used as a lunch target."""
- try:
- config = load_config(config_file)
- except ConfigException as ex:
- sys.stderr.write("%s" % ex)
- return False
- return config.get("lunchable", False)
-
-
-def find_all_lunchable(context):
- """Find all mcombo files in the tree (rooted at context.workspace_root) that when
- parsed (and inheritance is flattened) have lunchable: true."""
- for f in [x for x in find_all_combo_files(context) if is_file_lunchable(x)]:
- yield f
-
-
-def load_current_config():
- """Load, validate and return the config as specified in TARGET_BUILD_COMBO. Throws
- ConfigException if there is a problem."""
-
- # Identify the config file
- config_file = os.environ.get("TARGET_BUILD_COMBO")
- if not config_file:
- raise ConfigException(ConfigException.ERROR_IDENTIFY,
- "TARGET_BUILD_COMBO not set. Run lunch or pass a combo file.")
-
- # Parse the config file
- config = load_config(config_file)
-
- # Validate the config file
- if not config.get("lunchable", False):
- raise ConfigException(ConfigException.ERROR_VALIDATE,
- "Lunch config file (or inherited files) does not have the 'lunchable'"
- + " flag set, which means it is probably not a complete lunch spec.",
- [config_file,])
-
- # TODO: Validate that:
- # - there are no modules called system or vendor
- # - everything has all the required files
-
- variant = os.environ.get("TARGET_BUILD_VARIANT")
- if not variant:
- variant = "eng" # TODO: Is this the right default?
- # Validate variant is user, userdebug or eng
-
- return config_file, config, variant
-
-def do_list():
- """Handle the --list command."""
- lunch_context = LunchContext(".")
- for f in sorted(find_all_lunchable(lunch_context)):
- print(f)
-
-
-def do_print(args):
- """Handle the --print command."""
- # Parse args
- if len(args) == 0:
- config_file = os.environ.get("TARGET_BUILD_COMBO")
- if not config_file:
- sys.stderr.write("TARGET_BUILD_COMBO not set. Run lunch before building.\n")
- return EXIT_STATUS_NEED_HELP
- elif len(args) == 1:
- config_file = args[0]
- else:
- return EXIT_STATUS_NEED_HELP
-
- # Parse the config file
- try:
- config = load_config(config_file)
- except ConfigException as ex:
- sys.stderr.write(str(ex))
- return EXIT_STATUS_ERROR
-
- # Print the config in json form
- json.dump(config, sys.stdout, indent=4)
-
- return EXIT_STATUS_OK
-
-
-def main(argv):
- if len(argv) < 2 or argv[1] == "-h" or argv[1] == "--help":
- return EXIT_STATUS_NEED_HELP
-
- if len(argv) == 2 and argv[1] == "--list":
- do_list()
- return EXIT_STATUS_OK
-
- if len(argv) == 2 and argv[1] == "--print":
- return do_print(argv[2:])
- return EXIT_STATUS_OK
-
- if (len(argv) == 3 or len(argv) == 4) and argv[1] == "--lunch":
- return do_lunch(argv[2:])
-
- sys.stderr.write("Unknown lunch command: %s\n" % " ".join(argv[1:]))
- return EXIT_STATUS_NEED_HELP
-
-if __name__ == "__main__":
- sys.exit(main(sys.argv))
-
-
-# vim: sts=4:ts=4:sw=4
diff --git a/orchestrator/core/ninja_runner.py b/orchestrator/core/ninja_runner.py
deleted file mode 100644
index ab81d66..0000000
--- a/orchestrator/core/ninja_runner.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import subprocess
-import sys
-
-def run_ninja(context, targets):
- """Run ninja.
- """
-
- # Construct the command
- cmd = [
- context.tools.ninja(),
- "-f",
- context.out.outer_ninja_file(),
- ] + targets
-
- # Run the command
- process = subprocess.run(cmd, shell=False)
-
- # TODO: Probably want better handling of inner tree failures
- if process.returncode:
- sys.stderr.write("Build error in outer tree.\nstopping multitree build.\n")
- sys.exit(1)
-
diff --git a/orchestrator/core/ninja_tools.py b/orchestrator/core/ninja_tools.py
deleted file mode 100644
index 16101ea..0000000
--- a/orchestrator/core/ninja_tools.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import sys
-
-# Workaround for python include path
-_ninja_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), "..", "ninja"))
-if _ninja_dir not in sys.path:
- sys.path.append(_ninja_dir)
-import ninja_writer
-from ninja_syntax import Variable, BuildAction, Rule, Pool, Subninja, Line
-
-
-class Ninja(ninja_writer.Writer):
- """Some higher level constructs on top of raw ninja writing.
- TODO: Not sure where these should be."""
- def __init__(self, context, file):
- super(Ninja, self).__init__(file)
- self._context = context
- self._did_copy_file = False
- self._phonies = {}
-
- def add_copy_file(self, copy_to, copy_from):
- if not self._did_copy_file:
- self._did_copy_file = True
- rule = Rule("copy_file")
- rule.add_variable("command", "mkdir -p ${out_dir} && " + self._context.tools.acp()
- + " -f ${in} ${out}")
- self.add_rule(rule)
- build_action = BuildAction(copy_to, "copy_file", inputs=[copy_from,],
- implicits=[self._context.tools.acp()])
- build_action.add_variable("out_dir", os.path.dirname(copy_to))
- self.add_build_action(build_action)
-
- def add_global_phony(self, name, deps):
- """Add a phony target where there are multiple places that will want to add to
- the same phony. If you can, to save memory, use add_phony instead of this function."""
- if type(deps) not in (list, tuple):
- raise Exception("Assertion failed: bad type of deps: %s" % type(deps))
- self._phonies.setdefault(name, []).extend(deps)
-
- def write(self):
- for phony, deps in self._phonies.items():
- self.add_phony(phony, deps)
- super(Ninja, self).write()
-
-
diff --git a/orchestrator/core/orchestrator.py b/orchestrator/core/orchestrator.py
deleted file mode 100755
index 508f73a..0000000
--- a/orchestrator/core/orchestrator.py
+++ /dev/null
@@ -1,119 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import subprocess
-import sys
-
-sys.dont_write_bytecode = True
-import api_assembly
-import api_domain
-import api_export
-import final_packaging
-import inner_tree
-import tree_analysis
-import interrogate
-import lunch
-import ninja_runner
-import utils
-
-EXIT_STATUS_OK = 0
-EXIT_STATUS_ERROR = 1
-
-API_DOMAIN_SYSTEM = "system"
-API_DOMAIN_VENDOR = "vendor"
-API_DOMAIN_MODULE = "module"
-
-def process_config(context, lunch_config):
- """Returns a InnerTrees object based on the configuration requested in the lunch config."""
- def add(domain_name, tree_root, product):
- tree_key = inner_tree.InnerTreeKey(tree_root, product)
- if tree_key in trees:
- tree = trees[tree_key]
- else:
- tree = inner_tree.InnerTree(context, tree_root, product)
- trees[tree_key] = tree
- domain = api_domain.ApiDomain(domain_name, tree, product)
- domains[domain_name] = domain
- tree.domains[domain_name] = domain
-
- trees = {}
- domains = {}
-
- system_entry = lunch_config.get("system")
- if system_entry:
- add(API_DOMAIN_SYSTEM, system_entry["tree"], system_entry["product"])
-
- vendor_entry = lunch_config.get("vendor")
- if vendor_entry:
- add(API_DOMAIN_VENDOR, vendor_entry["tree"], vendor_entry["product"])
-
- for module_name, module_entry in lunch_config.get("modules", []).items():
- add(module_name, module_entry["tree"], None)
-
- return inner_tree.InnerTrees(trees, domains)
-
-
-def build():
- # Choose the out directory, set up error handling, etc.
- context = utils.Context(utils.choose_out_dir(), utils.Errors(sys.stderr))
-
- # Read the lunch config file
- try:
- config_file, config, variant = lunch.load_current_config()
- except lunch.ConfigException as ex:
- sys.stderr.write("%s\n" % ex)
- return EXIT_STATUS_ERROR
- sys.stdout.write(lunch.make_config_header(config_file, config, variant))
-
- # Construct the trees and domains dicts
- inner_trees = process_config(context, config)
-
- # 1. Interrogate the trees
- inner_trees.for_each_tree(interrogate.interrogate_tree)
- # TODO: Detect bazel-only mode
-
- # 2a. API Export
- inner_trees.for_each_tree(api_export.export_apis_from_tree)
-
- # 2b. API Surface Assembly
- api_assembly.assemble_apis(context, inner_trees)
-
- # 3a. Inner tree analysis
- tree_analysis.analyze_trees(context, inner_trees)
-
- # 3b. Final Packaging Rules
- final_packaging.final_packaging(context, inner_trees)
-
- # 4. Build Execution
- # TODO: Decide what we want the UX for selecting targets to be across
- # branches... since there are very likely to be conflicting soong short
- # names.
- print("Running ninja...")
- targets = ["staging", "system"]
- ninja_runner.run_ninja(context, targets)
-
- # Success!
- return EXIT_STATUS_OK
-
-def main(argv):
- return build()
-
-if __name__ == "__main__":
- sys.exit(main(sys.argv))
-
-
-# vim: sts=4:ts=4:sw=4
diff --git a/orchestrator/core/test/configs/another/bad.mcombo b/orchestrator/core/test/configs/another/bad.mcombo
deleted file mode 100644
index 0967ef4..0000000
--- a/orchestrator/core/test/configs/another/bad.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/another/dir/a b/orchestrator/core/test/configs/another/dir/a
deleted file mode 100644
index 7898192..0000000
--- a/orchestrator/core/test/configs/another/dir/a
+++ /dev/null
@@ -1 +0,0 @@
-a
diff --git a/orchestrator/core/test/configs/b-eng b/orchestrator/core/test/configs/b-eng
deleted file mode 100644
index eceb3f3..0000000
--- a/orchestrator/core/test/configs/b-eng
+++ /dev/null
@@ -1 +0,0 @@
-INVALID FILE
diff --git a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/b.mcombo b/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/b.mcombo
deleted file mode 100644
index 8cc8370..0000000
--- a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/b.mcombo
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "lunchable": "true"
-}
diff --git a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo b/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo
deleted file mode 100644
index 0967ef4..0000000
--- a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/not_a_combo.txt b/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/not_a_combo.txt
deleted file mode 100644
index f9805f2..0000000
--- a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/not_a_combo.txt
+++ /dev/null
@@ -1 +0,0 @@
-not a combo file
diff --git a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/b.mcombo b/orchestrator/core/test/configs/device/aa/bb/multitree_combos/b.mcombo
deleted file mode 100644
index 0967ef4..0000000
--- a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/b.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/d.mcombo b/orchestrator/core/test/configs/device/aa/bb/multitree_combos/d.mcombo
deleted file mode 100644
index 0967ef4..0000000
--- a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/d.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/v.mcombo b/orchestrator/core/test/configs/device/aa/bb/multitree_combos/v.mcombo
deleted file mode 100644
index 0967ef4..0000000
--- a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/v.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/device/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo b/orchestrator/core/test/configs/device/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo
deleted file mode 100644
index e69de29..0000000
--- a/orchestrator/core/test/configs/device/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo
+++ /dev/null
diff --git a/orchestrator/core/test/configs/parsing/cycles/1.mcombo b/orchestrator/core/test/configs/parsing/cycles/1.mcombo
deleted file mode 100644
index ab8fe33..0000000
--- a/orchestrator/core/test/configs/parsing/cycles/1.mcombo
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "inherits": [
- "test/configs/parsing/cycles/2.mcombo"
- ]
-}
diff --git a/orchestrator/core/test/configs/parsing/cycles/2.mcombo b/orchestrator/core/test/configs/parsing/cycles/2.mcombo
deleted file mode 100644
index 2b774d0..0000000
--- a/orchestrator/core/test/configs/parsing/cycles/2.mcombo
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "inherits": [
- "test/configs/parsing/cycles/3.mcombo"
- ]
-}
-
diff --git a/orchestrator/core/test/configs/parsing/cycles/3.mcombo b/orchestrator/core/test/configs/parsing/cycles/3.mcombo
deleted file mode 100644
index 41b629b..0000000
--- a/orchestrator/core/test/configs/parsing/cycles/3.mcombo
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "inherits": [
- "test/configs/parsing/cycles/1.mcombo"
- ]
-}
-
diff --git a/orchestrator/core/test/configs/parsing/merge/1.mcombo b/orchestrator/core/test/configs/parsing/merge/1.mcombo
deleted file mode 100644
index a5a57d7..0000000
--- a/orchestrator/core/test/configs/parsing/merge/1.mcombo
+++ /dev/null
@@ -1,13 +0,0 @@
-{
- "inherits": [
- "test/configs/parsing/merge/2.mcombo",
- "test/configs/parsing/merge/3.mcombo"
- ],
- "in_1": "1",
- "in_1_2": "1",
- "merged": {
- "merged_1": "1",
- "merged_1_2": "1"
- },
- "dict_1": { "a" : "b" }
-}
diff --git a/orchestrator/core/test/configs/parsing/merge/2.mcombo b/orchestrator/core/test/configs/parsing/merge/2.mcombo
deleted file mode 100644
index 00963e2..0000000
--- a/orchestrator/core/test/configs/parsing/merge/2.mcombo
+++ /dev/null
@@ -1,12 +0,0 @@
-{
- "in_1_2": "2",
- "in_2": "2",
- "in_2_3": "2",
- "merged": {
- "merged_1_2": "2",
- "merged_2": "2",
- "merged_2_3": "2"
- },
- "dict_2": { "a" : "b" }
-}
-
diff --git a/orchestrator/core/test/configs/parsing/merge/3.mcombo b/orchestrator/core/test/configs/parsing/merge/3.mcombo
deleted file mode 100644
index 5fc9d90..0000000
--- a/orchestrator/core/test/configs/parsing/merge/3.mcombo
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "in_3": "3",
- "in_2_3": "3",
- "merged": {
- "merged_3": "3",
- "merged_2_3": "3"
- },
- "dict_3": { "a" : "b" }
-}
-
diff --git a/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/b.mcombo b/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/b.mcombo
deleted file mode 100644
index 0967ef4..0000000
--- a/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/b.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/v.mcombo b/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/v.mcombo
deleted file mode 100644
index 0967ef4..0000000
--- a/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/v.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/vendor/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo b/orchestrator/core/test/configs/vendor/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo
deleted file mode 100644
index e69de29..0000000
--- a/orchestrator/core/test/configs/vendor/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo
+++ /dev/null
diff --git a/orchestrator/core/test_lunch.py b/orchestrator/core/test_lunch.py
deleted file mode 100755
index 5b890fb..0000000
--- a/orchestrator/core/test_lunch.py
+++ /dev/null
@@ -1,133 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2008 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-import unittest
-
-sys.dont_write_bytecode = True
-import lunch
-
-# Create a test LunchContext object
-# Test workspace is in test/configs
-# Orchestrator prefix inside it is build/make
-test_lunch_context = lunch.LunchContext("test/configs", ["build", "make"])
-
-class TestStringMethods(unittest.TestCase):
-
- def test_find_dirs(self):
- self.assertEqual([x for x in lunch.find_dirs("test/configs", "multitree_combos")], [
- "test/configs/build/make/orchestrator/multitree_combos",
- "test/configs/device/aa/bb/multitree_combos",
- "test/configs/vendor/aa/bb/multitree_combos"])
-
- def test_find_file(self):
- # Finds the one in device first because this is searching from the root,
- # not using find_named_config.
- self.assertEqual(lunch.find_file("test/configs", "v.mcombo"),
- "test/configs/device/aa/bb/multitree_combos/v.mcombo")
-
- def test_find_config_dirs(self):
- self.assertEqual([x for x in lunch.find_config_dirs(test_lunch_context)], [
- "test/configs/build/make/orchestrator/multitree_combos",
- "test/configs/vendor/aa/bb/multitree_combos",
- "test/configs/device/aa/bb/multitree_combos"])
-
- def test_find_named_config(self):
- # Inside build/orchestrator, overriding device and vendor
- self.assertEqual(lunch.find_named_config(test_lunch_context, "b"),
- "test/configs/build/make/orchestrator/multitree_combos/b.mcombo")
-
- # Nested dir inside a combo dir
- self.assertEqual(lunch.find_named_config(test_lunch_context, "nested"),
- "test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo")
-
- # Inside vendor, overriding device
- self.assertEqual(lunch.find_named_config(test_lunch_context, "v"),
- "test/configs/vendor/aa/bb/multitree_combos/v.mcombo")
-
- # Inside device
- self.assertEqual(lunch.find_named_config(test_lunch_context, "d"),
- "test/configs/device/aa/bb/multitree_combos/d.mcombo")
-
- # Make sure we don't look too deep (for performance)
- self.assertIsNone(lunch.find_named_config(test_lunch_context, "too_deep"))
-
-
- def test_choose_config_file(self):
- # Empty string argument
- self.assertEqual(lunch.choose_config_from_args(test_lunch_context, [""]),
- (None, None))
-
- # A PRODUCT-VARIANT name
- self.assertEqual(lunch.choose_config_from_args(test_lunch_context, ["v-eng"]),
- ("test/configs/vendor/aa/bb/multitree_combos/v.mcombo", "eng"))
-
- # A PRODUCT-VARIANT name that conflicts with a file
- self.assertEqual(lunch.choose_config_from_args(test_lunch_context, ["b-eng"]),
- ("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"))
-
- # A PRODUCT-VARIANT that doesn't exist
- self.assertEqual(lunch.choose_config_from_args(test_lunch_context, ["z-user"]),
- (None, None))
-
- # An explicit file
- self.assertEqual(lunch.choose_config_from_args(test_lunch_context,
- ["test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"]),
- ("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"))
-
- # An explicit file that doesn't exist
- self.assertEqual(lunch.choose_config_from_args(test_lunch_context,
- ["test/configs/doesnt_exist.mcombo", "eng"]),
- (None, None))
-
- # An explicit file without a variant should fail
- self.assertEqual(lunch.choose_config_from_args(test_lunch_context,
- ["test/configs/build/make/orchestrator/multitree_combos/b.mcombo"]),
- ("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", None))
-
-
- def test_config_cycles(self):
- # Test that we catch cycles
- with self.assertRaises(lunch.ConfigException) as context:
- lunch.load_config("test/configs/parsing/cycles/1.mcombo")
- self.assertEqual(context.exception.kind, lunch.ConfigException.ERROR_CYCLE)
-
- def test_config_merge(self):
- # Test the merge logic
- self.assertEqual(lunch.load_config("test/configs/parsing/merge/1.mcombo"), {
- "in_1": "1",
- "in_1_2": "1",
- "merged": {"merged_1": "1",
- "merged_1_2": "1",
- "merged_2": "2",
- "merged_2_3": "2",
- "merged_3": "3"},
- "dict_1": {"a": "b"},
- "in_2": "2",
- "in_2_3": "2",
- "dict_2": {"a": "b"},
- "in_3": "3",
- "dict_3": {"a": "b"}
- })
-
- def test_list(self):
- self.assertEqual(sorted(lunch.find_all_lunchable(test_lunch_context)),
- ["test/configs/build/make/orchestrator/multitree_combos/b.mcombo"])
-
-if __name__ == "__main__":
- unittest.main()
-
-# vim: sts=4:ts=4:sw=4
diff --git a/orchestrator/core/tree_analysis.py b/orchestrator/core/tree_analysis.py
deleted file mode 100644
index 052cad6..0000000
--- a/orchestrator/core/tree_analysis.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-def analyze_trees(context, inner_trees):
- inner_trees.for_each_tree(run_analysis)
-
-def run_analysis(tree_key, inner_tree, cookie):
- inner_tree.invoke(["analyze"])
-
-
-
-
diff --git a/orchestrator/core/utils.py b/orchestrator/core/utils.py
deleted file mode 100644
index 41310e0..0000000
--- a/orchestrator/core/utils.py
+++ /dev/null
@@ -1,141 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import platform
-
-class Context(object):
- """Mockable container for global state."""
- def __init__(self, out_root, errors):
- self.out = OutDir(out_root)
- self.errors = errors
- self.tools = HostTools()
-
-class TestContext(Context):
- "Context for testing. The real Context is manually constructed in orchestrator.py."
-
- def __init__(self, test_work_dir, test_name):
- super(MockContext, self).__init__(os.path.join(test_work_dir, test_name),
- Errors(None))
-
-
-class OutDir(object):
- """Encapsulates the logic about the out directory at the outer-tree level.
- See also inner_tree.OutDirLayout for inner tree out dir contents."""
-
- def __init__(self, root):
- "Initialize with the root of the OUT_DIR for the outer tree."
- self._out_root = root
- self._intermediates = "intermediates"
-
- def root(self):
- return self._out_root
-
- def inner_tree_dir(self, tree_root):
- """Root directory for inner tree inside the out dir."""
- return os.path.join(self._out_root, "trees", tree_root)
-
- def api_ninja_file(self):
- """The ninja file that assembles API surfaces."""
- return os.path.join(self._out_root, "api_surfaces.ninja")
-
- def api_library_dir(self, surface, version, library):
- """Directory for all the contents of a library inside an API surface, including
- the build files. Any intermediates should go in api_library_work_dir."""
- return os.path.join(self._out_root, "api_surfaces", surface, str(version), library)
-
- def api_library_work_dir(self, surface, version, library):
- """Intermediates / scratch directory for library inside an API surface."""
- return os.path.join(self._out_root, self._intermediates, "api_surfaces", surface,
- str(version), library)
-
- def outer_ninja_file(self):
- return os.path.join(self._out_root, "multitree.ninja")
-
- def module_share_dir(self, module_type, module_name):
- return os.path.join(self._out_root, "shared", module_type, module_name)
-
- def staging_dir(self):
- return os.path.join(self._out_root, "staging")
-
- def dist_dir(self):
- "The DIST_DIR provided or out/dist" # TODO: Look at DIST_DIR
- return os.path.join(self._out_root, "dist")
-
-class Errors(object):
- """Class for reporting and tracking errors."""
- def __init__(self, stream):
- """Initialize Error reporter with a file-like object."""
- self._stream = stream
- self._all = []
-
- def error(self, message, file=None, line=None, col=None):
- """Record the error message."""
- s = ""
- if file:
- s += str(file)
- s += ":"
- if line:
- s += str(line)
- s += ":"
- if col:
- s += str(col)
- s += ":"
- if s:
- s += " "
- s += str(message)
- if s[-1] != "\n":
- s += "\n"
- self._all.append(s)
- if self._stream:
- self._stream.write(s)
-
- def had_error(self):
- """Return if there were any errors reported."""
- return len(self._all)
-
- def get_errors(self):
- """Get all errors that were reported."""
- return self._all
-
-
-class HostTools(object):
- def __init__(self):
- if platform.system() == "Linux":
- self._arch = "linux-x86"
- else:
- raise Exception("Orchestrator running on an unknown system: %s" % platform.system())
-
- # Some of these are called a lot, so pre-compute the strings to save memory
- self._prebuilts = os.path.join("build", "prebuilts", "build-tools", self._arch, "bin")
- self._acp = os.path.join(self._prebuilts, "acp")
- self._ninja = os.path.join(self._prebuilts, "ninja")
-
- def acp(self):
- return self._acp
-
- def ninja(self):
- return self._ninja
-
-
-def choose_out_dir():
- """Get the root of the out dir, either from the environment or by picking
- a default."""
- result = os.environ.get("OUT_DIR")
- if result:
- return result
- else:
- return "out"
diff --git a/orchestrator/demo/buffet_helper.py b/orchestrator/demo/buffet_helper.py
deleted file mode 100644
index fa29aeb..0000000
--- a/orchestrator/demo/buffet_helper.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python3
-import os
-import sys
-import yaml
-
-from hierarchy import parse_hierarchy
-
-
-def main():
- if len(sys.argv) != 2:
- print('usage: %s target' % sys.argv[0])
- exit(1)
-
- args = sys.argv[1].split('-')
- if len(args) != 2:
- print('target format: {target}-{variant}')
- exit(1)
-
- target, variant = args
-
- if variant not in ['eng', 'user', 'userdebug']:
- print('unknown variant "%s": expected "eng", "user" or "userdebug"' %
- variant)
- exit(1)
-
- build_top = os.getenv('BUFFET_BUILD_TOP')
- if not build_top:
- print('BUFFET_BUILD_TOP is not set; Did you correctly run envsetup.sh?')
- exit(1)
-
- hierarchy_map = parse_hierarchy(build_top)
-
- if target not in hierarchy_map:
- raise RuntimeError(
- "unknown target '%s': couldn't find the target. Supported targets are: %s"
- % (target, list(hierarchy_map.keys())))
-
- hierarchy = [target]
- while hierarchy_map[hierarchy[-1]]:
- hierarchy.append(hierarchy_map[hierarchy[-1]])
-
- print('Target hierarchy for %s: %s' % (target, hierarchy))
-
-
-if __name__ == '__main__':
- main()
diff --git a/orchestrator/demo/build_helper.py b/orchestrator/demo/build_helper.py
deleted file mode 100644
index c481f80..0000000
--- a/orchestrator/demo/build_helper.py
+++ /dev/null
@@ -1,367 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import copy
-import hierarchy
-import json
-import logging
-import filecmp
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-import collect_metadata
-import utils
-
-BUILD_CMD_TO_ALL = (
- 'clean',
- 'installclean',
- 'update-meta',
-)
-BUILD_ALL_EXEMPTION = (
- 'art',
-)
-
-def get_supported_product(ctx, supported_products):
- hierarchy_map = hierarchy.parse_hierarchy(ctx.build_top())
- target = ctx.target_product()
-
- while target not in supported_products:
- if target not in hierarchy_map:
- return None
- target = hierarchy_map[target]
- return target
-
-
-def parse_goals(ctx, metadata, goals):
- """Parse goals and returns a map from each component to goals.
-
- e.g.
-
- "m main art timezone:foo timezone:bar" will return the following dict: {
- "main": {"all"},
- "art": {"all"},
- "timezone": {"foo", "bar"},
- }
- """
- # for now, goal should look like:
- # {component} or {component}:{subgoal}
-
- ret = collections.defaultdict(set)
-
- for goal in goals:
- # check if the command is for all components
- if goal in BUILD_CMD_TO_ALL:
- ret['all'].add(goal)
- continue
-
- # should be {component} or {component}:{subgoal}
- try:
- component, subgoal = goal.split(':') if ':' in goal else (goal, 'all')
- except ValueError:
- raise RuntimeError(
- 'unknown goal: %s: should be {component} or {component}:{subgoal}' %
- goal)
- if component not in metadata:
- raise RuntimeError('unknown goal: %s: component %s not found' %
- (goal, component))
- if not get_supported_product(ctx, metadata[component]['lunch_targets']):
- raise RuntimeError("can't find matching target. Supported targets are: " +
- str(metadata[component]['lunch_targets']))
-
- ret[component].add(subgoal)
-
- return ret
-
-
-def find_cycle(metadata):
- """ Finds a cyclic dependency among components.
-
- This is for debugging.
- """
- visited = set()
- parent_node = dict()
- in_stack = set()
-
- # Returns a cycle if one is found
- def dfs(node):
- # visit_order[visit_time[node] - 1] == node
- nonlocal visited, parent_node, in_stack
-
- visited.add(node)
- in_stack.add(node)
- if 'deps' not in metadata[node]:
- in_stack.remove(node)
- return None
- for next in metadata[node]['deps']:
- # We found a cycle (next ~ node) if next is still in the stack
- if next in in_stack:
- cycle = [node]
- while cycle[-1] != next:
- cycle.append(parent_node[cycle[-1]])
- return cycle
-
- # Else, continue searching
- if next in visited:
- continue
-
- parent_node[next] = node
- result = dfs(next)
- if result:
- return result
-
- in_stack.remove(node)
- return None
-
- for component in metadata:
- if component in visited:
- continue
-
- result = dfs(component)
- if result:
- return result
-
- return None
-
-
-def topological_sort_components(metadata):
- """ Performs topological sort on components.
-
- If A depends on B, B appears first.
- """
- # If A depends on B, we want B to appear before A. But the graph in metadata
- # is represented as A -> B (B in metadata[A]['deps']). So we sort in the
- # reverse order, and then reverse the result again to get the desired order.
- indegree = collections.defaultdict(int)
- for component in metadata:
- if 'deps' not in metadata[component]:
- continue
- for dep in metadata[component]['deps']:
- indegree[dep] += 1
-
- component_queue = collections.deque()
- for component in metadata:
- if indegree[component] == 0:
- component_queue.append(component)
-
- result = []
- while component_queue:
- component = component_queue.popleft()
- result.append(component)
- if 'deps' not in metadata[component]:
- continue
- for dep in metadata[component]['deps']:
- indegree[dep] -= 1
- if indegree[dep] == 0:
- component_queue.append(dep)
-
- # If topological sort fails, there must be a cycle.
- if len(result) != len(metadata):
- cycle = find_cycle(metadata)
- raise RuntimeError('circular dependency found among metadata: %s' % cycle)
-
- return result[::-1]
-
-
-def add_dependency_goals(ctx, metadata, component, goals):
- """ Adds goals that given component depends on."""
- # For now, let's just add "all"
- # TODO: add detailed goals (e.g. API build rules, library build rules, etc.)
- if 'deps' not in metadata[component]:
- return
-
- for dep in metadata[component]['deps']:
- goals[dep].add('all')
-
-
-def sorted_goals_with_dependencies(ctx, metadata, parsed_goals):
- """ Analyzes the dependency graph among components, adds build commands for
-
- dependencies, and then sorts the goals.
-
- Returns a list of tuples: (component_name, set of subgoals).
- Builds should be run in the list's order.
- """
- # TODO(inseob@): after topological sort, some components may be built in
- # parallel.
-
- topological_order = topological_sort_components(metadata)
- combined_goals = copy.deepcopy(parsed_goals)
-
- # Add build rules for each component's dependencies
- # We do this in reverse order, so it can be transitive.
- # e.g. if A depends on B and B depends on C, and we build A,
- # C should also be built, in addition to B.
- for component in topological_order[::-1]:
- if component in combined_goals:
- add_dependency_goals(ctx, metadata, component, combined_goals)
-
- ret = []
- for component in ['all'] + topological_order:
- if component in combined_goals:
- ret.append((component, combined_goals[component]))
-
- return ret
-
-
-def run_build(ctx, metadata, component, subgoals):
- build_cmd = metadata[component]['build_cmd']
- out_dir = metadata[component]['out_dir']
- default_goals = ''
- if 'default_goals' in metadata[component]:
- default_goals = metadata[component]['default_goals']
-
- if 'all' in subgoals:
- goal = default_goals
- else:
- goal = ' '.join(subgoals)
-
- build_vars = ''
- if 'update-meta' in subgoals:
- build_vars = 'TARGET_MULTITREE_UPDATE_META=true'
- # TODO(inseob@): shell escape
- cmd = [
- '/bin/bash', '-c',
- 'source build/envsetup.sh && lunch %s-%s && %s %s %s' %
- (get_supported_product(ctx, metadata[component]['lunch_targets']),
- ctx.target_build_variant(), build_vars, build_cmd, goal)
- ]
- logging.debug('cwd: ' + metadata[component]['path'])
- logging.debug('running build: ' + str(cmd))
-
- subprocess.run(cmd, cwd=metadata[component]['path'], check=True)
-
-
-def run_build_all(ctx, metadata, subgoals):
- for component in metadata:
- if component in BUILD_ALL_EXEMPTION:
- continue
- run_build(ctx, metadata, component, subgoals)
-
-
-def find_components(metadata, predicate):
- for component in metadata:
- if predicate(component):
- yield component
-
-
-def import_filegroups(metadata, component, exporting_component, target_file_pairs):
- imported_filegroup_dir = os.path.join(metadata[component]['path'], 'imported', exporting_component)
-
- bp_content = ''
- for name, outpaths in target_file_pairs:
- bp_content += ('filegroup {{\n'
- ' name: "{fname}",\n'
- ' srcs: [\n'.format(fname=name))
- for outpath in outpaths:
- bp_content += ' "{outfile}",\n'.format(outfile=os.path.basename(outpath))
- bp_content += (' ],\n'
- '}\n')
-
- with tempfile.TemporaryDirectory() as tmp_dir:
- with open(os.path.join(tmp_dir, 'Android.bp'), 'w') as fout:
- fout.write(bp_content)
- for _, outpaths in target_file_pairs:
- for outpath in outpaths:
- os.symlink(os.path.join(metadata[exporting_component]['path'], outpath),
- os.path.join(tmp_dir, os.path.basename(outpath)))
- cmp_result = filecmp.dircmp(tmp_dir, imported_filegroup_dir)
- if os.path.exists(imported_filegroup_dir) and len(
- cmp_result.left_only) + len(cmp_result.right_only) + len(
- cmp_result.diff_files) == 0:
- # Files are identical, it doesn't need to be written
- logging.info(
- 'imported files exists and the contents are identical: {} -> {}'
- .format(component, exporting_component))
- continue
- logging.info('creating symlinks for imported files: {} -> {}'.format(
- component, exporting_component))
- os.makedirs(imported_filegroup_dir, exist_ok=True)
- shutil.rmtree(imported_filegroup_dir, ignore_errors=True)
- shutil.move(tmp_dir, imported_filegroup_dir)
-
-
-def prepare_build(metadata, component):
- imported_dir = os.path.join(metadata[component]['path'], 'imported')
- if utils.META_DEPS not in metadata[component]:
- if os.path.exists(imported_dir):
- logging.debug('remove {}'.format(imported_dir))
- shutil.rmtree(imported_dir)
- return
-
- imported_components = set()
- for exp_comp in metadata[component][utils.META_DEPS]:
- if utils.META_FILEGROUP in metadata[component][utils.META_DEPS][exp_comp]:
- filegroups = metadata[component][utils.META_DEPS][exp_comp][utils.META_FILEGROUP]
- target_file_pairs = []
- for name in filegroups:
- target_file_pairs.append((name, filegroups[name]))
- import_filegroups(metadata, component, exp_comp, target_file_pairs)
- imported_components.add(exp_comp)
-
- # Remove directories that are not generated this time.
- if os.path.exists(imported_dir):
- if len(imported_components) == 0:
- shutil.rmtree(imported_dir)
- else:
- for remove_target in set(os.listdir(imported_dir)) - imported_components:
- logging.info('remove unnecessary imported dir: {}'.format(remove_target))
- shutil.rmtree(os.path.join(imported_dir, remove_target))
-
-
-def main():
- utils.set_logging_config(logging.DEBUG)
- ctx = utils.get_build_context()
-
- logging.info('collecting metadata')
-
- utils.set_logging_config(True)
-
- goals = sys.argv[1:]
- if not goals:
- logging.debug('empty goals. defaults to main')
- goals = ['main']
-
- logging.debug('goals: ' + str(goals))
-
- # Force update the metadata for the 'update-meta' build
- metadata_collector = collect_metadata.MetadataCollector(
- ctx.components_top(), ctx.out_dir(),
- collect_metadata.COMPONENT_METADATA_DIR,
- collect_metadata.COMPONENT_METADATA_FILE,
- force_update='update-meta' in goals)
- metadata_collector.collect()
-
- metadata = metadata_collector.get_metadata()
- logging.debug('metadata: ' + str(metadata))
-
- parsed_goals = parse_goals(ctx, metadata, goals)
- logging.debug('parsed goals: ' + str(parsed_goals))
-
- sorted_goals = sorted_goals_with_dependencies(ctx, metadata, parsed_goals)
- logging.debug('sorted goals with deps: ' + str(sorted_goals))
-
- for component, subgoals in sorted_goals:
- if component == 'all':
- run_build_all(ctx, metadata, subgoals)
- continue
- prepare_build(metadata, component)
- run_build(ctx, metadata, component, subgoals)
-
-
-if __name__ == '__main__':
- main()
diff --git a/orchestrator/demo/collect_metadata.py b/orchestrator/demo/collect_metadata.py
deleted file mode 100755
index 148167d..0000000
--- a/orchestrator/demo/collect_metadata.py
+++ /dev/null
@@ -1,428 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2021 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import copy
-import json
-import logging
-import os
-import sys
-import yaml
-from collections import defaultdict
-from typing import (
- List,
- Set,
-)
-
-import utils
-
-# SKIP_COMPONENT_SEARCH = (
-# 'tools',
-# )
-COMPONENT_METADATA_DIR = '.repo'
-COMPONENT_METADATA_FILE = 'treeinfo.yaml'
-GENERATED_METADATA_FILE = 'metadata.json'
-COMBINED_METADATA_FILENAME = 'multitree_meta.json'
-
-
-class Dep(object):
- def __init__(self, name, component, deps_type):
- self.name = name
- self.component = component
- self.type = deps_type
- self.out_paths = list()
-
-
-class ExportedDep(Dep):
- def __init__(self, name, component, deps_type):
- super().__init__(name, component, deps_type)
-
- def setOutputPaths(self, output_paths: list):
- self.out_paths = output_paths
-
-
-class ImportedDep(Dep):
- required_type_map = {
- # import type: (required type, get imported module list)
- utils.META_FILEGROUP: (utils.META_MODULES, True),
- }
-
- def __init__(self, name, component, deps_type, import_map):
- super().__init__(name, component, deps_type)
- self.exported_deps: Set[ExportedDep] = set()
- self.imported_modules: List[str] = list()
- self.required_type = deps_type
- get_imported_module = False
- if deps_type in ImportedDep.required_type_map:
- self.required_type, get_imported_module = ImportedDep.required_type_map[deps_type]
- if get_imported_module:
- self.imported_modules = import_map[name]
- else:
- self.imported_modules.append(name)
-
- def verify_and_add(self, exported: ExportedDep):
- if self.required_type != exported.type:
- raise RuntimeError(
- '{comp} components imports {module} for {imp_type} but it is exported as {exp_type}.'
- .format(comp=self.component, module=exported.name, imp_type=self.required_type, exp_type=exported.type))
- self.exported_deps.add(exported)
- self.out_paths.extend(exported.out_paths)
- # Remove duplicates. We may not use set() which is not JSON serializable
- self.out_paths = list(dict.fromkeys(self.out_paths))
-
-
-class MetadataCollector(object):
- """Visit all component directories and collect the metadata from them.
-
-Example of metadata:
-==========
-build_cmd: m # build command for this component. 'm' if omitted
-out_dir: out # out dir of this component. 'out' if omitted
-exports:
- libraries:
- - name: libopenjdkjvm
- - name: libopenjdkjvmd
- build_cmd: mma # build command for libopenjdkjvmd if specified
- out_dir: out/soong # out dir for libopenjdkjvmd if specified
- - name: libctstiagent
- APIs:
- - api1
- - api2
-imports:
- libraries:
- - lib1
- - lib2
- APIs:
- - import_api1
- - import_api2
-lunch_targets:
- - arm64
- - x86_64
-"""
-
- def __init__(self, component_top, out_dir, meta_dir, meta_file, force_update=False):
- if not os.path.exists(out_dir):
- os.makedirs(out_dir)
-
- self.__component_top = component_top
- self.__out_dir = out_dir
- self.__metadata_path = os.path.join(meta_dir, meta_file)
- self.__combined_metadata_path = os.path.join(self.__out_dir,
- COMBINED_METADATA_FILENAME)
- self.__force_update = force_update
-
- self.__metadata = dict()
- self.__map_exports = dict()
- self.__component_set = set()
-
- def collect(self):
- """ Read precomputed combined metadata from the json file.
-
- If any components have updated their metadata, update the metadata
- information and the json file.
- """
- timestamp = self.__restore_metadata()
- if timestamp and os.path.getmtime(__file__) > timestamp:
- logging.info('Update the metadata as the orchestrator has been changed')
- self.__force_update = True
- self.__collect_from_components(timestamp)
-
- def get_metadata(self):
- """ Returns collected metadata from all components"""
- if not self.__metadata:
- logging.warning('Metadata is empty')
- return copy.deepcopy(self.__metadata)
-
- def __collect_from_components(self, timestamp):
- """ Read metadata from all components
-
- If any components have newer metadata files or are removed, update the
- combined metadata.
- """
- metadata_updated = False
- for component in os.listdir(self.__component_top):
- # if component in SKIP_COMPONENT_SEARCH:
- # continue
- if self.__read_component_metadata(timestamp, component):
- metadata_updated = True
- if self.__read_generated_metadata(timestamp, component):
- metadata_updated = True
-
- deleted_components = set()
- for meta in self.__metadata:
- if meta not in self.__component_set:
- logging.info('Component {} is removed'.format(meta))
- deleted_components.add(meta)
- metadata_updated = True
- for meta in deleted_components:
- del self.__metadata[meta]
-
- if metadata_updated:
- self.__update_dependencies()
- self.__store_metadata()
- logging.info('Metadata updated')
-
- def __read_component_metadata(self, timestamp, component):
- """ Search for the metadata file from a component.
-
- If the metadata is modified, read the file and update the metadata.
- """
- component_path = os.path.join(self.__component_top, component)
- metadata_file = os.path.join(component_path, self.__metadata_path)
- logging.info(
- 'Reading a metadata file from {} component ...'.format(component))
- if not os.path.isfile(metadata_file):
- logging.warning('Metadata file {} not found!'.format(metadata_file))
- return False
-
- self.__component_set.add(component)
- if not self.__force_update and timestamp and timestamp > os.path.getmtime(metadata_file):
- logging.info('... yaml not changed. Skip')
- return False
-
- with open(metadata_file) as f:
- meta = yaml.load(f, Loader=yaml.SafeLoader)
-
- meta['path'] = component_path
- if utils.META_BUILDCMD not in meta:
- meta[utils.META_BUILDCMD] = utils.DEFAULT_BUILDCMD
- if utils.META_OUTDIR not in meta:
- meta[utils.META_OUTDIR] = utils.DEFAULT_OUTDIR
-
- if utils.META_IMPORTS not in meta:
- meta[utils.META_IMPORTS] = defaultdict(dict)
- if utils.META_EXPORTS not in meta:
- meta[utils.META_EXPORTS] = defaultdict(dict)
-
- self.__metadata[component] = meta
- return True
-
- def __read_generated_metadata(self, timestamp, component):
- """ Read a metadata gerated by 'update-meta' build command from the soong build system
-
- Soong generate the metadata that has the information of import/export module/files.
- Build orchestrator read the generated metadata to collect the dependency information.
-
- Generated metadata has the following format:
- {
- "Imported": {
- "FileGroups": {
- "<name_of_filegroup>": [
- "<exported_module_name>",
- ...
- ],
- ...
- }
- }
- "Exported": {
- "<exported_module_name>": [
- "<output_file_path>",
- ...
- ],
- ...
- }
- }
- """
- if component not in self.__component_set:
- # skip reading generated metadata if the component metadata file was missing
- return False
- component_out = os.path.join(self.__component_top, component, self.__metadata[component][utils.META_OUTDIR])
- generated_metadata_file = os.path.join(component_out, 'soong', 'multitree', GENERATED_METADATA_FILE)
- if not os.path.isfile(generated_metadata_file):
- logging.info('... Soong did not generated the metadata file. Skip')
- return False
- if not self.__force_update and timestamp and timestamp > os.path.getmtime(generated_metadata_file):
- logging.info('... Soong generated metadata not changed. Skip')
- return False
-
- with open(generated_metadata_file, 'r') as gen_meta_json:
- try:
- gen_metadata = json.load(gen_meta_json)
- except json.decoder.JSONDecodeError:
- logging.warning('JSONDecodeError!!!: skip reading the {} file'.format(
- generated_metadata_file))
- return False
-
- if utils.SOONG_IMPORTED in gen_metadata:
- imported = gen_metadata[utils.SOONG_IMPORTED]
- if utils.SOONG_IMPORTED_FILEGROUPS in imported:
- self.__metadata[component][utils.META_IMPORTS][utils.META_FILEGROUP] = imported[utils.SOONG_IMPORTED_FILEGROUPS]
- if utils.SOONG_EXPORTED in gen_metadata:
- self.__metadata[component][utils.META_EXPORTS][utils.META_MODULES] = gen_metadata[utils.SOONG_EXPORTED]
-
- return True
-
- def __update_export_map(self):
- """ Read metadata of all components and update the export map
-
- 'libraries' and 'APIs' are special exproted types that are provided manually
- from the .yaml metadata files. These need to be replaced with the implementation
- in soong gerated metadata.
- The export type 'module' is generated from the soong build system from the modules
- with 'export: true' property. This export type includes a dictionary with module
- names as keys and their output files as values. These output files will be used as
- prebuilt sources when generating the imported modules.
- """
- self.__map_exports = dict()
- for comp in self.__metadata:
- if utils.META_EXPORTS not in self.__metadata[comp]:
- continue
- exports = self.__metadata[comp][utils.META_EXPORTS]
-
- for export_type in exports:
- for module in exports[export_type]:
- if export_type == utils.META_LIBS:
- name = module[utils.META_LIB_NAME]
- else:
- name = module
-
- if name in self.__map_exports:
- raise RuntimeError(
- 'Exported libs conflict!!!: "{name}" in the {comp} component is already exported by the {prev} component.'
- .format(name=name, comp=comp, prev=self.__map_exports[name][utils.EXP_COMPONENT]))
- exported_deps = ExportedDep(name, comp, export_type)
- if export_type == utils.META_MODULES:
- exported_deps.setOutputPaths(exports[export_type][module])
- self.__map_exports[name] = exported_deps
-
- def __verify_and_add_dependencies(self, component):
- """ Search all imported items from the export_map.
-
- If any imported items are not provided by the other components, report
- an error.
- Otherwise, add the component dependency and update the exported information to the
- import maps.
- """
- def verify_and_add_dependencies(imported_dep: ImportedDep):
- for module in imported_dep.imported_modules:
- if module not in self.__map_exports:
- raise RuntimeError(
- 'Imported item not found!!!: Imported module "{module}" in the {comp} component is not exported from any other components.'
- .format(module=module, comp=imported_dep.component))
- imported_dep.verify_and_add(self.__map_exports[module])
-
- deps = self.__metadata[component][utils.META_DEPS]
- exp_comp = self.__map_exports[module].component
- if exp_comp not in deps:
- deps[exp_comp] = defaultdict(defaultdict)
- deps[exp_comp][imported_dep.type][imported_dep.name] = imported_dep.out_paths
-
- self.__metadata[component][utils.META_DEPS] = defaultdict()
- imports = self.__metadata[component][utils.META_IMPORTS]
- for import_type in imports:
- for module in imports[import_type]:
- verify_and_add_dependencies(ImportedDep(module, component, import_type, imports[import_type]))
-
- def __check_imports(self):
- """ Search the export map to find the component to import libraries or APIs.
-
- Update the 'deps' field that includes the dependent components.
- """
- for component in self.__metadata:
- self.__verify_and_add_dependencies(component)
- if utils.META_DEPS in self.__metadata[component]:
- logging.debug('{comp} depends on {list} components'.format(
- comp=component, list=self.__metadata[component][utils.META_DEPS]))
-
- def __update_dependencies(self):
- """ Generate a dependency graph for the components
-
- Update __map_exports and the dependency graph with the maps.
- """
- self.__update_export_map()
- self.__check_imports()
-
- def __store_metadata(self):
- """ Store the __metadata dictionary as json format"""
- with open(self.__combined_metadata_path, 'w') as json_file:
- json.dump(self.__metadata, json_file, indent=2)
-
- def __restore_metadata(self):
- """ Read the stored json file and return the time stamps of the
-
- metadata file.
- """
- if not os.path.exists(self.__combined_metadata_path):
- return None
-
- with open(self.__combined_metadata_path, 'r') as json_file:
- try:
- self.__metadata = json.load(json_file)
- except json.decoder.JSONDecodeError:
- logging.warning('JSONDecodeError!!!: skip reading the {} file'.format(
- self.__combined_metadata_path))
- return None
-
- logging.info('Metadata restored from {}'.format(
- self.__combined_metadata_path))
- self.__update_export_map()
- return os.path.getmtime(self.__combined_metadata_path)
-
-
-def get_args():
-
- def check_dir(path):
- if os.path.exists(path) and os.path.isdir(path):
- return os.path.normpath(path)
- else:
- raise argparse.ArgumentTypeError('\"{}\" is not a directory'.format(path))
-
- parser = argparse.ArgumentParser()
- parser.add_argument(
- '--component-top',
- help='Scan all components under this directory.',
- default=os.path.join(os.path.dirname(__file__), '../../../components'),
- type=check_dir)
- parser.add_argument(
- '--meta-file',
- help='Name of the metadata file.',
- default=COMPONENT_METADATA_FILE,
- type=str)
- parser.add_argument(
- '--meta-dir',
- help='Each component has the metadata in this directory.',
- default=COMPONENT_METADATA_DIR,
- type=str)
- parser.add_argument(
- '--out-dir',
- help='Out dir for the outer tree. The orchestrator stores the collected metadata in this directory.',
- default=os.path.join(os.path.dirname(__file__), '../../../out'),
- type=os.path.normpath)
- parser.add_argument(
- '--force',
- '-f',
- action='store_true',
- help='Force to collect metadata',
- )
- parser.add_argument(
- '--verbose',
- '-v',
- help='Increase output verbosity, e.g. "-v", "-vv".',
- action='count',
- default=0)
- return parser.parse_args()
-
-
-def main():
- args = get_args()
- utils.set_logging_config(args.verbose)
-
- metadata_collector = MetadataCollector(args.component_top, args.out_dir,
- args.meta_dir, args.meta_file, args.force)
- metadata_collector.collect()
-
-
-if __name__ == '__main__':
- main()
diff --git a/orchestrator/demo/envsetup.sh b/orchestrator/demo/envsetup.sh
deleted file mode 100644
index 902a37c..0000000
--- a/orchestrator/demo/envsetup.sh
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/bin/bash
-
-function buffet()
-{
- local product variant selection
- if [[ $# -ne 1 ]]; then
- echo "usage: buffet [target]" >&2
- return 1
- fi
-
- selection=$1
- product=${selection%%-*} # Trim everything after first dash
- variant=${selection#*-} # Trim everything up to first dash
-
- if [ -z "$product" ]
- then
- echo
- echo "Invalid lunch combo: $selection"
- return 1
- fi
-
- if [ -z "$variant" ]
- then
- if [[ "$product" =~ .*_(eng|user|userdebug) ]]
- then
- echo "Did you mean -${product/*_/}? (dash instead of underscore)"
- fi
- return 1
- fi
-
- BUFFET_BUILD_TOP=$(pwd) python3 tools/build/orchestrator/buffet_helper.py $1 || return 1
-
- export BUFFET_BUILD_TOP=$(pwd)
- export BUFFET_COMPONENTS_TOP=$BUFFET_BUILD_TOP/components
- export BUFFET_TARGET_PRODUCT=$product
- export BUFFET_TARGET_BUILD_VARIANT=$variant
- export BUFFET_TARGET_BUILD_TYPE=release
-}
-
-function m()
-{
- if [ -z "$BUFFET_BUILD_TOP" ]
- then
- echo "Run \"buffet [target]\" first"
- return 1
- fi
- python3 $BUFFET_BUILD_TOP/tools/build/orchestrator/build_helper.py "$@"
-}
diff --git a/orchestrator/demo/hierarchy.py b/orchestrator/demo/hierarchy.py
deleted file mode 100644
index ae1825c..0000000
--- a/orchestrator/demo/hierarchy.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import os
-import yaml
-
-
-def parse_hierarchy(build_top):
- """Parse build hierarchy file from given build top directory, and returns a dict from child targets to parent targets.
-
- Example of hierarchy file:
- ==========
- aosp_arm64:
- - armv8
- - aosp_cf_arm64_phone
-
- armv8:
- - aosp_oriole
- - aosp_sunfish
-
- aosp_oriole:
- - oriole
-
- aosp_sunfish:
- - sunfish
-
- oriole:
- # leaf
-
- sunfish:
- # leaf
- ==========
-
- If we parse this yaml, we get a dict looking like:
-
- {
- "sunfish": "aosp_sunfish",
- "oriole": "aosp_oriole",
- "aosp_oriole": "armv8",
- "aosp_sunfish": "armv8",
- "armv8": "aosp_arm64",
- "aosp_cf_arm64_phone": "aosp_arm64",
- "aosp_arm64": None, # no parent
- }
- """
- metadata_path = os.path.join(build_top, 'tools', 'build', 'hierarchy.yaml')
- if not os.path.isfile(metadata_path):
- raise RuntimeError("target metadata file %s doesn't exist" % metadata_path)
-
- with open(metadata_path, 'r') as f:
- hierarchy_yaml = yaml.load(f, Loader=yaml.SafeLoader)
-
- hierarchy_map = dict()
-
- for parent_target, child_targets in hierarchy_yaml.items():
- if not child_targets:
- # leaf
- continue
- for child_target in child_targets:
- hierarchy_map[child_target] = parent_target
-
- for parent_target in hierarchy_yaml:
- # targets with no parent
- if parent_target not in hierarchy_map:
- hierarchy_map[parent_target] = None
-
- return hierarchy_map
diff --git a/orchestrator/demo/hierarchy.yaml b/orchestrator/demo/hierarchy.yaml
deleted file mode 100644
index cc6de4d..0000000
--- a/orchestrator/demo/hierarchy.yaml
+++ /dev/null
@@ -1,37 +0,0 @@
-# hierarchy of targets
-
-aosp_arm64:
-- armv8
-- aosp_cf_arm64_phone
-
-armv8:
-- mainline_modules_arm64
-
-mainline_modules_arm64:
-- aosp_oriole
-- aosp_sunfish
-- aosp_raven
-
-aosp_oriole:
-- oriole
-
-aosp_sunfish:
-- sunfish
-
-aosp_raven:
-- raven
-
-oriole:
-# leaf
-
-sunfish:
-# leaf
-
-raven:
-# leaf
-
-aosp_cf_arm64_phone:
-- cf_arm64_phone
-
-cf_arm64_phone:
-# leaf
diff --git a/orchestrator/demo/utils.py b/orchestrator/demo/utils.py
deleted file mode 100644
index 5dbbe4a..0000000
--- a/orchestrator/demo/utils.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright (C) 2021 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import logging
-import os
-
-# default build configuration for each component
-DEFAULT_BUILDCMD = 'm'
-DEFAULT_OUTDIR = 'out'
-
-# yaml fields
-META_BUILDCMD = 'build_cmd'
-META_OUTDIR = 'out_dir'
-META_EXPORTS = 'exports'
-META_IMPORTS = 'imports'
-META_TARGETS = 'lunch_targets'
-META_DEPS = 'deps'
-# fields under 'exports' and 'imports'
-META_LIBS = 'libraries'
-META_APIS = 'APIs'
-META_FILEGROUP = 'filegroup'
-META_MODULES = 'modules'
-# fields under 'libraries'
-META_LIB_NAME = 'name'
-
-# fields for generated metadata file
-SOONG_IMPORTED = 'Imported'
-SOONG_IMPORTED_FILEGROUPS = 'FileGroups'
-SOONG_EXPORTED = 'Exported'
-
-# export map items
-EXP_COMPONENT = 'component'
-EXP_TYPE = 'type'
-EXP_OUTPATHS = 'outpaths'
-
-class BuildContext:
-
- def __init__(self):
- self._build_top = os.getenv('BUFFET_BUILD_TOP')
- self._components_top = os.getenv('BUFFET_COMPONENTS_TOP')
- self._target_product = os.getenv('BUFFET_TARGET_PRODUCT')
- self._target_build_variant = os.getenv('BUFFET_TARGET_BUILD_VARIANT')
- self._target_build_type = os.getenv('BUFFET_TARGET_BUILD_TYPE')
- self._out_dir = os.path.join(self._build_top, 'out')
-
- if not self._build_top:
- raise RuntimeError("Can't find root. Did you run buffet?")
-
- def build_top(self):
- return self._build_top
-
- def components_top(self):
- return self._components_top
-
- def target_product(self):
- return self._target_product
-
- def target_build_variant(self):
- return self._target_build_variant
-
- def target_build_type(self):
- return self._target_build_type
-
- def out_dir(self):
- return self._out_dir
-
-
-def get_build_context():
- return BuildContext()
-
-
-def set_logging_config(verbose_level):
- verbose_map = (logging.WARNING, logging.INFO, logging.DEBUG)
- verbosity = min(verbose_level, 2)
- logging.basicConfig(
- format='%(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
- level=verbose_map[verbosity])
diff --git a/orchestrator/inner_build/common.py b/orchestrator/inner_build/common.py
deleted file mode 100644
index 382844b..0000000
--- a/orchestrator/inner_build/common.py
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import sys
-
-def _parse_arguments(argv):
- argv = argv[1:]
- """Return an argparse options object."""
- # Top-level parser
- parser = argparse.ArgumentParser(prog=".inner_build")
-
- parser.add_argument("--out_dir", action="store", required=True,
- help="root of the output directory for this inner tree's API contributions")
-
- parser.add_argument("--api_domain", action="append", required=True,
- help="which API domains are to be built in this inner tree")
-
- subparsers = parser.add_subparsers(required=True, dest="command",
- help="subcommands")
-
- # inner_build describe command
- describe_parser = subparsers.add_parser("describe",
- help="describe the capabilities of this inner tree's build system")
-
- # create the parser for the "b" command
- export_parser = subparsers.add_parser("export_api_contributions",
- help="export the API contributions of this inner tree")
-
- # create the parser for the "b" command
- export_parser = subparsers.add_parser("analyze",
- help="main build analysis for this inner tree")
-
- # Parse the arguments
- return parser.parse_args(argv)
-
-
-class Commands(object):
- def Run(self, argv):
- """Parse the command arguments and call the corresponding subcommand method on
- this object.
-
- Throws AttributeError if the method for the command wasn't found.
- """
- args = _parse_arguments(argv)
- return getattr(self, args.command)(args)
-
diff --git a/orchestrator/inner_build/inner_build_demo.py b/orchestrator/inner_build/inner_build_demo.py
deleted file mode 100755
index 264739b..0000000
--- a/orchestrator/inner_build/inner_build_demo.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import sys
-import textwrap
-
-sys.dont_write_bytecode = True
-import common
-
-def mkdirs(path):
- try:
- os.makedirs(path)
- except FileExistsError:
- pass
-
-
-class InnerBuildSoong(common.Commands):
- def describe(self, args):
- mkdirs(args.out_dir)
-
- with open(os.path.join(args.out_dir, "tree_info.json"), "w") as f:
- f.write(textwrap.dedent("""\
- {
- "requires_ninja": true,
- "orchestrator_protocol_version": 1
- }"""))
-
- def export_api_contributions(self, args):
- contributions_dir = os.path.join(args.out_dir, "api_contributions")
- mkdirs(contributions_dir)
-
- if "system" in args.api_domain:
- with open(os.path.join(contributions_dir, "api_a-1.json"), "w") as f:
- # 'name: android' is android.jar
- f.write(textwrap.dedent("""\
- {
- "name": "api_a",
- "version": 1,
- "api_domain": "system",
- "cc_libraries": [
- {
- "name": "libhello1",
- "headers": [
- {
- "root": "build/build/make/orchestrator/test_workspace/inner_tree_1",
- "files": [
- "hello1.h"
- ]
- }
- ],
- "api": [
- "build/build/make/orchestrator/test_workspace/inner_tree_1/libhello1"
- ]
- }
- ]
- }"""))
-
- def analyze(self, args):
- if "system" in args.api_domain:
- # Nothing to export in this demo
- # Write a fake inner_tree.ninja; what the inner tree would have generated
- with open(os.path.join(args.out_dir, "inner_tree.ninja"), "w") as f:
- # TODO: Note that this uses paths relative to the workspace not the iner tree
- # for demo purposes until we get the ninja chdir change in.
- f.write(textwrap.dedent("""\
- rule compile_c
- command = mkdir -p ${out_dir} && g++ -c ${cflags} -o ${out} ${in}
- rule link_so
- command = mkdir -p ${out_dir} && gcc -shared -o ${out} ${in}
- build %(OUT_DIR)s/libhello1/hello1.o: compile_c build/build/make/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
- out_dir = %(OUT_DIR)s/libhello1
- cflags = -Ibuild/build/make/orchestrator/test_workspace/inner_tree_1/libhello1/include
- build %(OUT_DIR)s/libhello1/libhello1.so: link_so %(OUT_DIR)s/libhello1/hello1.o
- out_dir = %(OUT_DIR)s/libhello1
- build system: phony %(OUT_DIR)s/libhello1/libhello1.so
- """ % { "OUT_DIR": args.out_dir }))
- with open(os.path.join(args.out_dir, "build_targets.json"), "w") as f:
- f.write(textwrap.dedent("""\
- {
- "staging": [
- {
- "dest": "staging/system/lib/libhello1.so",
- "obj": "libhello1/libhello1.so"
- }
- ]
- }""" % { "OUT_DIR": args.out_dir }))
-
-def main(argv):
- return InnerBuildSoong().Run(argv)
-
-
-if __name__ == "__main__":
- sys.exit(main(sys.argv))
-
-
-# vim: sts=4:ts=4:sw=4
diff --git a/orchestrator/inner_build/inner_build_soong.py b/orchestrator/inner_build/inner_build_soong.py
deleted file mode 100755
index a653dcc..0000000
--- a/orchestrator/inner_build/inner_build_soong.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import sys
-
-sys.dont_write_bytecode = True
-import common
-
-class InnerBuildSoong(common.Commands):
- def describe(self, args):
- pass
-
-
- def export_api_contributions(self, args):
- pass
-
-
-def main(argv):
- return InnerBuildSoong().Run(argv)
-
-
-if __name__ == "__main__":
- sys.exit(main(sys.argv))
diff --git a/orchestrator/multitree_combos/aosp_cf_arm64_phone.mcombo b/orchestrator/multitree_combos/aosp_cf_arm64_phone.mcombo
deleted file mode 100644
index 0790226..0000000
--- a/orchestrator/multitree_combos/aosp_cf_arm64_phone.mcombo
+++ /dev/null
@@ -1,16 +0,0 @@
-{
- "lunchable": true,
- "system": {
- "tree": "master",
- "product": "aosp_cf_arm64_phone"
- },
- "vendor": {
- "tree": "master",
- "product": "aosp_cf_arm64_phone"
- },
- "modules": {
- "com.android.bionic": {
- "tree": "sc-mainline-prod"
- }
- }
-}
diff --git a/orchestrator/multitree_combos/test.mcombo b/orchestrator/multitree_combos/test.mcombo
deleted file mode 100644
index 3ad0717..0000000
--- a/orchestrator/multitree_combos/test.mcombo
+++ /dev/null
@@ -1,16 +0,0 @@
-{
- "lunchable": true,
- "system": {
- "tree": "inner_tree_system",
- "product": "system_lunch_product"
- },
- "vendor": {
- "tree": "inner_tree_vendor",
- "product": "vendor_lunch_product"
- },
- "modules": {
- "com.android.something": {
- "tree": "inner_tree_module"
- }
- }
-}
diff --git a/orchestrator/ninja/ninja_syntax.py b/orchestrator/ninja/ninja_syntax.py
deleted file mode 100644
index df97b68..0000000
--- a/orchestrator/ninja/ninja_syntax.py
+++ /dev/null
@@ -1,172 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from abc import ABC, abstractmethod
-
-from collections.abc import Iterator
-from typing import List
-
-TAB = " "
-
-class Node(ABC):
- '''An abstract class that can be serialized to a ninja file
- All other ninja-serializable classes inherit from this class'''
-
- @abstractmethod
- def stream(self) -> Iterator[str]:
- pass
-
-class Variable(Node):
- '''A ninja variable that can be reused across build actions
- https://ninja-build.org/manual.html#_variables'''
-
- def __init__(self, name:str, value:str, indent=0):
- self.name = name
- self.value = value
- self.indent = indent
-
- def stream(self) -> Iterator[str]:
- indent = TAB * self.indent
- yield f"{indent}{self.name} = {self.value}"
-
-class RuleException(Exception):
- pass
-
-# Ninja rules recognize a limited set of variables
-# https://ninja-build.org/manual.html#ref_rule
-# Keep this list sorted
-RULE_VARIABLES = ["command",
- "depfile",
- "deps",
- "description",
- "dyndep",
- "generator",
- "msvc_deps_prefix",
- "restat",
- "rspfile",
- "rspfile_content"]
-
-class Rule(Node):
- '''A shorthand for a command line that can be reused
- https://ninja-build.org/manual.html#_rules'''
-
- def __init__(self, name:str):
- self.name = name
- self.variables = []
-
- def add_variable(self, name: str, value: str):
- if name not in RULE_VARIABLES:
- raise RuleException(f"{name} is not a recognized variable in a ninja rule")
-
- self.variables.append(Variable(name=name, value=value, indent=1))
-
- def stream(self) -> Iterator[str]:
- self._validate_rule()
-
- yield f"rule {self.name}"
- # Yield rule variables sorted by `name`
- for var in sorted(self.variables, key=lambda x: x.name):
- # variables yield a single item, next() is sufficient
- yield next(var.stream())
-
- def _validate_rule(self):
- # command is a required variable in a ninja rule
- self._assert_variable_is_not_empty(variable_name="command")
-
- def _assert_variable_is_not_empty(self, variable_name: str):
- if not any(var.name == variable_name for var in self.variables):
- raise RuleException(f"{variable_name} is required in a ninja rule")
-
-class BuildActionException(Exception):
- pass
-
-class BuildAction(Node):
- '''Describes the dependency edge between inputs and output
- https://ninja-build.org/manual.html#_build_statements'''
-
- def __init__(self, output: str, rule: str, inputs: List[str]=None, implicits: List[str]=None, order_only: List[str]=None):
- self.output = output
- self.rule = rule
- self.inputs = self._as_list(inputs)
- self.implicits = self._as_list(implicits)
- self.order_only = self._as_list(order_only)
- self.variables = []
-
- def add_variable(self, name: str, value: str):
- '''Variables limited to the scope of this build action'''
- self.variables.append(Variable(name=name, value=value, indent=1))
-
- def stream(self) -> Iterator[str]:
- self._validate()
-
- build_statement = f"build {self.output}: {self.rule}"
- if len(self.inputs) > 0:
- build_statement += " "
- build_statement += " ".join(self.inputs)
- if len(self.implicits) > 0:
- build_statement += " | "
- build_statement += " ".join(self.implicits)
- if len(self.order_only) > 0:
- build_statement += " || "
- build_statement += " ".join(self.order_only)
- yield build_statement
- # Yield variables sorted by `name`
- for var in sorted(self.variables, key=lambda x: x.name):
- # variables yield a single item, next() is sufficient
- yield next(var.stream())
-
- def _validate(self):
- if not self.output:
- raise BuildActionException("Output is required in a ninja build statement")
- if not self.rule:
- raise BuildActionException("Rule is required in a ninja build statement")
-
- def _as_list(self, list_like):
- if list_like is None:
- return []
- if isinstance(list_like, list):
- return list_like
- return [list_like]
-
-class Pool(Node):
- '''https://ninja-build.org/manual.html#ref_pool'''
-
- def __init__(self, name: str, depth: int):
- self.name = name
- self.depth = Variable(name="depth", value=depth, indent=1)
-
- def stream(self) -> Iterator[str]:
- yield f"pool {self.name}"
- yield next(self.depth.stream())
-
-class Subninja(Node):
-
- def __init__(self, subninja: str, chDir: str):
- self.subninja = subninja
- self.chDir = chDir
-
- # TODO(spandandas): Update the syntax when aosp/2064612 lands
- def stream(self) -> Iterator[str]:
- yield f"subninja {self.subninja}"
-
-class Line(Node):
- '''Generic class that can be used for comments/newlines/default_target etc'''
-
- def __init__(self, value:str):
- self.value = value
-
- def stream(self) -> Iterator[str]:
- yield self.value
diff --git a/orchestrator/ninja/ninja_writer.py b/orchestrator/ninja/ninja_writer.py
deleted file mode 100644
index 9e80b4b..0000000
--- a/orchestrator/ninja/ninja_writer.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ninja_syntax import Variable, BuildAction, Rule, Pool, Subninja, Line
-
-# TODO: Format the output according to a configurable width variable
-# This will ensure that the generated content fits on a screen and does not
-# require horizontal scrolling
-class Writer:
-
- def __init__(self, file):
- self.file = file
- self.nodes = [] # type Node
-
- def add_variable(self, variable: Variable):
- self.nodes.append(variable)
-
- def add_rule(self, rule: Rule):
- self.nodes.append(rule)
-
- def add_build_action(self, build_action: BuildAction):
- self.nodes.append(build_action)
-
- def add_pool(self, pool: Pool):
- self.nodes.append(pool)
-
- def add_comment(self, comment: str):
- self.nodes.append(Line(value=f"# {comment}"))
-
- def add_default(self, default: str):
- self.nodes.append(Line(value=f"default {default}"))
-
- def add_newline(self):
- self.nodes.append(Line(value=""))
-
- def add_subninja(self, subninja: Subninja):
- self.nodes.append(subninja)
-
- def add_phony(self, name, deps):
- build_action = BuildAction(name, "phony", inputs=deps)
- self.add_build_action(build_action)
-
- def write(self):
- for node in self.nodes:
- for line in node.stream():
- print(line, file=self.file)
diff --git a/orchestrator/ninja/test_ninja_syntax.py b/orchestrator/ninja/test_ninja_syntax.py
deleted file mode 100644
index d922fd2..0000000
--- a/orchestrator/ninja/test_ninja_syntax.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from ninja_syntax import Variable, Rule, RuleException, BuildAction, BuildActionException, Pool
-
-class TestVariable(unittest.TestCase):
-
- def test_assignment(self):
- variable = Variable(name="key", value="value")
- self.assertEqual("key = value", next(variable.stream()))
- variable = Variable(name="key", value="value with spaces")
- self.assertEqual("key = value with spaces", next(variable.stream()))
- variable = Variable(name="key", value="$some_other_variable")
- self.assertEqual("key = $some_other_variable", next(variable.stream()))
-
- def test_indentation(self):
- variable = Variable(name="key", value="value", indent=0)
- self.assertEqual("key = value", next(variable.stream()))
- variable = Variable(name="key", value="value", indent=1)
- self.assertEqual(" key = value", next(variable.stream()))
-
-class TestRule(unittest.TestCase):
-
- def test_rulename_comes_first(self):
- rule = Rule(name="myrule")
- rule.add_variable("command", "/bin/bash echo")
- self.assertEqual("rule myrule", next(rule.stream()))
-
- def test_command_is_a_required_variable(self):
- rule = Rule(name="myrule")
- with self.assertRaises(RuleException):
- next(rule.stream())
-
- def test_bad_rule_variable(self):
- rule = Rule(name="myrule")
- with self.assertRaises(RuleException):
- rule.add_variable(name="unrecognize_rule_variable", value="value")
-
- def test_rule_variables_are_indented(self):
- rule = Rule(name="myrule")
- rule.add_variable("command", "/bin/bash echo")
- stream = rule.stream()
- self.assertEqual("rule myrule", next(stream)) # top-level rule should not be indented
- self.assertEqual(" command = /bin/bash echo", next(stream))
-
- def test_rule_variables_are_sorted(self):
- rule = Rule(name="myrule")
- rule.add_variable("description", "Adding description before command")
- rule.add_variable("command", "/bin/bash echo")
- stream = rule.stream()
- self.assertEqual("rule myrule", next(stream)) # rule always comes first
- self.assertEqual(" command = /bin/bash echo", next(stream))
- self.assertEqual(" description = Adding description before command", next(stream))
-
-class TestBuildAction(unittest.TestCase):
-
- def test_no_inputs(self):
- build = BuildAction(output="out", rule="phony")
- stream = build.stream()
- self.assertEqual("build out: phony", next(stream))
- # Empty output
- build = BuildAction(output="", rule="phony")
- with self.assertRaises(BuildActionException):
- next(build.stream())
- # Empty rule
- build = BuildAction(output="out", rule="")
- with self.assertRaises(BuildActionException):
- next(build.stream())
-
- def test_inputs(self):
- build = BuildAction(output="out", rule="cat", inputs=["input1", "input2"])
- self.assertEqual("build out: cat input1 input2", next(build.stream()))
- build = BuildAction(output="out", rule="cat", inputs=["input1", "input2"], implicits=["implicits1", "implicits2"], order_only=["order_only1", "order_only2"])
- self.assertEqual("build out: cat input1 input2 | implicits1 implicits2 || order_only1 order_only2", next(build.stream()))
-
- def test_variables(self):
- build = BuildAction(output="out", rule="cat", inputs=["input1", "input2"])
- build.add_variable(name="myvar", value="myval")
- stream = build.stream()
- next(stream)
- self.assertEqual(" myvar = myval", next(stream))
-
-class TestPool(unittest.TestCase):
-
- def test_pool(self):
- pool = Pool(name="mypool", depth=10)
- stream = pool.stream()
- self.assertEqual("pool mypool", next(stream))
- self.assertEqual(" depth = 10", next(stream))
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/orchestrator/ninja/test_ninja_writer.py b/orchestrator/ninja/test_ninja_writer.py
deleted file mode 100644
index 703dd4d..0000000
--- a/orchestrator/ninja/test_ninja_writer.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from io import StringIO
-
-from ninja_writer import Writer
-from ninja_syntax import Variable, Rule, BuildAction
-
-class TestWriter(unittest.TestCase):
-
- def test_simple_writer(self):
- with StringIO() as f:
- writer = Writer(f)
- writer.add_variable(Variable(name="cflags", value="-Wall"))
- writer.add_newline()
- cc = Rule(name="cc")
- cc.add_variable(name="command", value="gcc $cflags -c $in -o $out")
- writer.add_rule(cc)
- writer.add_newline()
- build_action = BuildAction(output="foo.o", rule="cc", inputs=["foo.c"])
- writer.add_build_action(build_action)
- writer.write()
- self.assertEqual('''cflags = -Wall
-
-rule cc
- command = gcc $cflags -c $in -o $out
-
-build foo.o: cc foo.c
-''', f.getvalue())
-
- def test_comment(self):
- with StringIO() as f:
- writer = Writer(f)
- writer.add_comment("This is a comment in a ninja file")
- writer.write()
- self.assertEqual("# This is a comment in a ninja file\n", f.getvalue())
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/orchestrator/test_workspace/combo.mcombo b/orchestrator/test_workspace/combo.mcombo
deleted file mode 100644
index 8200dc0..0000000
--- a/orchestrator/test_workspace/combo.mcombo
+++ /dev/null
@@ -1,17 +0,0 @@
-{
- "lunchable": true,
- "system": {
- "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1",
- "product": "test_product1"
- },
- "vendor": {
- "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1",
- "product": "test_product2"
- },
- "modules": {
- "module_1": {
- "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1"
- }
- }
-}
-
diff --git a/orchestrator/test_workspace/inner_tree_1/.inner_build b/orchestrator/test_workspace/inner_tree_1/.inner_build
deleted file mode 120000
index d8f235f..0000000
--- a/orchestrator/test_workspace/inner_tree_1/.inner_build
+++ /dev/null
@@ -1 +0,0 @@
-../../inner_build/inner_build_demo.py
\ No newline at end of file
diff --git a/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c b/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
deleted file mode 100644
index 1415082..0000000
--- a/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
+++ /dev/null
@@ -1,8 +0,0 @@
-#include <stdio.h>
-
-#include "hello1.h"
-
-void hello1(void) {
- printf("hello1");
-}
-
diff --git a/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h b/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h
deleted file mode 100644
index 0309c1c..0000000
--- a/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#pragma once
-
-extern "C" void hello1(void);
-
diff --git a/target/board/BoardConfigPixelCommon.mk b/target/board/BoardConfigPixelCommon.mk
index a970fec..22521b5 100644
--- a/target/board/BoardConfigPixelCommon.mk
+++ b/target/board/BoardConfigPixelCommon.mk
@@ -5,6 +5,7 @@
# Using sha256 for dm-verity partitions. b/156162446
# system, system_other, system_ext and product.
BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
+BOARD_AVB_SYSTEM_DLKM_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
BOARD_AVB_SYSTEM_OTHER_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
BOARD_AVB_SYSTEM_EXT_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
BOARD_AVB_PRODUCT_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
diff --git a/target/product/OWNERS b/target/product/OWNERS
index b3d8998..61f7d45 100644
--- a/target/product/OWNERS
+++ b/target/product/OWNERS
@@ -3,3 +3,8 @@
# GSI
per-file gsi_release.mk = file:/target/product/gsi/OWNERS
per-file developer_gsi_keys.mk = file:/target/product/gsi/OWNERS
+
+# Android Go
+per-file go_defaults.mk = gkaiser@google.com, rajekumar@google.com
+per-file go_defaults_512.mk = gkaiser@google.com, rajekumar@google.com
+per-file go_defaults_common.mk = gkaiser@google.com, rajekumar@google.com
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 90a2577..a0627b7 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -50,7 +50,9 @@
charger \
cmd \
com.android.adbd \
+ com.android.adservices \
com.android.appsearch \
+ com.android.btservices \
com.android.conscrypt \
com.android.cronet \
com.android.extservices \
@@ -60,15 +62,16 @@
com.android.media \
com.android.media.swcodec \
com.android.mediaprovider \
+ com.android.ondevicepersonalization \
com.android.os.statsd \
com.android.permission \
com.android.resolv \
com.android.neuralnetworks \
com.android.scheduling \
com.android.sdkext \
- com.android.sepolicy \
com.android.tethering \
com.android.tzdata \
+ com.android.uwb \
com.android.wifi \
ContactsProvider \
content \
@@ -366,7 +369,6 @@
PRODUCT_PACKAGES_DEBUG := \
adb_keys \
arping \
- com.android.sepolicy.cert-debug.der \
dmuserd \
idlcli \
init-debug.rc \
@@ -378,7 +380,6 @@
procrank \
profcollectd \
profcollectctl \
- remount \
servicedispatcher \
showmap \
sqlite3 \
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index 5004b85..fbc6ccc 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -42,7 +42,6 @@
# Base modules and settings for the vendor partition.
PRODUCT_PACKAGES += \
android.hardware.cas@1.2-service \
- android.hardware.media.omx@1.0-service \
boringssl_self_test_vendor \
dumpsys_vendor \
fs_config_files_nonsystem \
@@ -69,6 +68,13 @@
selinux_policy_nonsystem \
shell_and_utilities_vendor \
+# OMX not supported for 64bit_only builds
+ifneq ($(TARGET_SUPPORTS_OMX_SERVICE),false)
+ PRODUCT_PACKAGES += \
+ android.hardware.media.omx@1.0-service \
+
+endif
+
# Base module when shipping api level is less than or equal to 29
PRODUCT_PACKAGES_SHIPPING_API_LEVEL_29 += \
android.hardware.configstore@1.1-service \
diff --git a/target/product/core_64_bit_only.mk b/target/product/core_64_bit_only.mk
index 061728f..fc2b8e5 100644
--- a/target/product/core_64_bit_only.mk
+++ b/target/product/core_64_bit_only.mk
@@ -31,3 +31,4 @@
TARGET_SUPPORTS_32_BIT_APPS := false
TARGET_SUPPORTS_64_BIT_APPS := true
+TARGET_SUPPORTS_OMX_SERVICE := false
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 5695803..901302e 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -51,12 +51,16 @@
# Note: core-icu4j is moved back to PRODUCT_BOOT_JARS in product_config.mk at a later stage.
# Note: For modules available in Q, DO NOT add new entries here.
PRODUCT_APEX_BOOT_JARS := \
+ com.android.adservices:framework-adservices \
+ com.android.adservices:framework-sdksandbox \
com.android.appsearch:framework-appsearch \
+ com.android.btservices:framework-bluetooth \
com.android.conscrypt:conscrypt \
com.android.i18n:core-icu4j \
com.android.ipsec:android.net.ipsec.ike \
com.android.media:updatable-media \
com.android.mediaprovider:framework-mediaprovider \
+ com.android.ondevicepersonalization:framework-ondevicepersonalization \
com.android.os.statsd:framework-statsd \
com.android.permission:framework-permission \
com.android.permission:framework-permission-s \
@@ -65,12 +69,15 @@
com.android.tethering:framework-connectivity \
com.android.tethering:framework-connectivity-t \
com.android.tethering:framework-tethering \
- com.android.wifi:framework-wifi
+ com.android.uwb:framework-uwb \
+ com.android.wifi:framework-wifi \
# List of system_server classpath jars delivered via apex.
# Keep the list sorted by module names and then library names.
# Note: For modules available in Q, DO NOT add new entries here.
PRODUCT_APEX_SYSTEM_SERVER_JARS := \
+ com.android.adservices:service-adservices \
+ com.android.adservices:service-sdksandbox \
com.android.appsearch:service-appsearch \
com.android.art:service-art \
com.android.media:service-media-s \
@@ -90,9 +97,11 @@
# Keep the list sorted by module names and then library names.
# Note: For modules available in Q, DO NOT add new entries here.
PRODUCT_APEX_STANDALONE_SYSTEM_SERVER_JARS := \
+ com.android.btservices:service-bluetooth \
com.android.os.statsd:service-statsd \
com.android.scheduling:service-scheduling \
com.android.tethering:service-connectivity \
+ com.android.uwb:service-uwb \
com.android.wifi:service-wifi \
# Minimal configuration for running dex2oat (default argument values).
diff --git a/target/product/full_base.mk b/target/product/full_base.mk
index a8e1e91..39c66da3 100644
--- a/target/product/full_base.mk
+++ b/target/product/full_base.mk
@@ -28,12 +28,6 @@
PhotoTable \
preinstalled-packages-platform-full-base.xml
-# Bluetooth:
-# audio.a2dp.default is a system module. Generic system image includes
-# audio.a2dp.default to support A2DP if board has the capability.
-PRODUCT_PACKAGES += \
- audio.a2dp.default
-
# Net:
# Vendors can use the platform-provided network configuration utilities (ip,
# iptable, etc.) to configure the Linux networking stack, but these utilities
diff --git a/target/product/generic_system.mk b/target/product/generic_system.mk
index f13c9db..1a639ef 100644
--- a/target/product/generic_system.mk
+++ b/target/product/generic_system.mk
@@ -52,11 +52,6 @@
cppreopts.sh \
otapreopt_script \
-# Bluetooth libraries
-PRODUCT_PACKAGES += \
- audio.a2dp.default \
- audio.hearing_aid.default \
-
# For ringtones that rely on forward lock encryption
PRODUCT_PACKAGES += libfwdlockengine
diff --git a/target/product/gsi/33.txt b/target/product/gsi/33.txt
new file mode 100644
index 0000000..03a143d
--- /dev/null
+++ b/target/product/gsi/33.txt
@@ -0,0 +1,254 @@
+LLNDK: libEGL.so
+LLNDK: libGLESv1_CM.so
+LLNDK: libGLESv2.so
+LLNDK: libGLESv3.so
+LLNDK: libRS.so
+LLNDK: libandroid_net.so
+LLNDK: libbinder_ndk.so
+LLNDK: libc.so
+LLNDK: libcgrouprc.so
+LLNDK: libdl.so
+LLNDK: libft2.so
+LLNDK: liblog.so
+LLNDK: libm.so
+LLNDK: libmediandk.so
+LLNDK: libnativewindow.so
+LLNDK: libneuralnetworks.so
+LLNDK: libselinux.so
+LLNDK: libsync.so
+LLNDK: libvndksupport.so
+LLNDK: libvulkan.so
+VNDK-SP: android.hardware.common-V2-ndk.so
+VNDK-SP: android.hardware.common.fmq-V1-ndk.so
+VNDK-SP: android.hardware.graphics.allocator-V1-ndk.so
+VNDK-SP: android.hardware.graphics.common-V3-ndk.so
+VNDK-SP: android.hardware.graphics.common@1.0.so
+VNDK-SP: android.hardware.graphics.common@1.1.so
+VNDK-SP: android.hardware.graphics.common@1.2.so
+VNDK-SP: android.hardware.graphics.composer3-V1-ndk.so
+VNDK-SP: android.hardware.graphics.mapper@2.0.so
+VNDK-SP: android.hardware.graphics.mapper@2.1.so
+VNDK-SP: android.hardware.graphics.mapper@3.0.so
+VNDK-SP: android.hardware.graphics.mapper@4.0.so
+VNDK-SP: android.hardware.renderscript@1.0.so
+VNDK-SP: android.hidl.memory.token@1.0.so
+VNDK-SP: android.hidl.memory@1.0-impl.so
+VNDK-SP: android.hidl.memory@1.0.so
+VNDK-SP: android.hidl.safe_union@1.0.so
+VNDK-SP: libRSCpuRef.so
+VNDK-SP: libRSDriver.so
+VNDK-SP: libRS_internal.so
+VNDK-SP: libbacktrace.so
+VNDK-SP: libbase.so
+VNDK-SP: libbcinfo.so
+VNDK-SP: libblas.so
+VNDK-SP: libc++.so
+VNDK-SP: libcompiler_rt.so
+VNDK-SP: libcutils.so
+VNDK-SP: libdmabufheap.so
+VNDK-SP: libgralloctypes.so
+VNDK-SP: libhardware.so
+VNDK-SP: libhidlbase.so
+VNDK-SP: libhidlmemory.so
+VNDK-SP: libion.so
+VNDK-SP: libjsoncpp.so
+VNDK-SP: liblzma.so
+VNDK-SP: libprocessgroup.so
+VNDK-SP: libunwindstack.so
+VNDK-SP: libutils.so
+VNDK-SP: libutilscallstack.so
+VNDK-SP: libz.so
+VNDK-core: android.hardware.audio.common-V1-ndk.so
+VNDK-core: android.hardware.audio.common@2.0.so
+VNDK-core: android.hardware.authsecret-V1-ndk.so
+VNDK-core: android.hardware.automotive.occupant_awareness-V1-ndk.so
+VNDK-core: android.hardware.bluetooth.audio-V2-ndk.so
+VNDK-core: android.hardware.camera.common-V1-ndk.so
+VNDK-core: android.hardware.camera.device-V1-ndk.so
+VNDK-core: android.hardware.camera.metadata-V1-ndk.so
+VNDK-core: android.hardware.camera.provider-V1-ndk.so
+VNDK-core: android.hardware.configstore-utils.so
+VNDK-core: android.hardware.configstore@1.0.so
+VNDK-core: android.hardware.configstore@1.1.so
+VNDK-core: android.hardware.confirmationui-support-lib.so
+VNDK-core: android.hardware.drm-V1-ndk.so
+VNDK-core: android.hardware.dumpstate-V1-ndk.so
+VNDK-core: android.hardware.gnss-V2-ndk.so
+VNDK-core: android.hardware.graphics.allocator@2.0.so
+VNDK-core: android.hardware.graphics.allocator@3.0.so
+VNDK-core: android.hardware.graphics.allocator@4.0.so
+VNDK-core: android.hardware.graphics.bufferqueue@1.0.so
+VNDK-core: android.hardware.graphics.bufferqueue@2.0.so
+VNDK-core: android.hardware.health-V1-ndk.so
+VNDK-core: android.hardware.health.storage-V1-ndk.so
+VNDK-core: android.hardware.identity-V4-ndk.so
+VNDK-core: android.hardware.ir-V1-ndk.so
+VNDK-core: android.hardware.keymaster-V3-ndk.so
+VNDK-core: android.hardware.light-V2-ndk.so
+VNDK-core: android.hardware.media.bufferpool@2.0.so
+VNDK-core: android.hardware.media.omx@1.0.so
+VNDK-core: android.hardware.media@1.0.so
+VNDK-core: android.hardware.memtrack-V1-ndk.so
+VNDK-core: android.hardware.memtrack@1.0.so
+VNDK-core: android.hardware.nfc-V1-ndk.so
+VNDK-core: android.hardware.oemlock-V1-ndk.so
+VNDK-core: android.hardware.power-V3-ndk.so
+VNDK-core: android.hardware.power.stats-V1-ndk.so
+VNDK-core: android.hardware.radio-V1-ndk.so
+VNDK-core: android.hardware.radio.config-V1-ndk.so
+VNDK-core: android.hardware.radio.data-V1-ndk.so
+VNDK-core: android.hardware.radio.messaging-V1-ndk.so
+VNDK-core: android.hardware.radio.modem-V1-ndk.so
+VNDK-core: android.hardware.radio.network-V1-ndk.so
+VNDK-core: android.hardware.radio.sim-V1-ndk.so
+VNDK-core: android.hardware.radio.voice-V1-ndk.so
+VNDK-core: android.hardware.rebootescrow-V1-ndk.so
+VNDK-core: android.hardware.security.dice-V1-ndk.so
+VNDK-core: android.hardware.security.keymint-V2-ndk.so
+VNDK-core: android.hardware.security.secureclock-V1-ndk.so
+VNDK-core: android.hardware.security.sharedsecret-V1-ndk.so
+VNDK-core: android.hardware.sensors-V1-ndk.so
+VNDK-core: android.hardware.soundtrigger3-V1-ndk.so
+VNDK-core: android.hardware.soundtrigger@2.0-core.so
+VNDK-core: android.hardware.soundtrigger@2.0.so
+VNDK-core: android.hardware.usb-V1-ndk.so
+VNDK-core: android.hardware.uwb-V1-ndk.so
+VNDK-core: android.hardware.vibrator-V2-ndk.so
+VNDK-core: android.hardware.weaver-V1-ndk.so
+VNDK-core: android.hardware.wifi.hostapd-V1-ndk.so
+VNDK-core: android.hardware.wifi.supplicant-V1-ndk.so
+VNDK-core: android.hidl.token@1.0-utils.so
+VNDK-core: android.hidl.token@1.0.so
+VNDK-core: android.media.audio.common.types-V1-ndk.so
+VNDK-core: android.media.soundtrigger.types-V1-ndk.so
+VNDK-core: android.system.keystore2-V2-ndk.so
+VNDK-core: android.system.suspend-V1-ndk.so
+VNDK-core: android.system.suspend@1.0.so
+VNDK-core: libaudioroute.so
+VNDK-core: libaudioutils.so
+VNDK-core: libbinder.so
+VNDK-core: libbufferqueueconverter.so
+VNDK-core: libcamera_metadata.so
+VNDK-core: libcap.so
+VNDK-core: libcn-cbor.so
+VNDK-core: libcodec2.so
+VNDK-core: libcrypto.so
+VNDK-core: libcrypto_utils.so
+VNDK-core: libcurl.so
+VNDK-core: libdiskconfig.so
+VNDK-core: libdumpstateutil.so
+VNDK-core: libevent.so
+VNDK-core: libexif.so
+VNDK-core: libexpat.so
+VNDK-core: libfmq.so
+VNDK-core: libgatekeeper.so
+VNDK-core: libgui.so
+VNDK-core: libhardware_legacy.so
+VNDK-core: libhidlallocatorutils.so
+VNDK-core: libjpeg.so
+VNDK-core: libldacBT_abr.so
+VNDK-core: libldacBT_enc.so
+VNDK-core: liblz4.so
+VNDK-core: libmedia_helper.so
+VNDK-core: libmedia_omx.so
+VNDK-core: libmemtrack.so
+VNDK-core: libminijail.so
+VNDK-core: libmkbootimg_abi_check.so
+VNDK-core: libnetutils.so
+VNDK-core: libnl.so
+VNDK-core: libpcre2.so
+VNDK-core: libpiex.so
+VNDK-core: libpng.so
+VNDK-core: libpower.so
+VNDK-core: libprocinfo.so
+VNDK-core: libradio_metadata.so
+VNDK-core: libspeexresampler.so
+VNDK-core: libsqlite.so
+VNDK-core: libssl.so
+VNDK-core: libstagefright_bufferpool@2.0.so
+VNDK-core: libstagefright_bufferqueue_helper.so
+VNDK-core: libstagefright_foundation.so
+VNDK-core: libstagefright_omx.so
+VNDK-core: libstagefright_omx_utils.so
+VNDK-core: libstagefright_xmlparser.so
+VNDK-core: libsysutils.so
+VNDK-core: libtinyalsa.so
+VNDK-core: libtinyxml2.so
+VNDK-core: libui.so
+VNDK-core: libusbhost.so
+VNDK-core: libwifi-system-iface.so
+VNDK-core: libxml2.so
+VNDK-core: libyuv.so
+VNDK-core: libziparchive.so
+VNDK-private: libbacktrace.so
+VNDK-private: libblas.so
+VNDK-private: libcompiler_rt.so
+VNDK-private: libft2.so
+VNDK-private: libgui.so
+VNDK-product: android.hardware.audio.common@2.0.so
+VNDK-product: android.hardware.configstore@1.0.so
+VNDK-product: android.hardware.configstore@1.1.so
+VNDK-product: android.hardware.graphics.allocator@2.0.so
+VNDK-product: android.hardware.graphics.allocator@3.0.so
+VNDK-product: android.hardware.graphics.allocator@4.0.so
+VNDK-product: android.hardware.graphics.bufferqueue@1.0.so
+VNDK-product: android.hardware.graphics.bufferqueue@2.0.so
+VNDK-product: android.hardware.graphics.common@1.0.so
+VNDK-product: android.hardware.graphics.common@1.1.so
+VNDK-product: android.hardware.graphics.common@1.2.so
+VNDK-product: android.hardware.graphics.mapper@2.0.so
+VNDK-product: android.hardware.graphics.mapper@2.1.so
+VNDK-product: android.hardware.graphics.mapper@3.0.so
+VNDK-product: android.hardware.graphics.mapper@4.0.so
+VNDK-product: android.hardware.media.bufferpool@2.0.so
+VNDK-product: android.hardware.media.omx@1.0.so
+VNDK-product: android.hardware.media@1.0.so
+VNDK-product: android.hardware.memtrack@1.0.so
+VNDK-product: android.hardware.renderscript@1.0.so
+VNDK-product: android.hardware.soundtrigger@2.0.so
+VNDK-product: android.hidl.memory.token@1.0.so
+VNDK-product: android.hidl.memory@1.0.so
+VNDK-product: android.hidl.safe_union@1.0.so
+VNDK-product: android.hidl.token@1.0.so
+VNDK-product: android.system.suspend@1.0.so
+VNDK-product: libaudioutils.so
+VNDK-product: libbacktrace.so
+VNDK-product: libbase.so
+VNDK-product: libc++.so
+VNDK-product: libcamera_metadata.so
+VNDK-product: libcap.so
+VNDK-product: libcompiler_rt.so
+VNDK-product: libcrypto.so
+VNDK-product: libcurl.so
+VNDK-product: libcutils.so
+VNDK-product: libevent.so
+VNDK-product: libexpat.so
+VNDK-product: libfmq.so
+VNDK-product: libhidlbase.so
+VNDK-product: libhidlmemory.so
+VNDK-product: libion.so
+VNDK-product: libjpeg.so
+VNDK-product: libjsoncpp.so
+VNDK-product: libldacBT_abr.so
+VNDK-product: libldacBT_enc.so
+VNDK-product: liblz4.so
+VNDK-product: liblzma.so
+VNDK-product: libminijail.so
+VNDK-product: libnl.so
+VNDK-product: libpcre2.so
+VNDK-product: libpiex.so
+VNDK-product: libpng.so
+VNDK-product: libprocessgroup.so
+VNDK-product: libprocinfo.so
+VNDK-product: libspeexresampler.so
+VNDK-product: libssl.so
+VNDK-product: libtinyalsa.so
+VNDK-product: libtinyxml2.so
+VNDK-product: libunwindstack.so
+VNDK-product: libutils.so
+VNDK-product: libutilscallstack.so
+VNDK-product: libwifi-system-iface.so
+VNDK-product: libxml2.so
+VNDK-product: libyuv.so
+VNDK-product: libz.so
+VNDK-product: libziparchive.so
diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt
index f33e626..94892dc 100644
--- a/target/product/gsi/current.txt
+++ b/target/product/gsi/current.txt
@@ -20,10 +20,12 @@
LLNDK: libvulkan.so
VNDK-SP: android.hardware.common-V2-ndk.so
VNDK-SP: android.hardware.common.fmq-V1-ndk.so
-VNDK-SP: android.hardware.graphics.common-V2-ndk.so
+VNDK-SP: android.hardware.graphics.allocator-V1-ndk.so
+VNDK-SP: android.hardware.graphics.common-V3-ndk.so
VNDK-SP: android.hardware.graphics.common@1.0.so
VNDK-SP: android.hardware.graphics.common@1.1.so
VNDK-SP: android.hardware.graphics.common@1.2.so
+VNDK-SP: android.hardware.graphics.composer3-V1-ndk.so
VNDK-SP: android.hardware.graphics.mapper@2.0.so
VNDK-SP: android.hardware.graphics.mapper@2.1.so
VNDK-SP: android.hardware.graphics.mapper@3.0.so
diff --git a/target/product/handheld_system.mk b/target/product/handheld_system.mk
index 3a59f6c..41233b2 100644
--- a/target/product/handheld_system.mk
+++ b/target/product/handheld_system.mk
@@ -34,7 +34,6 @@
PRODUCT_PACKAGES += \
BasicDreams \
BlockedNumberProvider \
- Bluetooth \
BluetoothMidiService \
BookmarkProvider \
BuiltInPrintService \
diff --git a/target/product/iorap_large_memory_config.mk b/target/product/iorap_large_memory_config.mk
deleted file mode 100644
index 0c6c89a..0000000
--- a/target/product/iorap_large_memory_config.mk
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk
index ad25a92..4bd8efc 100644
--- a/target/product/security/Android.mk
+++ b/target/product/security/Android.mk
@@ -1,43 +1,6 @@
LOCAL_PATH:= $(call my-dir)
#######################################
-# verity_key (installed to /, i.e. part of system.img)
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := verity_key
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_SRC_FILES := $(LOCAL_MODULE)
-LOCAL_MODULE_CLASS := ETC
-LOCAL_MODULE_PATH := $(TARGET_ROOT_OUT)
-
-# For devices using a separate ramdisk, we need a copy there to establish the chain of trust.
-ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-LOCAL_REQUIRED_MODULES := verity_key_ramdisk
-endif
-
-include $(BUILD_PREBUILT)
-
-#######################################
-# verity_key (installed to ramdisk)
-#
-# Enabling the target when using system-as-root would cause build failure, as TARGET_RAMDISK_OUT
-# points to the same location as TARGET_ROOT_OUT.
-ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
- include $(CLEAR_VARS)
- LOCAL_MODULE := verity_key_ramdisk
- LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
- LOCAL_LICENSE_CONDITIONS := notice
- LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
- LOCAL_MODULE_CLASS := ETC
- LOCAL_SRC_FILES := verity_key
- LOCAL_MODULE_STEM := verity_key
- LOCAL_MODULE_PATH := $(TARGET_RAMDISK_OUT)
- include $(BUILD_PREBUILT)
-endif
-
-#######################################
# adb key, if configured via PRODUCT_ADB_KEYS
ifdef PRODUCT_ADB_KEYS
ifneq ($(filter eng userdebug,$(TARGET_BUILD_VARIANT)),)
diff --git a/target/product/security/bluetooth.pk8 b/target/product/security/bluetooth.pk8
new file mode 100644
index 0000000..c6ea434
--- /dev/null
+++ b/target/product/security/bluetooth.pk8
Binary files differ
diff --git a/target/product/security/bluetooth.x509.pem b/target/product/security/bluetooth.x509.pem
new file mode 100644
index 0000000..396d7c9
--- /dev/null
+++ b/target/product/security/bluetooth.x509.pem
@@ -0,0 +1,36 @@
+-----BEGIN CERTIFICATE-----
+MIIGOzCCBCOgAwIBAgIUEiZapaWZVSter06CJMf2kHi8PIswDQYJKoZIhvcNAQEL
+BQAwgasxCzAJBgNVBAYTAlVTMRMwEQYDVQQIDApDYWxpZm9ybmlhMRYwFAYDVQQH
+DA1Nb3VudGFpbiBWaWV3MRAwDgYDVQQKDAdBbmRyb2lkMRAwDgYDVQQLDAdBbmRy
+b2lkMScwJQYDVQQDDB5jb20uYW5kcm9pZC5ibHVldG9vdGguc2VydmljZXMxIjAg
+BgkqhkiG9w0BCQEWE2FuZHJvaWRAYW5kcm9pZC5jb20wIBcNMjIwMzE1MDAzNjAz
+WhgPNDc2MDAyMDkwMDM2MDNaMIGrMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2Fs
+aWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4GA1UECgwHQW5kcm9p
+ZDEQMA4GA1UECwwHQW5kcm9pZDEnMCUGA1UEAwweY29tLmFuZHJvaWQuYmx1ZXRv
+b3RoLnNlcnZpY2VzMSIwIAYJKoZIhvcNAQkBFhNhbmRyb2lkQGFuZHJvaWQuY29t
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAsVlq9pozUREGlb8u8Y0A
+fYwPs5OuavNx/EsX03aTjmAXUfSOMAewqzUXDIRjw8UQvOW63utaZ0go9osDPzNf
+VEftmGxW/AUC+HWGaLDQfCYO3ficPPOS7xpEhGZERNbnhvh5qX0NBt6mJygsfpOm
+RPThbi6Ig2Brxh1eqVYqRkTjhNFKD6gCd1PdMmUSF88xEYaZWvTkET89Zh38lLza
+2x/wfNZmCSAVurNw1Kf9NQfYsaGHwMsjrvTyhG93TTYXzRBFzAO2WlBiw6R0tQr8
+ZW5XCM9Yo6AS0KXiU0ZWwOXxhGdr38rNd7j9nZtpFwWmN1kgeb/vpEfq0Ylua9By
+uURnfJZu2K4TbFamuyjihItra2ZKOtFNPDeuggKMCkuZz6WU8FCoMEpnq5P2agxN
+OGAa7ynXdNzek98N3TGX8qtfEgCv6vyuM0gakJ6D9nM43nsCm1LkB/JA0CacWyRz
+ljaLL1C4S43azEOYyOOb94ITnkZCQGtH33kxzamyPLIZ37VF4+v6yTXySLBzOnhe
+Os5uBIDohVJuI838bLhZf8e5mIrnjiKwsmExXiQvgidbwvZKCz9n8YT4iUhWPx4F
+W+GPcivZsvsECcnJ2QURK1zhir5QuLS7ZbAth4kiEUxJ6ujF5jftE+L/ClK2LiY0
+2IXWRCct8J1hfJZZx8lm3PUCAwEAAaNTMFEwHQYDVR0OBBYEFO5CgtQzKbTEd/Q9
+rxK14a9BBwFZMB8GA1UdIwQYMBaAFO5CgtQzKbTEd/Q9rxK14a9BBwFZMA8GA1Ud
+EwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggIBAGrGS1zmaoARVq7qhoY+xzSc
+1I/Tzf6vG6aHBC+CcIoSM2oqr6TGH+ADHAY6jhu/qzv1ij3gtoInAkBtkWvYsCIV
+eISPj8Qomcd8EIeW77p+ArKzS4HY5m1c/O4D/5rkl6c0exFq4Pdw9V8xyM98QtLd
+oj4xzzXUTPOIwkROHkj8otcML28m/MC0l/4b+flHnPqKFuLBjhxi9b/ZfwaXfjkx
+TcXpM3nPH8zN7kaJpS1fPW1IJyxJYvT022uK+afpezTmyS/50aOncUGjDJRw8CcO
+B88O8lpizDD3tD7P6jVOpRRJS4SnkVErbIn1xdWER6ubhnnycH7UmDVIx+vNd/t6
+YDa377au8Za+LnbDPfV1+Og+RaJSEIjJgfYyqnjBxGdRGN21VbqJdRzo/eO4ZFd2
+mGVtMosVr0jw4O8r60o9oMMWBTbFpxOI929QdcV+X1Lz8A8BZz0faXfZ2Z9usctu
+W2FtZge3tsJ07z7kuhNdbnm2yQVfd0FqiJsapUjlhgcdFVoDWPuqOfWAoG31ble6
+eiNnxfjiCckPWyciIE6lw97nvavGjlUacH5qVG86hOWU7xyBgeQ0PH4e+Nxr50yU
+A0GMxni1gefZFG8qEPdNRuDT1QdqDGh/8Ea11GEUMXdAxk0UzqyAtLDr6MbwK6lV
+mqmeueFdogdjvQ3mXe94
+-----END CERTIFICATE-----
diff --git a/target/product/security/verity.pk8 b/target/product/security/verity.pk8
deleted file mode 100644
index bebf216..0000000
--- a/target/product/security/verity.pk8
+++ /dev/null
Binary files differ
diff --git a/target/product/security/verity.x509.pem b/target/product/security/verity.x509.pem
deleted file mode 100644
index 86399c3..0000000
--- a/target/product/security/verity.x509.pem
+++ /dev/null
@@ -1,24 +0,0 @@
------BEGIN CERTIFICATE-----
-MIID/TCCAuWgAwIBAgIJAJcPmDkJqolJMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYD
-VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
-VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
-AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
-Fw0xNDExMDYxOTA3NDBaFw00MjAzMjQxOTA3NDBaMIGUMQswCQYDVQQGEwJVUzET
-MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
-A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
-ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAOjreE0vTVSRenuzO9vnaWfk0eQzYab0gqpi
-6xAzi6dmD+ugoEKJmbPiuE5Dwf21isZ9uhUUu0dQM46dK4ocKxMRrcnmGxydFn6o
-fs3ODJMXOkv2gKXL/FdbEPdDbxzdu8z3yk+W67udM/fW7WbaQ3DO0knu+izKak/3
-T41c5uoXmQ81UNtAzRGzGchNVXMmWuTGOkg6U+0I2Td7K8yvUMWhAWPPpKLtVH9r
-AL5TzjYNR92izdKcz3AjRsI3CTjtpiVABGeX0TcjRSuZB7K9EK56HV+OFNS6I1NP
-jdD7FIShyGlqqZdUOkAUZYanbpgeT5N7QL6uuqcGpoTOkalu6kkCAwEAAaNQME4w
-HQYDVR0OBBYEFH5DM/m7oArf4O3peeKO0ZIEkrQPMB8GA1UdIwQYMBaAFH5DM/m7
-oArf4O3peeKO0ZIEkrQPMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
-AHO3NSvDE5jFvMehGGtS8BnFYdFKRIglDMc4niWSzhzOVYRH4WajxdtBWc5fx0ix
-NF/+hVKVhP6AIOQa+++sk+HIi7RvioPPbhjcsVlZe7cUEGrLSSveGouQyc+j0+m6
-JF84kszIl5GGNMTnx0XRPO+g8t6h5LWfnVydgZfpGRRg+WHewk1U2HlvTjIceb0N
-dcoJ8WKJAFWdcuE7VIm4w+vF/DYX/A2Oyzr2+QRhmYSv1cusgAeC1tvH4ap+J1Lg
-UnOu5Kh/FqPLLSwNVQp4Bu7b9QFfqK8Moj84bj88NqRGZgDyqzuTrFxn6FW7dmyA
-yttuAJAEAymk1mipd9+zp38=
------END CERTIFICATE-----
diff --git a/target/product/security/verity_key b/target/product/security/verity_key
deleted file mode 100644
index 31982d9..0000000
--- a/target/product/security/verity_key
+++ /dev/null
Binary files differ
diff --git a/target/product/verity.mk b/target/product/verity.mk
deleted file mode 100644
index 5f09283..0000000
--- a/target/product/verity.mk
+++ /dev/null
@@ -1,29 +0,0 @@
-#
-# Copyright (C) 2014 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Provides dependencies necessary for verified boot.
-
-PRODUCT_SUPPORTS_BOOT_SIGNER := true
-PRODUCT_SUPPORTS_VERITY := true
-PRODUCT_SUPPORTS_VERITY_FEC := true
-
-# The dev key is used to sign boot and recovery images, and the verity
-# metadata table. Actual product deliverables will be re-signed by hand.
-# We expect this file to exist with the suffixes ".x509.pem" and ".pk8".
-PRODUCT_VERITY_SIGNING_KEY := build/make/target/product/security/verity
-
-PRODUCT_PACKAGES += \
- verity_key
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index ef1dca2..12acc13 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -16,6 +16,7 @@
import itertools
import logging
import os
+import shutil
import struct
import zipfile
@@ -119,7 +120,7 @@
# Re-sign the package after updating the metadata entry.
if OPTIONS.no_signing:
- output_file = prelim_signing
+ shutil.copy(prelim_signing, output_file)
else:
SignOutput(prelim_signing, output_file)
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 4f2b1bf..979f42b 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -99,15 +99,15 @@
The second dir will be used for lookup if BOARD_USES_RECOVERY_AS_BOOT is
set to true.
- --avb_{boot,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
+ --avb_{boot,recovery,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
vbmeta_vendor}_algorithm <algorithm>
- --avb_{boot,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
+ --avb_{boot,recovery,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
vbmeta_vendor}_key <key>
Use the specified algorithm (e.g. SHA256_RSA4096) and the key to AVB-sign
the specified image. Otherwise it uses the existing values in info dict.
- --avb_{apex,boot,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
- vbmeta_vendor}_extra_args <args>
+ --avb_{apex,boot,recovery,system,system_other,vendor,dtbo,vbmeta,
+ vbmeta_system,vbmeta_vendor}_extra_args <args>
Specify any additional args that are needed to AVB-sign the image
(e.g. "--signing_helper /path/to/helper"). The args will be appended to
the existing ones in info dict.
@@ -141,6 +141,12 @@
Allow the existence of the file 'userdebug_plat_sepolicy.cil' under
(/system/system_ext|/system_ext)/etc/selinux.
If not set, error out when the file exists.
+
+ --override_apk_keys <path>
+ Replace all APK keys with this private key
+
+ --override_apex_keys <path>
+ Replace all APEX keys with this private key
"""
from __future__ import print_function
@@ -197,6 +203,8 @@
OPTIONS.vendor_partitions = set()
OPTIONS.vendor_otatools = None
OPTIONS.allow_gsi_debug_sepolicy = False
+OPTIONS.override_apk_keys = None
+OPTIONS.override_apex_keys = None
AVB_FOOTER_ARGS_BY_PARTITION = {
@@ -245,6 +253,10 @@
def GetApkCerts(certmap):
+ if OPTIONS.override_apk_keys is not None:
+ for apk in certmap.keys():
+ certmap[apk] = OPTIONS.override_apk_keys
+
# apply the key remapping to the contents of the file
for apk, cert in certmap.items():
certmap[apk] = OPTIONS.key_map.get(cert, cert)
@@ -275,6 +287,15 @@
Raises:
AssertionError: On invalid container / payload key overrides.
"""
+ if OPTIONS.override_apex_keys is not None:
+ for apex in keys_info.keys():
+ keys_info[apex] = (OPTIONS.override_apex_keys, keys_info[apex][1], keys_info[apex][2])
+
+ if OPTIONS.override_apk_keys is not None:
+ key = key_map.get(OPTIONS.override_apk_keys, OPTIONS.override_apk_keys)
+ for apex in keys_info.keys():
+ keys_info[apex] = (keys_info[apex][0], key, keys_info[apex][2])
+
# Apply all the --extra_apex_payload_key options to override the payload
# signing keys in the given keys_info.
for apex, key in OPTIONS.extra_apex_payload_keys.items():
@@ -1429,6 +1450,12 @@
OPTIONS.avb_algorithms['dtbo'] = a
elif o == "--avb_dtbo_extra_args":
OPTIONS.avb_extra_args['dtbo'] = a
+ elif o == "--avb_recovery_key":
+ OPTIONS.avb_keys['recovery'] = a
+ elif o == "--avb_recovery_algorithm":
+ OPTIONS.avb_algorithms['recovery'] = a
+ elif o == "--avb_recovery_extra_args":
+ OPTIONS.avb_extra_args['recovery'] = a
elif o == "--avb_system_key":
OPTIONS.avb_keys['system'] = a
elif o == "--avb_system_algorithm":
@@ -1485,6 +1512,10 @@
OPTIONS.vendor_partitions = set(a.split(","))
elif o == "--allow_gsi_debug_sepolicy":
OPTIONS.allow_gsi_debug_sepolicy = True
+ elif o == "--override_apk_keys":
+ OPTIONS.override_apk_keys = a
+ elif o == "--override_apex_keys":
+ OPTIONS.override_apex_keys = a
else:
return False
return True
@@ -1514,6 +1545,9 @@
"avb_dtbo_algorithm=",
"avb_dtbo_key=",
"avb_dtbo_extra_args=",
+ "avb_recovery_algorithm=",
+ "avb_recovery_key=",
+ "avb_recovery_extra_args=",
"avb_system_algorithm=",
"avb_system_key=",
"avb_system_extra_args=",
@@ -1538,6 +1572,8 @@
"vendor_partitions=",
"vendor_otatools=",
"allow_gsi_debug_sepolicy",
+ "override_apk_keys=",
+ "override_apex_keys=",
],
extra_option_handler=option_handler)
diff --git a/tools/signapk/src/com/android/signapk/SignApk.java b/tools/signapk/src/com/android/signapk/SignApk.java
index 36a220c..25c53d3 100644
--- a/tools/signapk/src/com/android/signapk/SignApk.java
+++ b/tools/signapk/src/com/android/signapk/SignApk.java
@@ -1097,6 +1097,7 @@
boolean signUsingApkSignatureSchemeV2 = true;
boolean signUsingApkSignatureSchemeV4 = false;
SigningCertificateLineage certLineage = null;
+ Integer rotationMinSdkVersion = null;
int argstart = 0;
while (argstart < args.length && args[argstart].startsWith("-")) {
@@ -1157,6 +1158,15 @@
"Error reading lineage file: " + e.getMessage());
}
++argstart;
+ } else if ("--rotation-min-sdk-version".equals(args[argstart])) {
+ String rotationMinSdkVersionString = args[++argstart];
+ try {
+ rotationMinSdkVersion = Integer.parseInt(rotationMinSdkVersionString);
+ } catch (NumberFormatException e) {
+ throw new IllegalArgumentException(
+ "--rotation-min-sdk-version must be a decimal number: " + rotationMinSdkVersionString);
+ }
+ ++argstart;
} else {
usage();
}
@@ -1248,15 +1258,22 @@
}
}
- try (ApkSignerEngine apkSigner =
- new DefaultApkSignerEngine.Builder(
- createSignerConfigs(privateKey, publicKey), minSdkVersion)
- .setV1SigningEnabled(true)
- .setV2SigningEnabled(signUsingApkSignatureSchemeV2)
- .setOtherSignersSignaturesPreserved(false)
- .setCreatedBy("1.0 (Android SignApk)")
- .setSigningCertificateLineage(certLineage)
- .build()) {
+ DefaultApkSignerEngine.Builder builder = new DefaultApkSignerEngine.Builder(
+ createSignerConfigs(privateKey, publicKey), minSdkVersion)
+ .setV1SigningEnabled(true)
+ .setV2SigningEnabled(signUsingApkSignatureSchemeV2)
+ .setOtherSignersSignaturesPreserved(false)
+ .setCreatedBy("1.0 (Android SignApk)");
+
+ if (certLineage != null) {
+ builder = builder.setSigningCertificateLineage(certLineage);
+ }
+
+ if (rotationMinSdkVersion != null) {
+ builder = builder.setMinSdkVersionForRotation(rotationMinSdkVersion);
+ }
+
+ try (ApkSignerEngine apkSigner = builder.build()) {
// We don't preserve the input APK's APK Signing Block (which contains v2
// signatures)
apkSigner.inputApkSigningBlock(null);