aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAndroid Build Coastguard Worker <android-build-coastguard-worker@google.com>2022-08-16 06:58:03 +0000
committerGerrit Code Review <noreply-gerritcodereview@google.com>2022-08-16 06:58:03 +0000
commitbdde8439fdebea5339661f9244685deb213dfb02 (patch)
treec76f123765854491fbaf8c99f4c13cc9db1d6ac5
parent08b769837b211d1ffbd14d1dc31bcae61667a007 (diff)
parent6ead6d4824f0606c7e6a0212f815acc20013f3ff (diff)
downloadbuild-bdde8439fdebea5339661f9244685deb213dfb02.tar.gz
Merge "Snap for 8952093 from 2abfe5f3a356d7e53b561f783c6d9c7d3d468733 to sdk-release" into sdk-releaseplatform-tools-33.0.3
-rw-r--r--core/Makefile114
-rw-r--r--core/OWNERS3
-rw-r--r--core/android_soong_config_vars.mk21
-rw-r--r--core/app_prebuilt_internal.mk2
-rw-r--r--core/board_config.mk2
-rw-r--r--core/board_config_wifi.mk77
-rw-r--r--core/config.mk11
-rw-r--r--core/definitions.mk24
-rw-r--r--core/dex_preopt_odex_install.mk4
-rw-r--r--core/distdir.mk114
-rw-r--r--core/envsetup.mk5
-rw-r--r--core/notice_files.mk2
-rw-r--r--core/os_licensing.mk36
-rw-r--r--core/proguard.flags13
-rw-r--r--core/proguard_basic_keeps.flags10
-rw-r--r--core/soong_config.mk5
-rw-r--r--core/sysprop.mk17
-rw-r--r--core/version_defaults.mk2
-rw-r--r--envsetup.sh21
-rwxr-xr-xfinalize_branch_for_release.sh16
-rw-r--r--orchestrator/README8
-rw-r--r--orchestrator/core/api_assembly.py156
-rw-r--r--orchestrator/core/api_assembly_cc.py48
-rw-r--r--orchestrator/core/api_domain.py28
-rw-r--r--orchestrator/core/api_export.py20
-rw-r--r--orchestrator/core/final_packaging.py117
-rw-r--r--orchestrator/core/inner_tree.py193
-rw-r--r--orchestrator/core/interrogate.py29
-rwxr-xr-xorchestrator/core/lunch.py408
-rw-r--r--orchestrator/core/ninja_runner.py37
-rw-r--r--orchestrator/core/ninja_tools.py59
-rwxr-xr-xorchestrator/core/orchestrator.py119
-rw-r--r--orchestrator/core/test/configs/another/bad.mcombo1
-rw-r--r--orchestrator/core/test/configs/another/dir/a1
-rw-r--r--orchestrator/core/test/configs/b-eng1
-rw-r--r--orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/b.mcombo3
-rw-r--r--orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo1
-rw-r--r--orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/not_a_combo.txt1
-rw-r--r--orchestrator/core/test/configs/device/aa/bb/multitree_combos/b.mcombo1
-rw-r--r--orchestrator/core/test/configs/device/aa/bb/multitree_combos/d.mcombo1
-rw-r--r--orchestrator/core/test/configs/device/aa/bb/multitree_combos/v.mcombo1
-rw-r--r--orchestrator/core/test/configs/device/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo0
-rw-r--r--orchestrator/core/test/configs/parsing/cycles/1.mcombo5
-rw-r--r--orchestrator/core/test/configs/parsing/cycles/2.mcombo6
-rw-r--r--orchestrator/core/test/configs/parsing/cycles/3.mcombo6
-rw-r--r--orchestrator/core/test/configs/parsing/merge/1.mcombo13
-rw-r--r--orchestrator/core/test/configs/parsing/merge/2.mcombo12
-rw-r--r--orchestrator/core/test/configs/parsing/merge/3.mcombo10
-rw-r--r--orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/b.mcombo1
-rw-r--r--orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/v.mcombo1
-rw-r--r--orchestrator/core/test/configs/vendor/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo0
-rwxr-xr-xorchestrator/core/test_lunch.py128
-rw-r--r--orchestrator/core/tree_analysis.py24
-rw-r--r--orchestrator/core/utils.py141
-rw-r--r--orchestrator/demo/buffet_helper.py46
-rw-r--r--orchestrator/demo/build_helper.py367
-rwxr-xr-xorchestrator/demo/collect_metadata.py428
-rw-r--r--orchestrator/demo/envsetup.sh48
-rw-r--r--orchestrator/demo/hierarchy.py79
-rw-r--r--orchestrator/demo/hierarchy.yaml37
-rw-r--r--orchestrator/demo/utils.py89
-rw-r--r--orchestrator/inner_build/common.py60
-rwxr-xr-xorchestrator/inner_build/inner_build_demo.py110
-rwxr-xr-xorchestrator/inner_build/inner_build_soong.py37
-rw-r--r--orchestrator/multitree_combos/aosp_cf_arm64_phone.mcombo16
-rw-r--r--orchestrator/multitree_combos/test.mcombo16
-rw-r--r--orchestrator/ninja/ninja_syntax.py172
-rw-r--r--orchestrator/ninja/ninja_writer.py59
-rw-r--r--orchestrator/ninja/test_ninja_syntax.py107
-rw-r--r--orchestrator/ninja/test_ninja_writer.py54
-rw-r--r--orchestrator/test_workspace/combo.mcombo17
l---------orchestrator/test_workspace/inner_tree_1/.inner_build1
-rw-r--r--orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c8
-rw-r--r--orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h4
-rw-r--r--target/board/BoardConfigGsiCommon.mk3
-rw-r--r--target/board/generic_arm64/BoardConfig.mk3
-rwxr-xr-xtarget/board/generic_x86_64/BoardConfig.mk3
-rw-r--r--target/board/gsi_arm64/BoardConfig.mk3
-rw-r--r--target/product/OWNERS5
-rw-r--r--target/product/base_system.mk1
-rw-r--r--target/product/base_vendor.mk8
-rw-r--r--target/product/core_64_bit.mk4
-rw-r--r--target/product/core_64_bit_only.mk1
-rw-r--r--tools/build-runfiles.cc2
-rw-r--r--tools/canoninja/go.mod2
-rw-r--r--tools/compliance/go.mod12
-rw-r--r--tools/compliance/go.sum2
-rw-r--r--tools/releasetools/Android.bp17
-rw-r--r--tools/releasetools/fsverity_manifest_generator.py115
-rw-r--r--tools/releasetools/ota_utils.py3
-rwxr-xr-xtools/releasetools/sign_target_files_apks.py34
91 files changed, 466 insertions, 3586 deletions
diff --git a/core/Makefile b/core/Makefile
index a96cfd5e59..bf31125000 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -963,19 +963,17 @@ define get-partition-size-argument
$(if $(1),--partition_size $(1),--dynamic_partition_size)
endef
+ifndef BOARD_PREBUILT_BOOTIMAGE
+
ifneq ($(strip $(TARGET_NO_KERNEL)),true)
INTERNAL_BOOTIMAGE_ARGS := \
$(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET))
-INTERNAL_INIT_BOOT_IMAGE_ARGS :=
-
# TODO(b/229701033): clean up BOARD_BUILD_GKI_BOOT_IMAGE_WITHOUT_RAMDISK.
ifneq ($(BOARD_BUILD_GKI_BOOT_IMAGE_WITHOUT_RAMDISK),true)
ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
ifneq ($(BUILDING_INIT_BOOT_IMAGE),true)
INTERNAL_BOOTIMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
- else
- INTERNAL_INIT_BOOT_IMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
endif
endif
endif
@@ -1184,7 +1182,10 @@ endif # BOARD_AVB_ENABLE
endif # BUILDING_BOOT_IMAGE
else # TARGET_NO_KERNEL == "true"
-ifdef BOARD_PREBUILT_BOOTIMAGE
+INSTALLED_BOOTIMAGE_TARGET :=
+endif # TARGET_NO_KERNEL
+
+else # BOARD_PREBUILT_BOOTIMAGE defined
INTERNAL_PREBUILT_BOOTIMAGE := $(BOARD_PREBUILT_BOOTIMAGE)
INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
@@ -1206,10 +1207,8 @@ $(INSTALLED_BOOTIMAGE_TARGET): $(INTERNAL_PREBUILT_BOOTIMAGE)
cp $(INTERNAL_PREBUILT_BOOTIMAGE) $@
endif # BOARD_AVB_ENABLE
-else # BOARD_PREBUILT_BOOTIMAGE not defined
-INSTALLED_BOOTIMAGE_TARGET :=
endif # BOARD_PREBUILT_BOOTIMAGE
-endif # TARGET_NO_KERNEL
+
endif # my_installed_prebuilt_gki_apex not defined
my_apex_extracted_boot_image :=
@@ -1222,6 +1221,8 @@ ifeq ($(BUILDING_INIT_BOOT_IMAGE),true)
INSTALLED_INIT_BOOT_IMAGE_TARGET := $(PRODUCT_OUT)/init_boot.img
$(INSTALLED_INIT_BOOT_IMAGE_TARGET): $(MKBOOTIMG) $(INSTALLED_RAMDISK_TARGET)
+INTERNAL_INIT_BOOT_IMAGE_ARGS := --ramdisk $(INSTALLED_RAMDISK_TARGET)
+
ifdef BOARD_KERNEL_PAGESIZE
INTERNAL_INIT_BOOT_IMAGE_ARGS += --pagesize $(BOARD_KERNEL_PAGESIZE)
endif
@@ -1688,6 +1689,63 @@ endif # PRODUCT_NOTICE_SPLIT
ALL_DEFAULT_INSTALLED_MODULES += $(installed_notice_html_or_xml_gz)
+need_vendor_notice:=false
+ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
+ need_vendor_notice:=true
+endif
+
+ifdef BUILDING_DEBUG_VENDOR_BOOT_IMAGE
+ need_vendor_notice:=true
+endif
+
+ifdef BUILDING_VENDOR_IMAGE
+ need_vendor_notice:=true
+endif
+
+ifeq (true,$(need_vendor_notice))
+ifneq (,$(installed_vendor_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_vendor_notice_xml_gz)
+endif
+endif
+
+need_vendor_notice:=
+
+ifdef BUILDING_ODM_IMAGE
+ifneq (,$(installed_odm_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_odm_notice_xml_gz)
+endif
+endif
+
+ifdef BUILDING_PRODUCT_IMAGE
+ifneq (,$(installed_product_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_product_notice_xml_gz)
+endif
+endif
+
+ifdef BUILDING_SYSTEM_EXT_IMAGE
+ifneq (,$(installed_system_ext_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_system_ext_notice_xml_gz)
+endif
+endif
+
+ifdef BUILDING_VENDOR_DLKM_IMAGE
+ifneq (,$(installed_vendor_dlkm_notice_xml_gz)
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_vendor_dlkm_notice_xml_gz)
+endif
+endif
+
+ifdef BUILDING_ODM_DLKM_IMAGE
+ifneq (,$(installed_odm_dlkm_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_odm_dlkm_notice_xml_gz)
+endif
+endif
+
+ifdef BUILDING_SYSTEM_DLKM_IMAGE
+ifneq (,$(installed_system_dlkm_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_system_dlkm_notice_xml_gz)
+endif
+endif
+
endif # TARGET_BUILD_APPS
# The kernel isn't really a module, so to get its module file in there, we
@@ -2979,20 +3037,29 @@ $(FSVERITY_APK_OUT): PRIVATE_MANIFEST := $(FSVERITY_APK_MANIFEST_PATH)
$(FSVERITY_APK_OUT): PRIVATE_FRAMEWORK_RES := $(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk
$(FSVERITY_APK_OUT): PRIVATE_KEY := $(FSVERITY_APK_KEY_PATH)
$(FSVERITY_APK_OUT): PRIVATE_INPUTS := $(fsverity-metadata-targets)
+$(FSVERITY_APK_OUT): PRIVATE_ASSETS := $(call intermediates-dir-for,ETC,build_manifest)/assets
$(FSVERITY_APK_OUT): $(HOST_OUT_EXECUTABLES)/fsverity_manifest_generator \
$(HOST_OUT_EXECUTABLES)/fsverity $(HOST_OUT_EXECUTABLES)/aapt2 \
$(HOST_OUT_EXECUTABLES)/apksigner $(FSVERITY_APK_MANIFEST_PATH) \
$(FSVERITY_APK_KEY_PATH).x509.pem $(FSVERITY_APK_KEY_PATH).pk8 \
$(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk \
$(fsverity-metadata-targets)
- $< --fsverity-path $(PRIVATE_FSVERITY) --aapt2-path $(PRIVATE_AAPT2) \
+ rm -rf $(PRIVATE_ASSETS)
+ mkdir -p $(PRIVATE_ASSETS)
+ $< --fsverity-path $(PRIVATE_FSVERITY) \
+ --base-dir $(PRODUCT_OUT) \
+ --output $(PRIVATE_ASSETS)/build_manifest.pb \
+ $(PRIVATE_INPUTS)
+ $(PRIVATE_AAPT2) link -o $@ \
+ -A $(PRIVATE_ASSETS) \
+ -I $(PRIVATE_FRAMEWORK_RES) \
--min-sdk-version $(PRIVATE_MIN_SDK_VERSION) \
--version-code $(PRIVATE_VERSION_CODE) \
--version-name $(PRIVATE_VERSION_NAME) \
- --apksigner-path $(PRIVATE_APKSIGNER) --apk-key-path $(PRIVATE_KEY) \
- --apk-manifest-path $(PRIVATE_MANIFEST) --framework-res $(PRIVATE_FRAMEWORK_RES) \
- --output $@ \
- --base-dir $(PRODUCT_OUT) $(PRIVATE_INPUTS)
+ --manifest $(PRIVATE_MANIFEST)
+ $(PRIVATE_APKSIGNER) sign --in $@ \
+ --cert $(PRIVATE_KEY).x509.pem \
+ --key $(PRIVATE_KEY).pk8
ALL_DEFAULT_INSTALLED_MODULES += $(FSVERITY_APK_OUT)
@@ -3858,6 +3925,11 @@ $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET): \
$(INSTALLED_FILES_FILE_SYSTEM_DLKM)
$(build-system_dlkmimage-target)
+SYSTEM_DLKM_NOTICE_DEPS += $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)
+
+$(call declare-1p-container,$(INSTALLED_SYSTEM_DLKMIMAGE_TARGET),)
+$(call declare-container-license-deps,$(INSTALLED_SYSTEM_DLKMIMAGE_TARGET),$(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_SYSTEM_DLKMIMAGE_FILES) $(INSTALLED_FILES_FILE_SYSTEM_DLKM),$(PRODUCT_OUT)/:/)
+
.PHONY: system_dlkmimage-nodeps sdnod
system_dlkmimage-nodeps sdnod: | $(INTERNAL_USERIMAGES_DEPS)
$(build-system_dlkmimage-target)
@@ -6189,7 +6261,7 @@ $(APPCOMPAT_ZIP): $(SOONG_ZIP)
# The mac build doesn't build dex2oat, so create the zip file only if the build OS is linux.
ifeq ($(BUILD_OS),linux)
ifneq ($(DEX2OAT),)
-dexpreopt_tools_deps := $(DEXPREOPT_GEN_DEPS) $(DEXPREOPT_GEN) $(AAPT2)
+dexpreopt_tools_deps := $(DEXPREOPT_GEN_DEPS) $(DEXPREOPT_GEN)
dexpreopt_tools_deps += $(HOST_OUT_EXECUTABLES)/dexdump
dexpreopt_tools_deps += $(HOST_OUT_EXECUTABLES)/oatdump
DEXPREOPT_TOOLS_ZIP := $(PRODUCT_OUT)/dexpreopt_tools.zip
@@ -6524,22 +6596,22 @@ INSTALLED_SUPERIMAGE_DEPENDENCIES += $(INSTALLED_SYSTEMOTHERIMAGE_TARGET)
endif
endif
-# If BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT is set, super.img is built from images in the
-# $(PRODUCT_OUT) directory, and is built to $(PRODUCT_OUT)/super.img. Also, it will
-# be built for non-dist builds. This is useful for devices that uses super.img directly, e.g.
-# virtual devices.
-ifeq (true,$(BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT))
$(INSTALLED_SUPERIMAGE_TARGET): $(INSTALLED_SUPERIMAGE_DEPENDENCIES)
$(call pretty,"Target super fs image for debug: $@")
$(call build-superimage-target,$(INSTALLED_SUPERIMAGE_TARGET),\
$(call intermediates-dir-for,PACKAGING,superimage_debug)/misc_info.txt)
-droidcore-unbundled: $(INSTALLED_SUPERIMAGE_TARGET)
-
# For devices that uses super image directly, the superimage target points to the file in $(PRODUCT_OUT).
.PHONY: superimage
superimage: $(INSTALLED_SUPERIMAGE_TARGET)
+# If BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT is set, super.img is built from images in the
+# $(PRODUCT_OUT) directory, and is built to $(PRODUCT_OUT)/super.img. Also, it will
+# be built for non-dist builds. This is useful for devices that uses super.img directly, e.g.
+# virtual devices.
+ifeq (true,$(BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT))
+droidcore-unbundled: $(INSTALLED_SUPERIMAGE_TARGET)
+
$(call dist-for-goals,dist_files,$(INSTALLED_MISC_INFO_TARGET):super_misc_info.txt)
endif # BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT
diff --git a/core/OWNERS b/core/OWNERS
index 980186cdaa..d48ceab5c5 100644
--- a/core/OWNERS
+++ b/core/OWNERS
@@ -1,6 +1,9 @@
per-file *dex_preopt*.* = ngeoffray@google.com,skvadrik@google.com
per-file verify_uses_libraries.sh = ngeoffray@google.com,skvadrik@google.com
+# For global Proguard rules
+per-file proguard*.flags = jdduke@google.com
+
# For version updates
per-file version_defaults.mk = aseaton@google.com,lubomir@google.com,pscovanner@google.com,bkhalife@google.com,jainne@google.com
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index cdd48e7cc6..ca4c60669e 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -26,6 +26,7 @@ $(call add_soong_config_namespace,ANDROID)
# Add variables to the namespace below:
+$(call add_soong_config_var,ANDROID,TARGET_DYNAMIC_64_32_MEDIASERVER)
$(call add_soong_config_var,ANDROID,TARGET_ENABLE_MEDIADRM_64)
$(call add_soong_config_var,ANDROID,IS_TARGET_MIXED_SEPOLICY)
ifeq ($(IS_TARGET_MIXED_SEPOLICY),true)
@@ -75,6 +76,7 @@ $(call soong_config_set,art_module,source_build,$(ART_MODULE_BUILD_FROM_SOURCE))
# are controlled by the MODULE_BUILD_FROM_SOURCE environment variable by
# default.
INDIVIDUALLY_TOGGLEABLE_PREBUILT_MODULES := \
+ permission \
wifi \
$(foreach m, $(INDIVIDUALLY_TOGGLEABLE_PREBUILT_MODULES),\
@@ -98,7 +100,20 @@ endif
# TODO(b/203088572): Remove when Java optimizations enabled by default for
# SystemUI.
$(call add_soong_config_var,ANDROID,SYSTEMUI_OPTIMIZE_JAVA)
-# Enable by default unless explicitly set or overridden.
-# See frameworks/base/services/Android.bp for additional notes on side effects.
-SYSTEM_OPTIMIZE_JAVA ?= true
+
+# Enable system_server optimizations by default unless explicitly set or if
+# there may be dependent runtime jars.
+# TODO(b/240588226): Remove the off-by-default exceptions after handling
+# system_server jars automatically w/ R8.
+ifeq (true,$(PRODUCT_BROKEN_SUBOPTIMAL_ORDER_OF_SYSTEM_SERVER_JARS))
+ # If system_server jar ordering is broken, don't assume services.jar can be
+ # safely optimized in isolation, as there may be dependent jars.
+ SYSTEM_OPTIMIZE_JAVA ?= false
+else ifneq (platform:services,$(lastword $(PRODUCT_SYSTEM_SERVER_JARS)))
+ # If services is not the final jar in the dependency ordering, don't assume
+ # it can be safely optimized in isolation, as there may be dependent jars.
+ SYSTEM_OPTIMIZE_JAVA ?= false
+else
+ SYSTEM_OPTIMIZE_JAVA ?= true
+endif
$(call add_soong_config_var,ANDROID,SYSTEM_OPTIMIZE_JAVA)
diff --git a/core/app_prebuilt_internal.mk b/core/app_prebuilt_internal.mk
index 79639a8301..8d057ac0b4 100644
--- a/core/app_prebuilt_internal.mk
+++ b/core/app_prebuilt_internal.mk
@@ -275,7 +275,7 @@ $(error You must put all the split source apks in the same folder: $(LOCAL_PACKA
endif
my_src_dir := $(LOCAL_PATH)/$(my_src_dir)
-$(built_apk_splits) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem
+$(built_apk_splits) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem | $(ZIPALIGN) $(ZIP2ZIP) $(SIGNAPK_JAR) $(SIGNAPK_JNI_LIBRARY_PATH)
$(built_apk_splits) : PRIVATE_PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
$(built_apk_splits) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
$(built_apk_splits) : $(intermediates)/%.apk : $(my_src_dir)/%.apk
diff --git a/core/board_config.mk b/core/board_config.mk
index d2803490f8..a0c16ca7b5 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -283,6 +283,8 @@ $(foreach var,$(_board_true_false_vars), \
$(if $(filter-out true false,$($(var))), \
$(error Valid values of $(var) are "true", "false", and "". Not "$($(var))")))
+include $(BUILD_SYSTEM)/board_config_wifi.mk
+
# Default *_CPU_VARIANT_RUNTIME to CPU_VARIANT if unspecified.
TARGET_CPU_VARIANT_RUNTIME := $(or $(TARGET_CPU_VARIANT_RUNTIME),$(TARGET_CPU_VARIANT))
TARGET_2ND_CPU_VARIANT_RUNTIME := $(or $(TARGET_2ND_CPU_VARIANT_RUNTIME),$(TARGET_2ND_CPU_VARIANT))
diff --git a/core/board_config_wifi.mk b/core/board_config_wifi.mk
new file mode 100644
index 0000000000..ddeb0d7d6d
--- /dev/null
+++ b/core/board_config_wifi.mk
@@ -0,0 +1,77 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# ###############################################################
+# This file adds WIFI variables into soong config namespace (`wifi`)
+# ###############################################################
+
+ifdef BOARD_WLAN_DEVICE
+ $(call soong_config_set,wifi,board_wlan_device,$(BOARD_WLAN_DEVICE))
+endif
+ifdef WIFI_DRIVER_MODULE_PATH
+ $(call soong_config_set,wifi,driver_module_path,$(WIFI_DRIVER_MODULE_PATH))
+endif
+ifdef WIFI_DRIVER_MODULE_ARG
+ $(call soong_config_set,wifi,driver_module_arg,$(WIFI_DRIVER_MODULE_ARG))
+endif
+ifdef WIFI_DRIVER_MODULE_NAME
+ $(call soong_config_set,wifi,driver_module_name,$(WIFI_DRIVER_MODULE_NAME))
+endif
+ifdef WIFI_DRIVER_FW_PATH_STA
+ $(call soong_config_set,wifi,driver_fw_path_sta,$(WIFI_DRIVER_FW_PATH_STA))
+endif
+ifdef WIFI_DRIVER_FW_PATH_AP
+ $(call soong_config_set,wifi,driver_fw_path_ap,$(WIFI_DRIVER_FW_PATH_AP))
+endif
+ifdef WIFI_DRIVER_FW_PATH_P2P
+ $(call soong_config_set,wifi,driver_fw_path_p2p,$(WIFI_DRIVER_FW_PATH_P2P))
+endif
+ifdef WIFI_DRIVER_FW_PATH_PARAM
+ $(call soong_config_set,wifi,driver_fw_path_param,$(WIFI_DRIVER_FW_PATH_PARAM))
+endif
+ifdef WIFI_DRIVER_STATE_CTRL_PARAM
+ $(call soong_config_set,wifi,driver_state_ctrl_param,$(WIFI_DRIVER_STATE_CTRL_PARAM))
+endif
+ifdef WIFI_DRIVER_STATE_ON
+ $(call soong_config_set,wifi,driver_state_on,$(WIFI_DRIVER_STATE_ON))
+endif
+ifdef WIFI_DRIVER_STATE_OFF
+ $(call soong_config_set,wifi,driver_state_off,$(WIFI_DRIVER_STATE_OFF))
+endif
+ifdef WIFI_MULTIPLE_VENDOR_HALS
+ $(call soong_config_set,wifi,multiple_vendor_hals,$(WIFI_MULTIPLE_VENDOR_HALS))
+endif
+ifneq ($(wildcard vendor/google/libraries/GoogleWifiConfigLib),)
+ $(call soong_config_set,wifi,google_wifi_config_lib,true)
+endif
+ifdef WIFI_HAL_INTERFACE_COMBINATIONS
+ $(call soong_config_set,wifi,hal_interface_combinations,$(WIFI_HAL_INTERFACE_COMBINATIONS))
+endif
+ifdef WIFI_HIDL_FEATURE_AWARE
+ $(call soong_config_set,wifi,hidl_feature_aware,true)
+endif
+ifdef WIFI_HIDL_FEATURE_DUAL_INTERFACE
+ $(call soong_config_set,wifi,hidl_feature_dual_interface,true)
+endif
+ifdef WIFI_HIDL_FEATURE_DISABLE_AP
+ $(call soong_config_set,wifi,hidl_feature_disable_ap,true)
+endif
+ifdef WIFI_HIDL_FEATURE_DISABLE_AP_MAC_RANDOMIZATION
+ $(call soong_config_set,wifi,hidl_feature_disable_ap_mac_randomization,true)
+endif
+ifdef WIFI_AVOID_IFACE_RESET_MAC_CHANGE
+ $(call soong_config_set,wifi,avoid_iface_reset_mac_change,true)
+endif \ No newline at end of file
diff --git a/core/config.mk b/core/config.mk
index c0dea95e33..80828317f0 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -803,6 +803,7 @@ ifdef PRODUCT_MAINLINE_SEPOLICY_DEV_CERTIFICATES
else
MAINLINE_SEPOLICY_DEV_CERTIFICATES := $(dir $(DEFAULT_SYSTEM_DEV_CERTIFICATE))
endif
+.KATI_READONLY := MAINLINE_SEPOLICY_DEV_CERTIFICATES
BUILD_NUMBER_FROM_FILE := $$(cat $(SOONG_OUT_DIR)/build_number.txt)
BUILD_DATETIME_FROM_FILE := $$(cat $(BUILD_DATETIME_FILE))
@@ -973,16 +974,6 @@ $(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)), \
$(eval .KATI_READONLY := BOARD_$(group)_PARTITION_LIST) \
)
-# BOARD_*_PARTITION_LIST: a list of the following tokens
-valid_super_partition_list := system vendor product system_ext odm vendor_dlkm odm_dlkm system_dlkm
-$(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)), \
- $(if $(filter-out $(valid_super_partition_list),$(BOARD_$(group)_PARTITION_LIST)), \
- $(error BOARD_$(group)_PARTITION_LIST contains invalid partition name \
- $(filter-out $(valid_super_partition_list),$(BOARD_$(group)_PARTITION_LIST)). \
- Valid names are $(valid_super_partition_list))))
-valid_super_partition_list :=
-
-
# Define BOARD_SUPER_PARTITION_PARTITION_LIST, the sum of all BOARD_*_PARTITION_LIST
ifdef BOARD_SUPER_PARTITION_PARTITION_LIST
$(error BOARD_SUPER_PARTITION_PARTITION_LIST should not be defined, but computed from \
diff --git a/core/definitions.mk b/core/definitions.mk
index a9d5733e11..cbb1613a61 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2976,6 +2976,19 @@ $(2): $(1)
$$(copy-file-to-target)
endef
+# Define a rule to copy a license metadata file. For use via $(eval).
+# $(1): source license metadata file
+# $(2): destination license metadata file
+# $(3): built targets
+# $(4): installed targets
+define copy-one-license-metadata-file
+$(2): PRIVATE_BUILT=$(3)
+$(2): PRIVATE_INSTALLED=$(4)
+$(2): $(1)
+ @echo "Copy: $$@"
+ $$(call copy-license-metadata-file-to-target,$$(PRIVATE_BUILT),$$(PRIVATE_INSTALLED))
+endef
+
define copy-and-uncompress-dexs
$(2): $(1) $(ZIPALIGN) $(ZIP2ZIP)
@echo "Uncompress dexs in: $$@"
@@ -3163,6 +3176,17 @@ $(hide) rm -f $@
$(hide) cp "$<" "$@"
endef
+# Same as copy-file-to-target, but assume file is a licenes metadata file,
+# and append built from $(1) and installed from $(2).
+define copy-license-metadata-file-to-target
+@mkdir -p $(dir $@)
+$(hide) rm -f $@
+$(hide) cp "$<" "$@" $(strip \
+ $(foreach b,$(1), && (grep -F 'built: "'"$(b)"'"' "$@" >/dev/null || echo 'built: "'"$(b)"'"' >>"$@")) \
+ $(foreach i,$(2), && (grep -F 'installed: "'"$(i)"'"' "$@" >/dev/null || echo 'installed: "'"$(i)"'"' >>"$@")) \
+)
+endef
+
# The same as copy-file-to-target, but use the local
# cp command instead of acp.
define copy-file-to-target-with-cp
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 216168b0af..b303b52f12 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -245,7 +245,7 @@ ifeq (true,$(LOCAL_ENFORCE_USES_LIBRARIES))
$(my_enforced_uses_libraries): PRIVATE_OPTIONAL_USES_LIBRARIES := $(my_optional_uses_libs_args)
$(my_enforced_uses_libraries): PRIVATE_DEXPREOPT_CONFIGS := $(my_dexpreopt_config_args)
$(my_enforced_uses_libraries): PRIVATE_RELAX_CHECK := $(my_relax_check_arg)
- $(my_enforced_uses_libraries): $(AAPT)
+ $(my_enforced_uses_libraries): $(AAPT2)
$(my_enforced_uses_libraries): $(my_verify_script)
$(my_enforced_uses_libraries): $(my_dexpreopt_dep_configs)
$(my_enforced_uses_libraries): $(my_manifest_or_apk)
@@ -254,7 +254,7 @@ ifeq (true,$(LOCAL_ENFORCE_USES_LIBRARIES))
$(my_verify_script) \
--enforce-uses-libraries \
--enforce-uses-libraries-status $@ \
- --aapt $(AAPT) \
+ --aapt $(AAPT2) \
$(PRIVATE_USES_LIBRARIES) \
$(PRIVATE_OPTIONAL_USES_LIBRARIES) \
$(PRIVATE_DEXPREOPT_CONFIGS) \
diff --git a/core/distdir.mk b/core/distdir.mk
index 8f48cf817c..bce8e7fd3c 100644
--- a/core/distdir.mk
+++ b/core/distdir.mk
@@ -49,21 +49,18 @@ endef
define __share-projects-rule
$(1) : PRIVATE_TARGETS := $(2)
-$(1) : PRIVATE_ARGUMENT_FILE := $(call intermediates-dir-for,METAPACKAGING,codesharing)/$(1)/arguments
$(1): $(2) $(COMPLIANCE_LISTSHARE)
$(hide) rm -f $$@
mkdir -p $$(dir $$@)
- mkdir -p $$(dir $$(PRIVATE_ARGUMENT_FILE))
- $$(if $$(strip $$(PRIVATE_TARGETS)),$$(call dump-words-to-file,$$(PRIVATE_TARGETS),$$(PRIVATE_ARGUMENT_FILE)))
- $$(if $$(strip $$(PRIVATE_TARGETS)),OUT_DIR=$(OUT_DIR) $(COMPLIANCE_LISTSHARE) -o $$@ @$$(PRIVATE_ARGUMENT_FILE),touch $$@)
+ $$(if $$(strip $$(PRIVATE_TARGETS)),OUT_DIR=$(OUT_DIR) $(COMPLIANCE_LISTSHARE) -o $$@ $$(PRIVATE_TARGETS),touch $$@)
endef
-# build list of projects to share in $(1) for dist targets in $(2)
+# build list of projects to share in $(1) for meta_lic in $(2)
#
# $(1): the intermediate project sharing file
-# $(2): the dist files to base the sharing on
+# $(2): the license metadata to base the sharing on
define _share-projects-rule
-$(eval $(call __share-projects-rule,$(1),$(call corresponding-license-metadata,$(2))))
+$(eval $(call __share-projects-rule,$(1),$(2)))
endef
.PHONY: alllicensetexts
@@ -86,32 +83,99 @@ $(2): $(3) $(TEXTNOTICE)
$$(if $$(strip $$(PRIVATE_TARGETS)),OUT_DIR=$(OUT_DIR) $(TEXTNOTICE) -o $$@ @$$(PRIVATE_ARGUMENT_FILE),touch $$@)
endef
-# build list of projects to share in $(2) for dist targets in $(3) for dist goal $(1)
+# build list of projects to share in $(2) for meta_lic in $(3) for dist goals $(1)
+# Strip `out/dist/` used as proxy for 'DIST_DIR'
#
-# $(1): the name of the dist goal
+# $(1): the name of the dist goals
# $(2): the intermediate project sharing file
-# $(3): the dist files to base the sharing on
+# $(3): the license metadata to base the sharing on
define _license-texts-rule
-$(eval $(call __license-texts-rule,$(1),$(2),$(call corresponding-license-metadata,$(3)),$(sort $(dir $(3)))))
+$(eval $(call __license-texts-rule,$(1),$(2),$(3),out/dist/))
+endef
+
+###########################################################
+## License metadata build rule for dist target $(1) with meta_lic $(2) copied from $(3)
+###########################################################
+define _dist-target-license-metadata-rule
+$(strip $(eval _meta :=$(2)))
+$(strip $(eval _dep:=))
+# 0p is the indicator for a non-copyrightable file where no party owns the copyright.
+# i.e. pure data with no copyrightable expression.
+# If all of the sources are 0p and only 0p, treat the copied file as 0p. Otherwise, all
+# of the sources must either be 0p or originate from a single metadata file to copy.
+$(strip $(foreach s,$(strip $(3)),\
+ $(eval _dmeta:=$(ALL_TARGETS.$(s).META_LIC))\
+ $(if $(strip $(_dmeta)),\
+ $(if $(filter-out 0p,$(_dep)),\
+ $(if $(filter-out $(_dep) 0p,$(_dmeta)),\
+ $(error cannot copy target from multiple modules: $(1) from $(_dep) and $(_dmeta)),\
+ $(if $(filter 0p,$(_dep)),$(eval _dep:=$(_dmeta)))),\
+ $(eval _dep:=$(_dmeta))\
+ ),\
+ $(eval TARGETS_MISSING_LICENSE_METADATA += $(s) $(1)))))
+
+
+ifeq (0p,$(strip $(_dep)))
+# Not copyrightable. No emcumbrances, no license text, no license kind etc.
+$(_meta): PRIVATE_CONDITIONS := unencumbered
+$(_meta): PRIVATE_SOURCES := $(3)
+$(_meta): PRIVATE_INSTALLED := $(1)
+# use `$(1)` which is the unique and relatively short `out/dist/$(target)`
+$(_meta): PRIVATE_ARGUMENT_FILE := $(call intermediates-dir-for,METAPACKAGING,notice)/$(1)/arguments
+$(_meta): $(BUILD_LICENSE_METADATA)
+$(_meta) :
+ rm -f $$@
+ mkdir -p $$(dir $$@)
+ mkdir -p $$(dir $$(PRIVATE_ARGUMENT_FILE))
+ $$(call dump-words-to-file,\
+ $$(addprefix -c ,$$(PRIVATE_CONDITIONS))\
+ $$(addprefix -s ,$$(PRIVATE_SOURCES))\
+ $$(addprefix -t ,$$(PRIVATE_TARGETS))\
+ $$(addprefix -i ,$$(PRIVATE_INSTALLED)),\
+ $$(PRIVATE_ARGUMENT_FILE))
+ OUT_DIR=$(OUT_DIR) $(BUILD_LICENSE_METADATA) \
+ @$$(PRIVATE_ARGUMENT_FILE) \
+ -o $$@
+
+else ifneq (,$(strip $(_dep)))
+# Not a missing target, copy metadata and `is_container` etc. from license metadata file `$(_dep)`
+$(_meta): PRIVATE_DEST_TARGET := $(1)
+$(_meta): PRIVATE_SOURCE_TARGETS := $(3)
+$(_meta): PRIVATE_SOURCE_METADATA := $(_dep)
+# use `$(1)` which is the unique and relatively short `out/dist/$(target)`
+$(_meta): PRIVATE_ARGUMENT_FILE := $(call intermediates-dir-for,METAPACKAGING,copynotice)/$(1)/arguments
+$(_meta) : $(_dep) $(COPY_LICENSE_METADATA)
+ rm -f $$@
+ mkdir -p $$(dir $$@)
+ mkdir -p $$(dir $$(PRIVATE_ARGUMENT_FILE))
+ $$(call dump-words-to-file,\
+ $$(addprefix -i ,$$(PRIVATE_DEST_TARGET))\
+ $$(addprefix -s ,$$(PRIVATE_SOURCE_TARGETS))\
+ $$(addprefix -d ,$$(PRIVATE_SOURCE_METADATA)),\
+ $$(PRIVATE_ARGUMENT_FILE))
+ OUT_DIR=$(OUT_DIR) $(COPY_LICENSE_METADATA) \
+ @$$(PRIVATE_ARGUMENT_FILE) \
+ -o $$@
+
+endif
endef
+# use `out/dist/` as a proxy for 'DIST_DIR'
define _add_projects_to_share
+$(strip $(eval _mdir := $(call intermediates-dir-for,METAPACKAGING,meta)/out/dist)) \
$(strip $(eval _idir := $(call intermediates-dir-for,METAPACKAGING,shareprojects))) \
$(strip $(eval _tdir := $(call intermediates-dir-for,METAPACKAGING,licensetexts))) \
-$(strip $(eval _goals := $(sort $(_all_dist_goals)))) \
-$(strip $(eval _opairs := $(sort $(_all_dist_goal_output_pairs)))) \
-$(strip $(eval _dpairs := $(sort $(_all_dist_src_dst_pairs)))) \
-$(strip $(eval _allt :=)) \
-$(foreach goal,$(_goals), \
- $(eval _f := $(_idir)/$(goal).shareprojects) \
- $(eval _n := $(_tdir)/$(goal).txt) \
- $(call dist-for-goals,$(goal),$(_f):shareprojects/$(basename $(notdir $(_f)))) \
- $(call dist-for-goals,$(goal),$(_n):licensetexts/$(basename $(notdir $(_n)))) \
- $(eval _targets :=) \
- $(foreach op,$(filter $(goal):%,$(_opairs)),$(foreach p,$(filter %:$(call word-colon,2,$(op)),$(_dpairs)),$(eval _targets += $(call word-colon,1,$(p))))) \
- $(eval _allt += $(_targets)) \
- $(eval $(call _share-projects-rule,$(_f),$(_targets))) \
- $(eval $(call _license-texts-rule,$(goal),$(_n),$(_targets))) \
+$(strip $(eval _allt := $(sort $(foreach goal,$(_all_dist_goal_output_pairs),$(call word-colon,2,$(goal)))))) \
+$(foreach target,$(_allt), \
+ $(eval _goals := $(sort $(foreach dg,$(filter %:$(target),$(_all_dist_goal_output_pairs)),$(call word-colon,1,$(dg))))) \
+ $(eval _srcs := $(sort $(foreach sdp,$(filter %:$(target),$(_all_dist_src_dst_pairs)),$(call word-colon,1,$(sdp))))) \
+ $(eval $(call _dist-target-license-metadata-rule,out/dist/$(target),$(_mdir)/out/dist/$(target).meta_lic,$(_srcs))) \
+ $(eval _f := $(_idir)/$(target).shareprojects) \
+ $(eval _n := $(_tdir)/$(target).txt) \
+ $(eval $(call dist-for-goals,$(_goals),$(_f):shareprojects/$(target).shareprojects)) \
+ $(eval $(call dist-for-goals,$(_goals),$(_n):licensetexts/$(target).txt)) \
+ $(eval $(call _share-projects-rule,$(_f),$(foreach t, $(filter-out $(TARGETS_MISSING_LICENSE_METADATA),out/dist/$(target)),$(_mdir)/$(t).meta_lic))) \
+ $(eval $(call _license-texts-rule,$(_goals),$(_n),$(foreach t,$(filter-out $(TARGETS_MISSING_LICENSE_METADATA),out/dist/$(target)),$(_mdir)/$(t).meta_lic))) \
)
endef
diff --git a/core/envsetup.mk b/core/envsetup.mk
index d116aaf164..fc4afd915e 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -323,7 +323,9 @@ endif
# likely to be relevant to the product or board configuration.
# Soong config variables are dumped as $(call soong_config_set) calls
# instead of the raw variable values, because mk2rbc can't read the
-# raw ones.
+# raw ones. There is a final sed command on the output file to
+# remove leading spaces because I couldn't figure out how to remove
+# them in pure make code.
define dump-variables-rbc
$(eval _dump_variables_rbc_excluded := \
BUILD_NUMBER \
@@ -345,6 +347,7 @@ $(v) := $(strip $($(v)))$(newline))\
$(foreach ns,$(sort $(SOONG_CONFIG_NAMESPACES)),\
$(foreach v,$(sort $(SOONG_CONFIG_$(ns))),\
$$(call soong_config_set,$(ns),$(v),$(SOONG_CONFIG_$(ns)_$(v)))$(newline))))
+$(shell sed -i "s/^ *//g" $(1))
endef
# Read the product specs so we can get TARGET_DEVICE and other
diff --git a/core/notice_files.mk b/core/notice_files.mk
index cbfcaa4f7a..efc17511c0 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -135,7 +135,7 @@ ifdef my_register_name
ifdef LOCAL_SOONG_LICENSE_METADATA
# Soong modules have already produced a license metadata file, copy it to where Make expects it.
- $(eval $(call copy-one-file, $(LOCAL_SOONG_LICENSE_METADATA), $(module_license_metadata)))
+	    $(eval $(call copy-one-license-metadata-file, $(LOCAL_SOONG_LICENSE_METADATA), $(module_license_metadata),$(ALL_MODULES.$(my_register_name).BUILT),$(ALL_MODULES.$(my_register_name).INSTALLED)))
else
# Make modules don't have enough information to produce a license metadata rule until after fix-notice-deps
# has been called, store the necessary information until later.
diff --git a/core/os_licensing.mk b/core/os_licensing.mk
index d8d3c78945..416e4b22af 100644
--- a/core/os_licensing.mk
+++ b/core/os_licensing.mk
@@ -23,7 +23,6 @@ endif
$(call declare-0p-target,$(target_notice_file_xml_gz))
$(call declare-0p-target,$(installed_notice_html_or_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_notice_html_or_xml_gz)
endif
.PHONY: vendorlicense
@@ -35,18 +34,17 @@ VENDOR_NOTICE_DEPS += $(UNMOUNTED_NOTICE_DEPS)
$(eval $(call text-notice-rule,$(target_vendor_notice_file_txt),"Vendor image", \
"Notices for files contained in all filesystem images except system/system_ext/product/odm/vendor_dlkm/odm_dlkm in this directory:", \
- $(VENDOR_NOTICE_DEPS)))
+ $(VENDOR_NOTICE_DEPS),$(VENDOR_NOTICE_DEPS)))
$(eval $(call xml-notice-rule,$(target_vendor_notice_file_xml_gz),"Vendor image", \
"Notices for files contained in all filesystem images except system/system_ext/product/odm/vendor_dlkm/odm_dlkm in this directory:", \
- $(VENDOR_NOTICE_DEPS)))
+ $(VENDOR_NOTICE_DEPS),$(VENDOR_NOTICE_DEPS)))
$(installed_vendor_notice_xml_gz): $(target_vendor_notice_file_xml_gz)
$(copy-file-to-target)
$(call declare-0p-target,$(target_vendor_notice_file_xml_gz))
$(call declare-0p-target,$(installed_vendor_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_vendor_notice_xml_gz)
endif
.PHONY: odmlicense
@@ -55,18 +53,17 @@ odmlicense: $(call corresponding-license-metadata, $(ODM_NOTICE_DEPS)) reportmis
ifneq (,$(ODM_NOTICE_DEPS))
$(eval $(call text-notice-rule,$(target_odm_notice_file_txt),"ODM filesystem image", \
"Notices for files contained in the odm filesystem image in this directory:", \
- $(ODM_NOTICE_DEPS)))
+ $(ODM_NOTICE_DEPS),$(ODM_NOTICE_DEPS)))
$(eval $(call xml-notice-rule,$(target_odm_notice_file_xml_gz),"ODM filesystem image", \
"Notices for files contained in the odm filesystem image in this directory:", \
- $(ODM_NOTICE_DEPS)))
+ $(ODM_NOTICE_DEPS),$(ODM_NOTICE_DEPS)))
$(installed_odm_notice_xml_gz): $(target_odm_notice_file_xml_gz)
$(copy-file-to-target)
$(call declare-0p-target,$(target_odm_notice_file_xml_gz))
$(call declare-0p-target,$(installed_odm_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_odm_notice_xml_gz)
endif
.PHONY: oemlicense
@@ -78,18 +75,17 @@ productlicense: $(call corresponding-license-metadata, $(PRODUCT_NOTICE_DEPS)) r
ifneq (,$(PRODUCT_NOTICE_DEPS))
$(eval $(call text-notice-rule,$(target_product_notice_file_txt),"Product image", \
"Notices for files contained in the product filesystem image in this directory:", \
- $(PRODUCT_NOTICE_DEPS)))
+ $(PRODUCT_NOTICE_DEPS),$(PRODUCT_NOTICE_DEPS)))
$(eval $(call xml-notice-rule,$(target_product_notice_file_xml_gz),"Product image", \
"Notices for files contained in the product filesystem image in this directory:", \
- $(PRODUCT_NOTICE_DEPS)))
+ $(PRODUCT_NOTICE_DEPS),$(PRODUCT_NOTICE_DEPS)))
$(installed_product_notice_xml_gz): $(target_product_notice_file_xml_gz)
$(copy-file-to-target)
$(call declare-0p-target,$(target_product_notice_file_xml_gz))
$(call declare-0p-target,$(installed_product_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_product_notice_xml_gz)
endif
.PHONY: systemextlicense
@@ -98,18 +94,17 @@ systemextlicense: $(call corresponding-license-metadata, $(SYSTEM_EXT_NOTICE_DEP
ifneq (,$(SYSTEM_EXT_NOTICE_DEPS))
$(eval $(call text-notice-rule,$(target_system_ext_notice_file_txt),"System_ext image", \
"Notices for files contained in the system_ext filesystem image in this directory:", \
- $(SYSTEM_EXT_NOTICE_DEPS)))
+ $(SYSTEM_EXT_NOTICE_DEPS),$(SYSTEM_EXT_NOTICE_DEPS)))
$(eval $(call xml-notice-rule,$(target_system_ext_notice_file_xml_gz),"System_ext image", \
"Notices for files contained in the system_ext filesystem image in this directory:", \
- $(SYSTEM_EXT_NOTICE_DEPS)))
+ $(SYSTEM_EXT_NOTICE_DEPS),$(SYSTEM_EXT_NOTICE_DEPS)))
$(installed_system_ext_notice_xml_gz): $(target_system_ext_notice_file_xml_gz)
$(copy-file-to-target)
$(call declare-0p-target,$(target_system_ext_notice_file_xml_gz))
$(call declare-0p-target,$(installed_system_ext_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_system_ext_notice_xml_gz)
endif
.PHONY: vendor_dlkmlicense
@@ -118,18 +113,17 @@ vendor_dlkmlicense: $(call corresponding-license-metadata, $(VENDOR_DLKM_NOTICE_
ifneq (,$(VENDOR_DLKM_NOTICE_DEPS))
$(eval $(call text-notice-rule,$(target_vendor_dlkm_notice_file_txt),"Vendor_dlkm image", \
"Notices for files contained in the vendor_dlkm filesystem image in this directory:", \
- $(VENDOR_DLKM_NOTICE_DEPS)))
+ $(VENDOR_DLKM_NOTICE_DEPS),$(VENDOR_DLKM_NOTICE_DEPS)))
$(eval $(call xml-notice-rule,$(target_vendor_dlkm_notice_file_xml_gz),"Vendor_dlkm image", \
"Notices for files contained in the vendor_dlkm filesystem image in this directory:", \
- $(VENDOR_DLKM_NOTICE_DEPS)))
+ $(VENDOR_DLKM_NOTICE_DEPS),$(VENDOR_DLKM_NOTICE_DEPS)))
$(installed_vendor_dlkm_notice_xml_gz): $(target_vendor_dlkm_notice_file_xml_gz)
$(copy-file-to-target)
$(call declare-0p-target,$(target_vendor_dlkm_notice_file_xml_gz))
$(call declare-0p-target,$(installed_vendor_dlkm_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_vendor_dlkm_notice_xml_gz)
endif
.PHONY: odm_dlkmlicense
@@ -138,18 +132,17 @@ odm_dlkmlicense: $(call corresponding-license-metadata, $(ODM_DLKM_NOTICE_DEPS))
ifneq (,$(ODM_DLKM_NOTICE_DEPS))
$(eval $(call text-notice-rule,$(target_odm_dlkm_notice_file_txt),"ODM_dlkm filesystem image", \
"Notices for files contained in the odm_dlkm filesystem image in this directory:", \
- $(ODM_DLKM_NOTICE_DEPS)))
+ $(ODM_DLKM_NOTICE_DEPS),$(ODM_DLKM_NOTICE_DEPS)))
$(eval $(call xml-notice-rule,$(target_odm_dlkm_notice_file_xml_gz),"ODM_dlkm filesystem image", \
"Notices for files contained in the odm_dlkm filesystem image in this directory:", \
- $(ODM_DLMK_NOTICE_DEPS)))
+ $(ODM_DLKM_NOTICE_DEPS),$(ODM_DLKM_NOTICE_DEPS)))
$(installed_odm_dlkm_notice_xml_gz): $(target_odm_dlkm_notice_file_xml_gz)
$(copy-file-to-target)
$(call declare-0p-target,$(target_odm_dlkm_notice_file_xml_gz))
$(call declare-0p-target,$(installed_odm_dlkm_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_odm_dlkm_notice_xml_gz)
endif
.PHONY: system_dlkmlicense
@@ -158,18 +151,17 @@ system_dlkmlicense: $(call corresponding-license-metadata, $(SYSTEM_DLKM_NOTICE_
ifneq (,$(SYSTEM_DLKM_NOTICE_DEPS))
$(eval $(call text-notice-rule,$(target_system_dlkm_notice_file_txt),"System_dlkm filesystem image", \
"Notices for files contained in the system_dlkm filesystem image in this directory:", \
- $(SYSTEM_DLKM_NOTICE_DEPS)))
+ $(SYSTEM_DLKM_NOTICE_DEPS),$(SYSTEM_DLKM_NOTICE_DEPS)))
$(eval $(call xml-notice-rule,$(target_system_dlkm_notice_file_xml_gz),"System_dlkm filesystem image", \
"Notices for files contained in the system_dlkm filesystem image in this directory:", \
- $(SYSTEM_DLMK_NOTICE_DEPS)))
+ $(SYSTEM_DLKM_NOTICE_DEPS),$(SYSTEM_DLKM_NOTICE_DEPS)))
$(installed_system_dlkm_notice_xml_gz): $(target_system_dlkm_notice_file_xml_gz)
$(copy-file-to-target)
$(call declare-0p-target,$(target_system_dlkm_notice_file_xml_gz))
$(call declare-0p-target,$(installed_sysetm_dlkm_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_system_dlkm_notice_xml_gz)
endif
endif # not TARGET_BUILD_APPS
diff --git a/core/proguard.flags b/core/proguard.flags
index aee5271744..53f63d8ea0 100644
--- a/core/proguard.flags
+++ b/core/proguard.flags
@@ -9,10 +9,15 @@
# Add this flag in your package's own configuration if it's needed.
#-flattenpackagehierarchy
-# Keep classes and methods that have the guava @VisibleForTesting annotation
--keep @**.VisibleForTesting class *
--keepclassmembers class * {
-@**.VisibleForTesting *;
+# Keep classes and methods that have @VisibleForTesting annotations, except in
+# intermediate libraries that export those annotations (e.g., androidx, guava).
+# This avoids keeping library-specific test code that isn't actually needed
+# for platform testing.
+# TODO(b/239961360): Migrate away from androidx.annotation.VisibleForTesting
+# and com.google.common.annotations.VisibleForTesting use in platform code.
+-keep @**.VisibleForTesting class !androidx.**,!com.google.common.**,*
+-keepclassmembers class !androidx.**,!com.google.common.**,* {
+ @**.VisibleForTesting *;
}
# Keep rule for members that are needed solely to keep alive downstream weak
diff --git a/core/proguard_basic_keeps.flags b/core/proguard_basic_keeps.flags
index 30c2341649..b5d14fa6b2 100644
--- a/core/proguard_basic_keeps.flags
+++ b/core/proguard_basic_keeps.flags
@@ -2,6 +2,11 @@
# that isn't explicitly part of the API
-dontskipnonpubliclibraryclasses -dontskipnonpubliclibraryclassmembers
+# Annotations are implemented as attributes, so we have to explicitly keep them.
+# Keep all runtime-visible annotations like RuntimeVisibleParameterAnnotations
+# and RuntimeVisibleTypeAnnotations, as well as associated defaults.
+-keepattributes RuntimeVisible*Annotation*,AnnotationDefault
+
# For enumeration classes, see http://proguard.sourceforge.net/manual/examples.html#enumerations
-keepclassmembers enum * {
public static **[] values();
@@ -74,5 +79,6 @@
-dontnote
# The lite proto runtime uses reflection to access fields based on the names in
-# the schema, keep all the fields.
--keepclassmembers class * extends com.google.protobuf.MessageLite { <fields>; }
+# the schema, keep all the fields. Wildcard is used to apply the rule to classes
+# that have been renamed with jarjar.
+-keepclassmembers class * extends **.protobuf.MessageLite { <fields>; }
diff --git a/core/soong_config.mk b/core/soong_config.mk
index feffcc7bba..28ceebdc34 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -94,6 +94,7 @@ $(call add_json_str, AAPTPreferredConfig, $(PRODUCT_AAPT_PREF_CON
$(call add_json_list, AAPTPrebuiltDPI, $(PRODUCT_AAPT_PREBUILT_DPI))
$(call add_json_str, DefaultAppCertificate, $(PRODUCT_DEFAULT_DEV_CERTIFICATE))
+$(call add_json_str, MainlineSepolicyDevCertificates, $(MAINLINE_SEPOLICY_DEV_CERTIFICATES))
$(call add_json_str, AppsDefaultVersionName, $(APPS_DEFAULT_VERSION_NAME))
@@ -250,7 +251,7 @@ $(call add_json_map, VendorVars)
$(foreach namespace,$(SOONG_CONFIG_NAMESPACES),\
$(call add_json_map, $(namespace))\
$(foreach key,$(SOONG_CONFIG_$(namespace)),\
- $(call add_json_str,$(key),$(SOONG_CONFIG_$(namespace)_$(key))))\
+ $(call add_json_str,$(key),$(subst ",\",$(SOONG_CONFIG_$(namespace)_$(key)))))\
$(call end_json_map))
$(call end_json_map)
@@ -293,6 +294,8 @@ $(call add_json_list, SepolicyFreezeTestExtraPrebuiltDirs, $(SEPOLICY_FREEZE_TES
$(call add_json_bool, GenerateAidlNdkPlatformBackend, $(filter true,$(NEED_AIDL_NDK_PLATFORM_BACKEND)))
+$(call add_json_bool, IgnorePrefer32OnDevice, $(filter true,$(IGNORE_PREFER32_ON_DEVICE)))
+
$(call json_end)
$(file >$(SOONG_VARIABLES).tmp,$(json_contents))
diff --git a/core/sysprop.mk b/core/sysprop.mk
index 61c07ba603..570702a679 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -47,10 +47,18 @@ define generate-common-build-props
echo "ro.product.$(1).model=$(PRODUCT_MODEL)" >> $(2);\
echo "ro.product.$(1).name=$(TARGET_PRODUCT)" >> $(2);\
)\
- $(if $(filter system vendor odm,$(1)),\
- echo "ro.$(1).product.cpu.abilist=$(TARGET_CPU_ABI_LIST) " >> $(2);\
- echo "ro.$(1).product.cpu.abilist32=$(TARGET_CPU_ABI_LIST_32_BIT)" >> $(2);\
- echo "ro.$(1).product.cpu.abilist64=$(TARGET_CPU_ABI_LIST_64_BIT)" >> $(2);\
+ $(if $(filter true,$(ZYGOTE_FORCE_64)),\
+ $(if $(filter vendor,$(1)),\
+ echo "ro.$(1).product.cpu.abilist=$(TARGET_CPU_ABI_LIST_64_BIT)" >> $(2);\
+ echo "ro.$(1).product.cpu.abilist32=" >> $(2);\
+ echo "ro.$(1).product.cpu.abilist64=$(TARGET_CPU_ABI_LIST_64_BIT)" >> $(2);\
+ )\
+ ,\
+ $(if $(filter system vendor odm,$(1)),\
+ echo "ro.$(1).product.cpu.abilist=$(TARGET_CPU_ABI_LIST)" >> $(2);\
+ echo "ro.$(1).product.cpu.abilist32=$(TARGET_CPU_ABI_LIST_32_BIT)" >> $(2);\
+ echo "ro.$(1).product.cpu.abilist64=$(TARGET_CPU_ABI_LIST_64_BIT)" >> $(2);\
+ )\
)\
echo "ro.$(1).build.date=`$(DATE_FROM_FILE)`" >> $(2);\
echo "ro.$(1).build.date.utc=`$(DATE_FROM_FILE) +%s`" >> $(2);\
@@ -282,6 +290,7 @@ $(gen_from_buildinfo_sh): $(INTERNAL_BUILD_ID_MAKEFILE) $(API_FINGERPRINT) | $(B
TARGET_CPU_ABI_LIST_64_BIT="$(TARGET_CPU_ABI_LIST_64_BIT)" \
TARGET_CPU_ABI="$(TARGET_CPU_ABI)" \
TARGET_CPU_ABI2="$(TARGET_CPU_ABI2)" \
+ ZYGOTE_FORCE_64_BIT="$(ZYGOTE_FORCE_64_BIT)" \
bash $(BUILDINFO_SH) > $@
ifdef TARGET_SYSTEM_PROP
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index cd67ad6a9b..ce25ee2255 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -104,7 +104,7 @@ ifndef PLATFORM_SECURITY_PATCH
# It must be of the form "YYYY-MM-DD" on production devices.
# It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
# If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
- PLATFORM_SECURITY_PATCH := 2022-06-05
+ PLATFORM_SECURITY_PATCH := 2022-07-05
endif
.KATI_READONLY := PLATFORM_SECURITY_PATCH
diff --git a/envsetup.sh b/envsetup.sh
index 8856212004..ea28c2ea42 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -455,10 +455,19 @@ function multitree_lunch()
{
local code
local results
+ # Lunch must be run in the topdir, but this way we get a clear error
+ # message, instead of FileNotFound.
+ local T=$(multitree_gettop)
+ if [ -n "$T" ]; then
+ "$T/build/build/make/orchestrator/core/orchestrator.py" "$@"
+ else
+ _multitree_lunch_error
+ return 1
+ fi
if $(echo "$1" | grep -q '^-') ; then
# Calls starting with a -- argument are passed directly and the function
# returns with the lunch.py exit code.
- build/build/make/orchestrator/core/lunch.py "$@"
+ "${T}/build/build/make/orchestrator/core/lunch.py" "$@"
code=$?
if [[ $code -eq 2 ]] ; then
echo 1>&2
@@ -469,7 +478,7 @@ function multitree_lunch()
fi
else
# All other calls go through the --lunch variant of lunch.py
- results=($(build/build/make/orchestrator/core/lunch.py --lunch "$@"))
+ results=($(${T}/build/build/make/orchestrator/core/lunch.py --lunch "$@"))
code=$?
if [[ $code -eq 2 ]] ; then
echo 1>&2
@@ -1813,7 +1822,8 @@ function _wrap_build()
function _trigger_build()
(
local -r bc="$1"; shift
- if T="$(gettop)"; then
+ local T=$(gettop)
+ if [ -n "$T" ]; then
_wrap_build "$T/build/soong/soong_ui.bash" --build-mode --${bc} --dir="$(pwd)" "$@"
else
>&2 echo "Couldn't locate the top of the tree. Try setting TOP."
@@ -1873,8 +1883,9 @@ function _multitree_lunch_error()
function multitree_build()
{
- if T="$(multitree_gettop)"; then
- "$T/build/build/orchestrator/core/orchestrator.py" "$@"
+ local T=$(multitree_gettop)
+ if [ -n "$T" ]; then
+ "$T/build/build/make/orchestrator/core/orchestrator.py" "$@"
else
_multitree_lunch_error
return 1
diff --git a/finalize_branch_for_release.sh b/finalize_branch_for_release.sh
index 12b096fa5d..ce90ac0ba0 100755
--- a/finalize_branch_for_release.sh
+++ b/finalize_branch_for_release.sh
@@ -16,15 +16,23 @@ function finalize_main() {
# Update references in the codebase to new API version (TODO)
# ...
- AIDL_TRANSITIVE_FREEZE=true $m aidl-freeze-api
+ AIDL_TRANSITIVE_FREEZE=true $m aidl-freeze-api create_reference_dumps
+
+ # Generate ABI dumps
+ ANDROID_BUILD_TOP="$top" \
+ out/host/linux-x86/bin/create_reference_dumps \
+ -p aosp_arm64 --build-variant user
# Update new versions of files. See update-vndk-list.sh (which requires envsetup.sh)
$m check-vndk-list || \
{ cp $top/out/soong/vndk/vndk.libraries.txt $top/build/make/target/product/gsi/current.txt; }
- # for now, we simulate the release state for AIDL, but in the future, we would want
- # to actually turn the branch into the REL state and test with that
- AIDL_FROZEN_REL=true $m nothing # test build
+ # This command tests:
+ # The release state for AIDL.
+ # ABI difference between user and userdebug builds.
+ # In the future, we would want to actually turn the branch into the REL
+ # state and test with that.
+ AIDL_FROZEN_REL=true $m droidcore
# Build SDK (TODO)
# lunch sdk...
diff --git a/orchestrator/README b/orchestrator/README
deleted file mode 100644
index 9a1e302394..0000000000
--- a/orchestrator/README
+++ /dev/null
@@ -1,8 +0,0 @@
-DEMO
-
-from the root of the workspace
-
-multitree_lunch build/build/make/orchestrator/test_workspace/combo.mcombo eng
-
-rm -rf out && multitree_build && echo "==== Files ====" && find out -type f
-
diff --git a/orchestrator/core/api_assembly.py b/orchestrator/core/api_assembly.py
deleted file mode 100644
index d7abef72fc..0000000000
--- a/orchestrator/core/api_assembly.py
+++ /dev/null
@@ -1,156 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import json
-import os
-import sys
-
-import api_assembly_cc
-import ninja_tools
-
-
-ContributionData = collections.namedtuple("ContributionData", ("inner_tree", "json_data"))
-
-def assemble_apis(context, inner_trees):
- # Find all of the contributions from the inner tree
- contribution_files_dict = inner_trees.for_each_tree(api_contribution_files_for_inner_tree)
-
- # Load and validate the contribution files
- # TODO: Check timestamps and skip unnecessary work
- contributions = []
- for tree_key, filenames in contribution_files_dict.items():
- for filename in filenames:
- json_data = load_contribution_file(context, filename)
- if not json_data:
- continue
- # TODO: Validate the configs, especially that the domains match what we asked for
- # from the lunch config.
- contributions.append(ContributionData(inner_trees.get(tree_key), json_data))
-
- # Group contributions by language and API surface
- stub_libraries = collate_contributions(contributions)
-
- # Initialize the ninja file writer
- with open(context.out.api_ninja_file(), "w") as ninja_file:
- ninja = ninja_tools.Ninja(context, ninja_file)
-
- # Initialize the build file writer
- build_file = BuildFile() # TODO: parameters?
-
- # Iterate through all of the stub libraries and generate rules to assemble them
- # and Android.bp/BUILD files to make those available to inner trees.
- # TODO: Parallelize? Skip unnecessary work?
- for stub_library in stub_libraries:
- STUB_LANGUAGE_HANDLERS[stub_library.language](context, ninja, build_file, stub_library)
-
- # TODO: Handle host_executables separately or as a StubLibrary language?
-
- # Finish writing the ninja file
- ninja.write()
-
-
-def api_contribution_files_for_inner_tree(tree_key, inner_tree, cookie):
- "Scan an inner_tree's out dir for the api contribution files."
- directory = inner_tree.out.api_contributions_dir()
- result = []
- with os.scandir(directory) as it:
- for dirent in it:
- if not dirent.is_file():
- break
- if dirent.name.endswith(".json"):
- result.append(os.path.join(directory, dirent.name))
- return result
-
-
-def load_contribution_file(context, filename):
- "Load and return the API contribution at filename. On error report error and return None."
- with open(filename) as f:
- try:
- return json.load(f)
- except json.decoder.JSONDecodeError as ex:
- # TODO: Error reporting
- context.errors.error(ex.msg, filename, ex.lineno, ex.colno)
- raise ex
-
-
-class StubLibraryContribution(object):
- def __init__(self, inner_tree, api_domain, library_contribution):
- self.inner_tree = inner_tree
- self.api_domain = api_domain
- self.library_contribution = library_contribution
-
-
-class StubLibrary(object):
- def __init__(self, language, api_surface, api_surface_version, name):
- self.language = language
- self.api_surface = api_surface
- self.api_surface_version = api_surface_version
- self.name = name
- self.contributions = []
-
- def add_contribution(self, contrib):
- self.contributions.append(contrib)
-
-
-def collate_contributions(contributions):
- """Take the list of parsed API contribution files, and group targets by API Surface, version,
- language and library name, and return a StubLibrary object for each of those.
- """
- grouped = {}
- for contribution in contributions:
- for language in STUB_LANGUAGE_HANDLERS.keys():
- for library in contribution.json_data.get(language, []):
- key = (language, contribution.json_data["name"],
- contribution.json_data["version"], library["name"])
- stub_library = grouped.get(key)
- if not stub_library:
- stub_library = StubLibrary(language, contribution.json_data["name"],
- contribution.json_data["version"], library["name"])
- grouped[key] = stub_library
- stub_library.add_contribution(StubLibraryContribution(contribution.inner_tree,
- contribution.json_data["api_domain"], library))
- return list(grouped.values())
-
-
-def assemble_java_api_library(context, ninja, build_file, stub_library):
- print("assembling java_api_library %s-%s %s from:" % (stub_library.api_surface,
- stub_library.api_surface_version, stub_library.name))
- for contrib in stub_library.contributions:
- print(" %s %s" % (contrib.api_domain, contrib.library_contribution["api"]))
- # TODO: Implement me
-
-
-def assemble_resource_api_library(context, ninja, build_file, stub_library):
- print("assembling resource_api_library %s-%s %s from:" % (stub_library.api_surface,
- stub_library.api_surface_version, stub_library.name))
- for contrib in stub_library.contributions:
- print(" %s %s" % (contrib.api_domain, contrib.library_contribution["api"]))
- # TODO: Implement me
-
-
-STUB_LANGUAGE_HANDLERS = {
- "cc_libraries": api_assembly_cc.assemble_cc_api_library,
- "java_libraries": assemble_java_api_library,
- "resource_libraries": assemble_resource_api_library,
-}
-
-
-class BuildFile(object):
- "Abstract generator for Android.bp files and BUILD files."
- pass
-
-
diff --git a/orchestrator/core/api_assembly_cc.py b/orchestrator/core/api_assembly_cc.py
deleted file mode 100644
index ca9b2a4e81..0000000000
--- a/orchestrator/core/api_assembly_cc.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-
-def assemble_cc_api_library(context, ninja, build_file, stub_library):
- staging_dir = context.out.api_library_dir(stub_library.api_surface,
- stub_library.api_surface_version, stub_library.name)
- work_dir = context.out.api_library_work_dir(stub_library.api_surface,
- stub_library.api_surface_version, stub_library.name)
-
- # Generate rules to copy headers
- includes = []
- include_dir = os.path.join(staging_dir, "include")
- for contrib in stub_library.contributions:
- for headers in contrib.library_contribution["headers"]:
- root = headers["root"]
- for file in headers["files"]:
- # TODO: Deal with collisions of the same name from multiple contributions
- include = os.path.join(include_dir, file)
- ninja.add_copy_file(include, os.path.join(contrib.inner_tree.root, root, file))
- includes.append(include)
-
- # Generate rule to run ndkstubgen
-
-
- # Generate rule to compile stubs to library
-
- # Generate phony rule to build the library
- # TODO: This name probably conflictgs with something
- ninja.add_phony("-".join((stub_library.api_surface, str(stub_library.api_surface_version),
- stub_library.name)), includes)
-
- # Generate build files
-
diff --git a/orchestrator/core/api_domain.py b/orchestrator/core/api_domain.py
deleted file mode 100644
index bb7306c34a..0000000000
--- a/orchestrator/core/api_domain.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-class ApiDomain(object):
- def __init__(self, name, tree, product):
- # Product will be null for modules
- self.name = name
- self.tree = tree
- self.product = product
-
- def __str__(self):
- return "ApiDomain(name=\"%s\" tree.root=\"%s\" product=%s)" % (
- self.name, self.tree.root,
- "None" if self.product is None else "\"%s\"" % self.product)
-
diff --git a/orchestrator/core/api_export.py b/orchestrator/core/api_export.py
deleted file mode 100644
index 2f26b02b61..0000000000
--- a/orchestrator/core/api_export.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-def export_apis_from_tree(tree_key, inner_tree, cookie):
- inner_tree.invoke(["export_api_contributions"])
-
-
diff --git a/orchestrator/core/final_packaging.py b/orchestrator/core/final_packaging.py
deleted file mode 100644
index 03fe890f75..0000000000
--- a/orchestrator/core/final_packaging.py
+++ /dev/null
@@ -1,117 +0,0 @@
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import os
-import sys
-
-import ninja_tools
-import ninja_syntax # Has to be after ninja_tools because of the path hack
-
-def final_packaging(context, inner_trees):
- """Pull together all of the previously defined rules into the final build stems."""
-
- with open(context.out.outer_ninja_file(), "w") as ninja_file:
- ninja = ninja_tools.Ninja(context, ninja_file)
-
- # Add the api surfaces file
- ninja.add_subninja(ninja_syntax.Subninja(context.out.api_ninja_file(), chDir=None))
-
- # For each inner tree
- for tree in inner_trees.keys():
- # TODO: Verify that inner_tree.ninja was generated
-
- # Read and verify file
- build_targets = read_build_targets_json(context, tree)
- if not build_targets:
- continue
-
- # Generate the ninja and build files for this inner tree
- generate_cross_domain_build_rules(context, ninja, tree, build_targets)
-
- # Finish writing the ninja file
- ninja.write()
-
-
-def read_build_targets_json(context, tree):
- """Read and validate the build_targets.json file for the given tree."""
- try:
- f = open(tree.out.build_targets_file())
- except FileNotFoundError:
- # It's allowed not to have any artifacts (e.g. if a tree is a light tree with only APIs)
- return None
-
- data = None
- with f:
- try:
- data = json.load(f)
- except json.decoder.JSONDecodeError as ex:
- sys.stderr.write("Error parsing file: %s\n" % tree.out.build_targets_file())
- # TODO: Error reporting
- raise ex
-
- # TODO: Better error handling
- # TODO: Validate json schema
- return data
-
-
-def generate_cross_domain_build_rules(context, ninja, tree, build_targets):
- "Generate the ninja and build files for the inner tree."
- # Include the inner tree's inner_tree.ninja
- ninja.add_subninja(ninja_syntax.Subninja(tree.out.main_ninja_file(), chDir=tree.root))
-
- # Generate module rules and files
- for module in build_targets.get("modules", []):
- generate_shared_module(context, ninja, tree, module)
-
- # Generate staging rules
- staging_dir = context.out.staging_dir()
- for staged in build_targets.get("staging", []):
- # TODO: Enforce that dest isn't in disallowed subdir of out or absolute
- dest = staged["dest"]
- dest = os.path.join(staging_dir, dest)
- if "src" in staged and "obj" in staged:
- context.errors.error("Can't have both \"src\" and \"obj\" tags in \"staging\" entry."
- ) # TODO: Filename and line if possible
- if "src" in staged:
- ninja.add_copy_file(dest, os.path.join(tree.root, staged["src"]))
- elif "obj" in staged:
- ninja.add_copy_file(dest, os.path.join(tree.out.root(), staged["obj"]))
- ninja.add_global_phony("staging", [dest])
-
- # Generate dist rules
- dist_dir = context.out.dist_dir()
- for disted in build_targets.get("dist", []):
- # TODO: Enforce that dest absolute
- dest = disted["dest"]
- dest = os.path.join(dist_dir, dest)
- ninja.add_copy_file(dest, os.path.join(tree.root, disted["src"]))
- ninja.add_global_phony("dist", [dest])
-
-
-def generate_shared_module(context, ninja, tree, module):
- """Generate ninja rules for the given build_targets.json defined module."""
- module_name = module["name"]
- module_type = module["type"]
- share_dir = context.out.module_share_dir(module_type, module_name)
- src_file = os.path.join(tree.root, module["file"])
-
- if module_type == "apex":
- ninja.add_copy_file(os.path.join(share_dir, module_name + ".apex"), src_file)
- # TODO: Generate build file
-
- else:
- # TODO: Better error handling
- raise Exception("Invalid module type: %s" % module)
diff --git a/orchestrator/core/inner_tree.py b/orchestrator/core/inner_tree.py
deleted file mode 100644
index d348ee7f98..0000000000
--- a/orchestrator/core/inner_tree.py
+++ /dev/null
@@ -1,193 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import subprocess
-import sys
-import textwrap
-
-class InnerTreeKey(object):
- """Trees are identified uniquely by their root and the TARGET_PRODUCT they will use to build.
- If a single tree uses two different prdoucts, then we won't make assumptions about
- them sharing _anything_.
- TODO: This is true for soong. It's more likely that bazel could do analysis for two
- products at the same time in a single tree, so there's an optimization there to do
- eventually."""
- def __init__(self, root, product):
- self.root = root
- self.product = product
-
- def __str__(self):
- return "TreeKey(root=%s product=%s)" % (enquote(self.root), enquote(self.product))
-
- def __hash__(self):
- return hash((self.root, self.product))
-
- def _cmp(self, other):
- if self.root < other.root:
- return -1
- if self.root > other.root:
- return 1
- if self.product == other.product:
- return 0
- if self.product is None:
- return -1
- if other.product is None:
- return 1
- if self.product < other.product:
- return -1
- return 1
-
- def __eq__(self, other):
- return self._cmp(other) == 0
-
- def __ne__(self, other):
- return self._cmp(other) != 0
-
- def __lt__(self, other):
- return self._cmp(other) < 0
-
- def __le__(self, other):
- return self._cmp(other) <= 0
-
- def __gt__(self, other):
- return self._cmp(other) > 0
-
- def __ge__(self, other):
- return self._cmp(other) >= 0
-
-
-class InnerTree(object):
- def __init__(self, context, root, product):
- """Initialize with the inner tree root (relative to the workspace root)"""
- self.root = root
- self.product = product
- self.domains = {}
- # TODO: Base directory on OUT_DIR
- out_root = context.out.inner_tree_dir(root)
- if product:
- out_root += "_" + product
- else:
- out_root += "_unbundled"
- self.out = OutDirLayout(out_root)
-
- def __str__(self):
- return "InnerTree(root=%s product=%s domains=[%s])" % (enquote(self.root),
- enquote(self.product),
- " ".join([enquote(d) for d in sorted(self.domains.keys())]))
-
- def invoke(self, args):
- """Call the inner tree command for this inner tree. Exits on failure."""
- # TODO: Build time tracing
-
- # Validate that there is a .inner_build command to run at the root of the tree
- # so we can print a good error message
- inner_build_tool = os.path.join(self.root, ".inner_build")
- if not os.access(inner_build_tool, os.X_OK):
- sys.stderr.write(("Unable to execute %s. Is there an inner tree or lunch combo"
- + " misconfiguration?\n") % inner_build_tool)
- sys.exit(1)
-
- # TODO: This is where we should set up the shared trees
-
- # Build the command
- cmd = [inner_build_tool, "--out_dir", self.out.root()]
- for domain_name in sorted(self.domains.keys()):
- cmd.append("--api_domain")
- cmd.append(domain_name)
- cmd += args
-
- # Run the command
- process = subprocess.run(cmd, shell=False)
-
- # TODO: Probably want better handling of inner tree failures
- if process.returncode:
- sys.stderr.write("Build error in inner tree: %s\nstopping multitree build.\n"
- % self.root)
- sys.exit(1)
-
-
-class InnerTrees(object):
- def __init__(self, trees, domains):
- self.trees = trees
- self.domains = domains
-
- def __str__(self):
- "Return a debugging dump of this object"
- return textwrap.dedent("""\
- InnerTrees {
- trees: [
- %(trees)s
- ]
- domains: [
- %(domains)s
- ]
- }""" % {
- "trees": "\n ".join(sorted([str(t) for t in self.trees.values()])),
- "domains": "\n ".join(sorted([str(d) for d in self.domains.values()])),
- })
-
-
- def for_each_tree(self, func, cookie=None):
- """Call func for each of the inner trees once for each product that will be built in it.
-
- The calls will be in a stable order.
-
- Return a map of the InnerTreeKey to any results returned from func().
- """
- result = {}
- for key in sorted(self.trees.keys()):
- result[key] = func(key, self.trees[key], cookie)
- return result
-
-
- def get(self, tree_key):
- """Get an inner tree for tree_key"""
- return self.trees.get(tree_key)
-
- def keys(self):
- "Get the keys for the inner trees in name order."
- return [self.trees[k] for k in sorted(self.trees.keys())]
-
-
-class OutDirLayout(object):
- """Encapsulates the logic about the layout of the inner tree out directories.
- See also context.OutDir for outer tree out dir contents."""
-
- def __init__(self, root):
- "Initialize with the root of the OUT_DIR for the inner tree."
- self._root = root
-
- def root(self):
- return self._root
-
- def tree_info_file(self):
- return os.path.join(self._root, "tree_info.json")
-
- def api_contributions_dir(self):
- return os.path.join(self._root, "api_contributions")
-
- def build_targets_file(self):
- return os.path.join(self._root, "build_targets.json")
-
- def main_ninja_file(self):
- return os.path.join(self._root, "inner_tree.ninja")
-
-
-def enquote(s):
- return "None" if s is None else "\"%s\"" % s
-
-
diff --git a/orchestrator/core/interrogate.py b/orchestrator/core/interrogate.py
deleted file mode 100644
index 9fe769e5ef..0000000000
--- a/orchestrator/core/interrogate.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import os
-
-def interrogate_tree(tree_key, inner_tree, cookie):
- inner_tree.invoke(["describe"])
-
- info_json_filename = inner_tree.out.tree_info_file()
-
- # TODO: Error handling
- with open(info_json_filename) as f:
- info_json = json.load(f)
-
- # TODO: Check orchestrator protocol
-
diff --git a/orchestrator/core/lunch.py b/orchestrator/core/lunch.py
deleted file mode 100755
index a6484787e1..0000000000
--- a/orchestrator/core/lunch.py
+++ /dev/null
@@ -1,408 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import glob
-import json
-import os
-import sys
-
-EXIT_STATUS_OK = 0
-EXIT_STATUS_ERROR = 1
-EXIT_STATUS_NEED_HELP = 2
-
-
-def find_dirs(path, name, ttl=6):
- """Search at most ttl directories deep inside path for a directory called name
- and yield directories that match."""
- # The dance with subdirs is so that we recurse in sorted order.
- subdirs = []
- with os.scandir(path) as it:
- for dirent in sorted(it, key=lambda x: x.name):
- try:
- if dirent.is_dir():
- if dirent.name == name:
- yield os.path.join(path, dirent.name)
- elif ttl > 0:
- subdirs.append(dirent.name)
- except OSError:
- # Consume filesystem errors, e.g. too many links, permission etc.
- pass
- for subdir in subdirs:
- yield from find_dirs(os.path.join(path, subdir), name, ttl-1)
-
-
-def walk_paths(path, matcher, ttl=10):
- """Do a traversal of all files under path yielding each file that matches
- matcher."""
- # First look for files, then recurse into directories as needed.
- # The dance with subdirs is so that we recurse in sorted order.
- subdirs = []
- with os.scandir(path) as it:
- for dirent in sorted(it, key=lambda x: x.name):
- try:
- if dirent.is_file():
- if matcher(dirent.name):
- yield os.path.join(path, dirent.name)
- if dirent.is_dir():
- if ttl > 0:
- subdirs.append(dirent.name)
- except OSError:
- # Consume filesystem errors, e.g. too many links, permission etc.
- pass
- for subdir in sorted(subdirs):
- yield from walk_paths(os.path.join(path, subdir), matcher, ttl-1)
-
-
-def find_file(path, filename):
- """Return a file called filename inside path, no more than ttl levels deep.
-
- Directories are searched alphabetically.
- """
- for f in walk_paths(path, lambda x: x == filename):
- return f
-
-
-def find_config_dirs(workspace_root):
- """Find the configuration files in the well known locations inside workspace_root
-
- <workspace_root>/build/build/orchestrator/multitree_combos
- (AOSP devices, such as cuttlefish)
-
- <workspace_root>/vendor/**/multitree_combos
- (specific to a vendor and not open sourced)
-
- <workspace_root>/device/**/multitree_combos
- (specific to a vendor and are open sourced)
-
- Directories are returned specifically in this order, so that aosp can't be
- overridden, but vendor overrides device.
- """
- # TODO: This is not looking in inner trees correctly.
-
- # TODO: When orchestrator is in its own git project remove the "make/" here
- yield os.path.join(workspace_root, "build/build/make/orchestrator/multitree_combos")
-
- dirs = ["vendor", "device"]
- for d in dirs:
- yield from find_dirs(os.path.join(workspace_root, d), "multitree_combos")
-
-
-def find_named_config(workspace_root, shortname):
- """Find the config with the given shortname inside workspace_root.
-
- Config directories are searched in the order described in find_config_dirs,
- and inside those directories, alphabetically."""
- filename = shortname + ".mcombo"
- for config_dir in find_config_dirs(workspace_root):
- found = find_file(config_dir, filename)
- if found:
- return found
- return None
-
-
-def parse_product_variant(s):
- """Split a PRODUCT-VARIANT name, or return None if it doesn't match that pattern."""
- split = s.split("-")
- if len(split) != 2:
- return None
- return split
-
-
-def choose_config_from_args(workspace_root, args):
- """Return the config file we should use for the given argument,
- or null if there's no file that matches that."""
- if len(args) == 1:
- # Prefer PRODUCT-VARIANT syntax so if there happens to be a matching
- # file we don't match that.
- pv = parse_product_variant(args[0])
- if pv:
- config = find_named_config(workspace_root, pv[0])
- if config:
- return (config, pv[1])
- return None, None
- # Look for a specifically named file
- if os.path.isfile(args[0]):
- return (args[0], args[1] if len(args) > 1 else None)
- # That file didn't exist, return that we didn't find it.
- return None, None
-
-
-class ConfigException(Exception):
- ERROR_IDENTIFY = "identify"
- ERROR_PARSE = "parse"
- ERROR_CYCLE = "cycle"
- ERROR_VALIDATE = "validate"
-
- def __init__(self, kind, message, locations=[], line=0):
- """Error thrown when loading and parsing configurations.
-
- Args:
- message: Error message to display to user
- locations: List of filenames of the include history. The 0 index one
- the location where the actual error occurred
- """
- if len(locations):
- s = locations[0]
- if line:
- s += ":"
- s += str(line)
- s += ": "
- else:
- s = ""
- s += message
- if len(locations):
- for loc in locations[1:]:
- s += "\n included from %s" % loc
- super().__init__(s)
- self.kind = kind
- self.message = message
- self.locations = locations
- self.line = line
-
-
-def load_config(filename):
- """Load a config, including processing the inherits fields.
-
- Raises:
- ConfigException on errors
- """
- def load_and_merge(fn, visited):
- with open(fn) as f:
- try:
- contents = json.load(f)
- except json.decoder.JSONDecodeError as ex:
- if True:
- raise ConfigException(ConfigException.ERROR_PARSE, ex.msg, visited, ex.lineno)
- else:
- sys.stderr.write("exception %s" % ex.__dict__)
- raise ex
- # Merge all the parents into one data, with first-wins policy
- inherited_data = {}
- for parent in contents.get("inherits", []):
- if parent in visited:
- raise ConfigException(ConfigException.ERROR_CYCLE, "Cycle detected in inherits",
- visited)
- deep_merge(inherited_data, load_and_merge(parent, [parent,] + visited))
- # Then merge inherited_data into contents, but what's already there will win.
- deep_merge(contents, inherited_data)
- contents.pop("inherits", None)
- return contents
- return load_and_merge(filename, [filename,])
-
-
-def deep_merge(merged, addition):
- """Merge all fields of addition into merged. Pre-existing fields win."""
- for k, v in addition.items():
- if k in merged:
- if isinstance(v, dict) and isinstance(merged[k], dict):
- deep_merge(merged[k], v)
- else:
- merged[k] = v
-
-
-def make_config_header(config_file, config, variant):
- def make_table(rows):
- maxcols = max([len(row) for row in rows])
- widths = [0] * maxcols
- for row in rows:
- for i in range(len(row)):
- widths[i] = max(widths[i], len(row[i]))
- text = []
- for row in rows:
- rowtext = []
- for i in range(len(row)):
- cell = row[i]
- rowtext.append(str(cell))
- rowtext.append(" " * (widths[i] - len(cell)))
- rowtext.append(" ")
- text.append("".join(rowtext))
- return "\n".join(text)
-
- trees = [("Component", "Path", "Product"),
- ("---------", "----", "-------")]
- entry = config.get("system", None)
- def add_config_tuple(trees, entry, name):
- if entry:
- trees.append((name, entry.get("tree"), entry.get("product", "")))
- add_config_tuple(trees, config.get("system"), "system")
- add_config_tuple(trees, config.get("vendor"), "vendor")
- for k, v in config.get("modules", {}).items():
- add_config_tuple(trees, v, k)
-
- return """========================================
-TARGET_BUILD_COMBO=%(TARGET_BUILD_COMBO)s
-TARGET_BUILD_VARIANT=%(TARGET_BUILD_VARIANT)s
-
-%(trees)s
-========================================\n""" % {
- "TARGET_BUILD_COMBO": config_file,
- "TARGET_BUILD_VARIANT": variant,
- "trees": make_table(trees),
- }
-
-
-def do_lunch(args):
- """Handle the lunch command."""
- # Check that we're at the top of a multitree workspace by seeing if this script exists.
- if not os.path.exists("build/build/make/orchestrator/core/lunch.py"):
- sys.stderr.write("ERROR: lunch.py must be run from the root of a multi-tree workspace\n")
- return EXIT_STATUS_ERROR
-
- # Choose the config file
- config_file, variant = choose_config_from_args(".", args)
-
- if config_file == None:
- sys.stderr.write("Can't find lunch combo file for: %s\n" % " ".join(args))
- return EXIT_STATUS_NEED_HELP
- if variant == None:
- sys.stderr.write("Can't find variant for: %s\n" % " ".join(args))
- return EXIT_STATUS_NEED_HELP
-
- # Parse the config file
- try:
- config = load_config(config_file)
- except ConfigException as ex:
- sys.stderr.write(str(ex))
- return EXIT_STATUS_ERROR
-
- # Fail if the lunchable bit isn't set, because this isn't a usable config
- if not config.get("lunchable", False):
- sys.stderr.write("%s: Lunch config file (or inherited files) does not have the 'lunchable'"
- % config_file)
- sys.stderr.write(" flag set, which means it is probably not a complete lunch spec.\n")
-
- # All the validation has passed, so print the name of the file and the variant
- sys.stdout.write("%s\n" % config_file)
- sys.stdout.write("%s\n" % variant)
-
- # Write confirmation message to stderr
- sys.stderr.write(make_config_header(config_file, config, variant))
-
- return EXIT_STATUS_OK
-
-
-def find_all_combo_files(workspace_root):
- """Find all .mcombo files in the prescribed locations in the tree."""
- for dir in find_config_dirs(workspace_root):
- for file in walk_paths(dir, lambda x: x.endswith(".mcombo")):
- yield file
-
-
-def is_file_lunchable(config_file):
- """Parse config_file, flatten the inheritance, and return whether it can be
- used as a lunch target."""
- try:
- config = load_config(config_file)
- except ConfigException as ex:
- sys.stderr.write("%s" % ex)
- return False
- return config.get("lunchable", False)
-
-
-def find_all_lunchable(workspace_root):
- """Find all mcombo files in the tree (rooted at workspace_root) that when
- parsed (and inheritance is flattened) have lunchable: true."""
- for f in [x for x in find_all_combo_files(workspace_root) if is_file_lunchable(x)]:
- yield f
-
-
-def load_current_config():
- """Load, validate and return the config as specified in TARGET_BUILD_COMBO. Throws
- ConfigException if there is a problem."""
-
- # Identify the config file
- config_file = os.environ.get("TARGET_BUILD_COMBO")
- if not config_file:
- raise ConfigException(ConfigException.ERROR_IDENTIFY,
- "TARGET_BUILD_COMBO not set. Run lunch or pass a combo file.")
-
- # Parse the config file
- config = load_config(config_file)
-
- # Validate the config file
- if not config.get("lunchable", False):
- raise ConfigException(ConfigException.ERROR_VALIDATE,
- "Lunch config file (or inherited files) does not have the 'lunchable'"
- + " flag set, which means it is probably not a complete lunch spec.",
- [config_file,])
-
- # TODO: Validate that:
- # - there are no modules called system or vendor
- # - everything has all the required files
-
- variant = os.environ.get("TARGET_BUILD_VARIANT")
- if not variant:
- variant = "eng" # TODO: Is this the right default?
- # Validate variant is user, userdebug or eng
-
- return config_file, config, variant
-
-def do_list():
- """Handle the --list command."""
- for f in sorted(find_all_lunchable(".")):
- print(f)
-
-
-def do_print(args):
- """Handle the --print command."""
- # Parse args
- if len(args) == 0:
- config_file = os.environ.get("TARGET_BUILD_COMBO")
- if not config_file:
- sys.stderr.write("TARGET_BUILD_COMBO not set. Run lunch before building.\n")
- return EXIT_STATUS_NEED_HELP
- elif len(args) == 1:
- config_file = args[0]
- else:
- return EXIT_STATUS_NEED_HELP
-
- # Parse the config file
- try:
- config = load_config(config_file)
- except ConfigException as ex:
- sys.stderr.write(str(ex))
- return EXIT_STATUS_ERROR
-
- # Print the config in json form
- json.dump(config, sys.stdout, indent=4)
-
- return EXIT_STATUS_OK
-
-
-def main(argv):
- if len(argv) < 2 or argv[1] == "-h" or argv[1] == "--help":
- return EXIT_STATUS_NEED_HELP
-
- if len(argv) == 2 and argv[1] == "--list":
- do_list()
- return EXIT_STATUS_OK
-
- if len(argv) == 2 and argv[1] == "--print":
- return do_print(argv[2:])
- return EXIT_STATUS_OK
-
- if (len(argv) == 3 or len(argv) == 4) and argv[1] == "--lunch":
- return do_lunch(argv[2:])
-
- sys.stderr.write("Unknown lunch command: %s\n" % " ".join(argv[1:]))
- return EXIT_STATUS_NEED_HELP
-
-if __name__ == "__main__":
- sys.exit(main(sys.argv))
-
-
-# vim: sts=4:ts=4:sw=4
diff --git a/orchestrator/core/ninja_runner.py b/orchestrator/core/ninja_runner.py
deleted file mode 100644
index ab81d66410..0000000000
--- a/orchestrator/core/ninja_runner.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import subprocess
-import sys
-
-def run_ninja(context, targets):
- """Run ninja.
- """
-
- # Construct the command
- cmd = [
- context.tools.ninja(),
- "-f",
- context.out.outer_ninja_file(),
- ] + targets
-
- # Run the command
- process = subprocess.run(cmd, shell=False)
-
- # TODO: Probably want better handling of inner tree failures
- if process.returncode:
- sys.stderr.write("Build error in outer tree.\nstopping multitree build.\n")
- sys.exit(1)
-
diff --git a/orchestrator/core/ninja_tools.py b/orchestrator/core/ninja_tools.py
deleted file mode 100644
index 16101eab1d..0000000000
--- a/orchestrator/core/ninja_tools.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import sys
-
-# Workaround for python include path
-_ninja_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), "..", "ninja"))
-if _ninja_dir not in sys.path:
- sys.path.append(_ninja_dir)
-import ninja_writer
-from ninja_syntax import Variable, BuildAction, Rule, Pool, Subninja, Line
-
-
-class Ninja(ninja_writer.Writer):
- """Some higher level constructs on top of raw ninja writing.
- TODO: Not sure where these should be."""
- def __init__(self, context, file):
- super(Ninja, self).__init__(file)
- self._context = context
- self._did_copy_file = False
- self._phonies = {}
-
- def add_copy_file(self, copy_to, copy_from):
- if not self._did_copy_file:
- self._did_copy_file = True
- rule = Rule("copy_file")
- rule.add_variable("command", "mkdir -p ${out_dir} && " + self._context.tools.acp()
- + " -f ${in} ${out}")
- self.add_rule(rule)
- build_action = BuildAction(copy_to, "copy_file", inputs=[copy_from,],
- implicits=[self._context.tools.acp()])
- build_action.add_variable("out_dir", os.path.dirname(copy_to))
- self.add_build_action(build_action)
-
- def add_global_phony(self, name, deps):
- """Add a phony target where there are multiple places that will want to add to
- the same phony. If you can, to save memory, use add_phony instead of this function."""
- if type(deps) not in (list, tuple):
- raise Exception("Assertion failed: bad type of deps: %s" % type(deps))
- self._phonies.setdefault(name, []).extend(deps)
-
- def write(self):
- for phony, deps in self._phonies.items():
- self.add_phony(phony, deps)
- super(Ninja, self).write()
-
-
diff --git a/orchestrator/core/orchestrator.py b/orchestrator/core/orchestrator.py
deleted file mode 100755
index 508f73aabb..0000000000
--- a/orchestrator/core/orchestrator.py
+++ /dev/null
@@ -1,119 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import subprocess
-import sys
-
-sys.dont_write_bytecode = True
-import api_assembly
-import api_domain
-import api_export
-import final_packaging
-import inner_tree
-import tree_analysis
-import interrogate
-import lunch
-import ninja_runner
-import utils
-
-EXIT_STATUS_OK = 0
-EXIT_STATUS_ERROR = 1
-
-API_DOMAIN_SYSTEM = "system"
-API_DOMAIN_VENDOR = "vendor"
-API_DOMAIN_MODULE = "module"
-
-def process_config(context, lunch_config):
- """Returns a InnerTrees object based on the configuration requested in the lunch config."""
- def add(domain_name, tree_root, product):
- tree_key = inner_tree.InnerTreeKey(tree_root, product)
- if tree_key in trees:
- tree = trees[tree_key]
- else:
- tree = inner_tree.InnerTree(context, tree_root, product)
- trees[tree_key] = tree
- domain = api_domain.ApiDomain(domain_name, tree, product)
- domains[domain_name] = domain
- tree.domains[domain_name] = domain
-
- trees = {}
- domains = {}
-
- system_entry = lunch_config.get("system")
- if system_entry:
- add(API_DOMAIN_SYSTEM, system_entry["tree"], system_entry["product"])
-
- vendor_entry = lunch_config.get("vendor")
- if vendor_entry:
- add(API_DOMAIN_VENDOR, vendor_entry["tree"], vendor_entry["product"])
-
- for module_name, module_entry in lunch_config.get("modules", []).items():
- add(module_name, module_entry["tree"], None)
-
- return inner_tree.InnerTrees(trees, domains)
-
-
-def build():
- # Choose the out directory, set up error handling, etc.
- context = utils.Context(utils.choose_out_dir(), utils.Errors(sys.stderr))
-
- # Read the lunch config file
- try:
- config_file, config, variant = lunch.load_current_config()
- except lunch.ConfigException as ex:
- sys.stderr.write("%s\n" % ex)
- return EXIT_STATUS_ERROR
- sys.stdout.write(lunch.make_config_header(config_file, config, variant))
-
- # Construct the trees and domains dicts
- inner_trees = process_config(context, config)
-
- # 1. Interrogate the trees
- inner_trees.for_each_tree(interrogate.interrogate_tree)
- # TODO: Detect bazel-only mode
-
- # 2a. API Export
- inner_trees.for_each_tree(api_export.export_apis_from_tree)
-
- # 2b. API Surface Assembly
- api_assembly.assemble_apis(context, inner_trees)
-
- # 3a. Inner tree analysis
- tree_analysis.analyze_trees(context, inner_trees)
-
- # 3b. Final Packaging Rules
- final_packaging.final_packaging(context, inner_trees)
-
- # 4. Build Execution
- # TODO: Decide what we want the UX for selecting targets to be across
- # branches... since there are very likely to be conflicting soong short
- # names.
- print("Running ninja...")
- targets = ["staging", "system"]
- ninja_runner.run_ninja(context, targets)
-
- # Success!
- return EXIT_STATUS_OK
-
-def main(argv):
- return build()
-
-if __name__ == "__main__":
- sys.exit(main(sys.argv))
-
-
-# vim: sts=4:ts=4:sw=4
diff --git a/orchestrator/core/test/configs/another/bad.mcombo b/orchestrator/core/test/configs/another/bad.mcombo
deleted file mode 100644
index 0967ef424b..0000000000
--- a/orchestrator/core/test/configs/another/bad.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/another/dir/a b/orchestrator/core/test/configs/another/dir/a
deleted file mode 100644
index 7898192261..0000000000
--- a/orchestrator/core/test/configs/another/dir/a
+++ /dev/null
@@ -1 +0,0 @@
-a
diff --git a/orchestrator/core/test/configs/b-eng b/orchestrator/core/test/configs/b-eng
deleted file mode 100644
index eceb3f31f9..0000000000
--- a/orchestrator/core/test/configs/b-eng
+++ /dev/null
@@ -1 +0,0 @@
-INVALID FILE
diff --git a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/b.mcombo b/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/b.mcombo
deleted file mode 100644
index 8cc83702da..0000000000
--- a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/b.mcombo
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "lunchable": "true"
-}
diff --git a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo b/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo
deleted file mode 100644
index 0967ef424b..0000000000
--- a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/not_a_combo.txt b/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/not_a_combo.txt
deleted file mode 100644
index f9805f279b..0000000000
--- a/orchestrator/core/test/configs/build/make/orchestrator/multitree_combos/not_a_combo.txt
+++ /dev/null
@@ -1 +0,0 @@
-not a combo file
diff --git a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/b.mcombo b/orchestrator/core/test/configs/device/aa/bb/multitree_combos/b.mcombo
deleted file mode 100644
index 0967ef424b..0000000000
--- a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/b.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/d.mcombo b/orchestrator/core/test/configs/device/aa/bb/multitree_combos/d.mcombo
deleted file mode 100644
index 0967ef424b..0000000000
--- a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/d.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/v.mcombo b/orchestrator/core/test/configs/device/aa/bb/multitree_combos/v.mcombo
deleted file mode 100644
index 0967ef424b..0000000000
--- a/orchestrator/core/test/configs/device/aa/bb/multitree_combos/v.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/device/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo b/orchestrator/core/test/configs/device/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo
deleted file mode 100644
index e69de29bb2..0000000000
--- a/orchestrator/core/test/configs/device/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo
+++ /dev/null
diff --git a/orchestrator/core/test/configs/parsing/cycles/1.mcombo b/orchestrator/core/test/configs/parsing/cycles/1.mcombo
deleted file mode 100644
index ab8fe3307a..0000000000
--- a/orchestrator/core/test/configs/parsing/cycles/1.mcombo
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "inherits": [
- "test/configs/parsing/cycles/2.mcombo"
- ]
-}
diff --git a/orchestrator/core/test/configs/parsing/cycles/2.mcombo b/orchestrator/core/test/configs/parsing/cycles/2.mcombo
deleted file mode 100644
index 2b774d0395..0000000000
--- a/orchestrator/core/test/configs/parsing/cycles/2.mcombo
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "inherits": [
- "test/configs/parsing/cycles/3.mcombo"
- ]
-}
-
diff --git a/orchestrator/core/test/configs/parsing/cycles/3.mcombo b/orchestrator/core/test/configs/parsing/cycles/3.mcombo
deleted file mode 100644
index 41b629b9e8..0000000000
--- a/orchestrator/core/test/configs/parsing/cycles/3.mcombo
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "inherits": [
- "test/configs/parsing/cycles/1.mcombo"
- ]
-}
-
diff --git a/orchestrator/core/test/configs/parsing/merge/1.mcombo b/orchestrator/core/test/configs/parsing/merge/1.mcombo
deleted file mode 100644
index a5a57d7cd1..0000000000
--- a/orchestrator/core/test/configs/parsing/merge/1.mcombo
+++ /dev/null
@@ -1,13 +0,0 @@
-{
- "inherits": [
- "test/configs/parsing/merge/2.mcombo",
- "test/configs/parsing/merge/3.mcombo"
- ],
- "in_1": "1",
- "in_1_2": "1",
- "merged": {
- "merged_1": "1",
- "merged_1_2": "1"
- },
- "dict_1": { "a" : "b" }
-}
diff --git a/orchestrator/core/test/configs/parsing/merge/2.mcombo b/orchestrator/core/test/configs/parsing/merge/2.mcombo
deleted file mode 100644
index 00963e207f..0000000000
--- a/orchestrator/core/test/configs/parsing/merge/2.mcombo
+++ /dev/null
@@ -1,12 +0,0 @@
-{
- "in_1_2": "2",
- "in_2": "2",
- "in_2_3": "2",
- "merged": {
- "merged_1_2": "2",
- "merged_2": "2",
- "merged_2_3": "2"
- },
- "dict_2": { "a" : "b" }
-}
-
diff --git a/orchestrator/core/test/configs/parsing/merge/3.mcombo b/orchestrator/core/test/configs/parsing/merge/3.mcombo
deleted file mode 100644
index 5fc9d90085..0000000000
--- a/orchestrator/core/test/configs/parsing/merge/3.mcombo
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "in_3": "3",
- "in_2_3": "3",
- "merged": {
- "merged_3": "3",
- "merged_2_3": "3"
- },
- "dict_3": { "a" : "b" }
-}
-
diff --git a/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/b.mcombo b/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/b.mcombo
deleted file mode 100644
index 0967ef424b..0000000000
--- a/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/b.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/v.mcombo b/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/v.mcombo
deleted file mode 100644
index 0967ef424b..0000000000
--- a/orchestrator/core/test/configs/vendor/aa/bb/multitree_combos/v.mcombo
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/orchestrator/core/test/configs/vendor/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo b/orchestrator/core/test/configs/vendor/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo
deleted file mode 100644
index e69de29bb2..0000000000
--- a/orchestrator/core/test/configs/vendor/this/one/is/deeper/than/will/be/found/by/the/ttl/multitree_combos/too_deep.mcombo
+++ /dev/null
diff --git a/orchestrator/core/test_lunch.py b/orchestrator/core/test_lunch.py
deleted file mode 100755
index 2d85d05958..0000000000
--- a/orchestrator/core/test_lunch.py
+++ /dev/null
@@ -1,128 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2008 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-import unittest
-
-sys.dont_write_bytecode = True
-import lunch
-
-class TestStringMethods(unittest.TestCase):
-
- def test_find_dirs(self):
- self.assertEqual([x for x in lunch.find_dirs("test/configs", "multitree_combos")], [
- "test/configs/build/make/orchestrator/multitree_combos",
- "test/configs/device/aa/bb/multitree_combos",
- "test/configs/vendor/aa/bb/multitree_combos"])
-
- def test_find_file(self):
- # Finds the one in device first because this is searching from the root,
- # not using find_named_config.
- self.assertEqual(lunch.find_file("test/configs", "v.mcombo"),
- "test/configs/device/aa/bb/multitree_combos/v.mcombo")
-
- def test_find_config_dirs(self):
- self.assertEqual([x for x in lunch.find_config_dirs("test/configs")], [
- "test/configs/build/make/orchestrator/multitree_combos",
- "test/configs/vendor/aa/bb/multitree_combos",
- "test/configs/device/aa/bb/multitree_combos"])
-
- def test_find_named_config(self):
- # Inside build/orchestrator, overriding device and vendor
- self.assertEqual(lunch.find_named_config("test/configs", "b"),
- "test/configs/build/make/orchestrator/multitree_combos/b.mcombo")
-
- # Nested dir inside a combo dir
- self.assertEqual(lunch.find_named_config("test/configs", "nested"),
- "test/configs/build/make/orchestrator/multitree_combos/nested/nested.mcombo")
-
- # Inside vendor, overriding device
- self.assertEqual(lunch.find_named_config("test/configs", "v"),
- "test/configs/vendor/aa/bb/multitree_combos/v.mcombo")
-
- # Inside device
- self.assertEqual(lunch.find_named_config("test/configs", "d"),
- "test/configs/device/aa/bb/multitree_combos/d.mcombo")
-
- # Make sure we don't look too deep (for performance)
- self.assertIsNone(lunch.find_named_config("test/configs", "too_deep"))
-
-
- def test_choose_config_file(self):
- # Empty string argument
- self.assertEqual(lunch.choose_config_from_args("test/configs", [""]),
- (None, None))
-
- # A PRODUCT-VARIANT name
- self.assertEqual(lunch.choose_config_from_args("test/configs", ["v-eng"]),
- ("test/configs/vendor/aa/bb/multitree_combos/v.mcombo", "eng"))
-
- # A PRODUCT-VARIANT name that conflicts with a file
- self.assertEqual(lunch.choose_config_from_args("test/configs", ["b-eng"]),
- ("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"))
-
- # A PRODUCT-VARIANT that doesn't exist
- self.assertEqual(lunch.choose_config_from_args("test/configs", ["z-user"]),
- (None, None))
-
- # An explicit file
- self.assertEqual(lunch.choose_config_from_args("test/configs",
- ["test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"]),
- ("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", "eng"))
-
- # An explicit file that doesn't exist
- self.assertEqual(lunch.choose_config_from_args("test/configs",
- ["test/configs/doesnt_exist.mcombo", "eng"]),
- (None, None))
-
- # An explicit file without a variant should fail
- self.assertEqual(lunch.choose_config_from_args("test/configs",
- ["test/configs/build/make/orchestrator/multitree_combos/b.mcombo"]),
- ("test/configs/build/make/orchestrator/multitree_combos/b.mcombo", None))
-
-
- def test_config_cycles(self):
- # Test that we catch cycles
- with self.assertRaises(lunch.ConfigException) as context:
- lunch.load_config("test/configs/parsing/cycles/1.mcombo")
- self.assertEqual(context.exception.kind, lunch.ConfigException.ERROR_CYCLE)
-
- def test_config_merge(self):
- # Test the merge logic
- self.assertEqual(lunch.load_config("test/configs/parsing/merge/1.mcombo"), {
- "in_1": "1",
- "in_1_2": "1",
- "merged": {"merged_1": "1",
- "merged_1_2": "1",
- "merged_2": "2",
- "merged_2_3": "2",
- "merged_3": "3"},
- "dict_1": {"a": "b"},
- "in_2": "2",
- "in_2_3": "2",
- "dict_2": {"a": "b"},
- "in_3": "3",
- "dict_3": {"a": "b"}
- })
-
- def test_list(self):
- self.assertEqual(sorted(lunch.find_all_lunchable("test/configs")),
- ["test/configs/build/make/orchestrator/multitree_combos/b.mcombo"])
-
-if __name__ == "__main__":
- unittest.main()
-
-# vim: sts=4:ts=4:sw=4
diff --git a/orchestrator/core/tree_analysis.py b/orchestrator/core/tree_analysis.py
deleted file mode 100644
index 052cad6c34..0000000000
--- a/orchestrator/core/tree_analysis.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-def analyze_trees(context, inner_trees):
- inner_trees.for_each_tree(run_analysis)
-
-def run_analysis(tree_key, inner_tree, cookie):
- inner_tree.invoke(["analyze"])
-
-
-
-
diff --git a/orchestrator/core/utils.py b/orchestrator/core/utils.py
deleted file mode 100644
index 41310e0156..0000000000
--- a/orchestrator/core/utils.py
+++ /dev/null
@@ -1,141 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import platform
-
-class Context(object):
- """Mockable container for global state."""
- def __init__(self, out_root, errors):
- self.out = OutDir(out_root)
- self.errors = errors
- self.tools = HostTools()
-
-class TestContext(Context):
- "Context for testing. The real Context is manually constructed in orchestrator.py."
-
- def __init__(self, test_work_dir, test_name):
- super(MockContext, self).__init__(os.path.join(test_work_dir, test_name),
- Errors(None))
-
-
-class OutDir(object):
- """Encapsulates the logic about the out directory at the outer-tree level.
- See also inner_tree.OutDirLayout for inner tree out dir contents."""
-
- def __init__(self, root):
- "Initialize with the root of the OUT_DIR for the outer tree."
- self._out_root = root
- self._intermediates = "intermediates"
-
- def root(self):
- return self._out_root
-
- def inner_tree_dir(self, tree_root):
- """Root directory for inner tree inside the out dir."""
- return os.path.join(self._out_root, "trees", tree_root)
-
- def api_ninja_file(self):
- """The ninja file that assembles API surfaces."""
- return os.path.join(self._out_root, "api_surfaces.ninja")
-
- def api_library_dir(self, surface, version, library):
- """Directory for all the contents of a library inside an API surface, including
- the build files. Any intermediates should go in api_library_work_dir."""
- return os.path.join(self._out_root, "api_surfaces", surface, str(version), library)
-
- def api_library_work_dir(self, surface, version, library):
- """Intermediates / scratch directory for library inside an API surface."""
- return os.path.join(self._out_root, self._intermediates, "api_surfaces", surface,
- str(version), library)
-
- def outer_ninja_file(self):
- return os.path.join(self._out_root, "multitree.ninja")
-
- def module_share_dir(self, module_type, module_name):
- return os.path.join(self._out_root, "shared", module_type, module_name)
-
- def staging_dir(self):
- return os.path.join(self._out_root, "staging")
-
- def dist_dir(self):
- "The DIST_DIR provided or out/dist" # TODO: Look at DIST_DIR
- return os.path.join(self._out_root, "dist")
-
-class Errors(object):
- """Class for reporting and tracking errors."""
- def __init__(self, stream):
- """Initialize Error reporter with a file-like object."""
- self._stream = stream
- self._all = []
-
- def error(self, message, file=None, line=None, col=None):
- """Record the error message."""
- s = ""
- if file:
- s += str(file)
- s += ":"
- if line:
- s += str(line)
- s += ":"
- if col:
- s += str(col)
- s += ":"
- if s:
- s += " "
- s += str(message)
- if s[-1] != "\n":
- s += "\n"
- self._all.append(s)
- if self._stream:
- self._stream.write(s)
-
- def had_error(self):
- """Return if there were any errors reported."""
- return len(self._all)
-
- def get_errors(self):
- """Get all errors that were reported."""
- return self._all
-
-
-class HostTools(object):
- def __init__(self):
- if platform.system() == "Linux":
- self._arch = "linux-x86"
- else:
- raise Exception("Orchestrator running on an unknown system: %s" % platform.system())
-
- # Some of these are called a lot, so pre-compute the strings to save memory
- self._prebuilts = os.path.join("build", "prebuilts", "build-tools", self._arch, "bin")
- self._acp = os.path.join(self._prebuilts, "acp")
- self._ninja = os.path.join(self._prebuilts, "ninja")
-
- def acp(self):
- return self._acp
-
- def ninja(self):
- return self._ninja
-
-
-def choose_out_dir():
- """Get the root of the out dir, either from the environment or by picking
- a default."""
- result = os.environ.get("OUT_DIR")
- if result:
- return result
- else:
- return "out"
diff --git a/orchestrator/demo/buffet_helper.py b/orchestrator/demo/buffet_helper.py
deleted file mode 100644
index fa29aeb1ce..0000000000
--- a/orchestrator/demo/buffet_helper.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python3
-import os
-import sys
-import yaml
-
-from hierarchy import parse_hierarchy
-
-
-def main():
- if len(sys.argv) != 2:
- print('usage: %s target' % sys.argv[0])
- exit(1)
-
- args = sys.argv[1].split('-')
- if len(args) != 2:
- print('target format: {target}-{variant}')
- exit(1)
-
- target, variant = args
-
- if variant not in ['eng', 'user', 'userdebug']:
- print('unknown variant "%s": expected "eng", "user" or "userdebug"' %
- variant)
- exit(1)
-
- build_top = os.getenv('BUFFET_BUILD_TOP')
- if not build_top:
- print('BUFFET_BUILD_TOP is not set; Did you correctly run envsetup.sh?')
- exit(1)
-
- hierarchy_map = parse_hierarchy(build_top)
-
- if target not in hierarchy_map:
- raise RuntimeError(
- "unknown target '%s': couldn't find the target. Supported targets are: %s"
- % (target, list(hierarchy_map.keys())))
-
- hierarchy = [target]
- while hierarchy_map[hierarchy[-1]]:
- hierarchy.append(hierarchy_map[hierarchy[-1]])
-
- print('Target hierarchy for %s: %s' % (target, hierarchy))
-
-
-if __name__ == '__main__':
- main()
diff --git a/orchestrator/demo/build_helper.py b/orchestrator/demo/build_helper.py
deleted file mode 100644
index c481f80c9c..0000000000
--- a/orchestrator/demo/build_helper.py
+++ /dev/null
@@ -1,367 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import copy
-import hierarchy
-import json
-import logging
-import filecmp
-import os
-import shutil
-import subprocess
-import sys
-import tempfile
-import collect_metadata
-import utils
-
-BUILD_CMD_TO_ALL = (
- 'clean',
- 'installclean',
- 'update-meta',
-)
-BUILD_ALL_EXEMPTION = (
- 'art',
-)
-
-def get_supported_product(ctx, supported_products):
- hierarchy_map = hierarchy.parse_hierarchy(ctx.build_top())
- target = ctx.target_product()
-
- while target not in supported_products:
- if target not in hierarchy_map:
- return None
- target = hierarchy_map[target]
- return target
-
-
-def parse_goals(ctx, metadata, goals):
- """Parse goals and returns a map from each component to goals.
-
- e.g.
-
- "m main art timezone:foo timezone:bar" will return the following dict: {
- "main": {"all"},
- "art": {"all"},
- "timezone": {"foo", "bar"},
- }
- """
- # for now, goal should look like:
- # {component} or {component}:{subgoal}
-
- ret = collections.defaultdict(set)
-
- for goal in goals:
- # check if the command is for all components
- if goal in BUILD_CMD_TO_ALL:
- ret['all'].add(goal)
- continue
-
- # should be {component} or {component}:{subgoal}
- try:
- component, subgoal = goal.split(':') if ':' in goal else (goal, 'all')
- except ValueError:
- raise RuntimeError(
- 'unknown goal: %s: should be {component} or {component}:{subgoal}' %
- goal)
- if component not in metadata:
- raise RuntimeError('unknown goal: %s: component %s not found' %
- (goal, component))
- if not get_supported_product(ctx, metadata[component]['lunch_targets']):
- raise RuntimeError("can't find matching target. Supported targets are: " +
- str(metadata[component]['lunch_targets']))
-
- ret[component].add(subgoal)
-
- return ret
-
-
-def find_cycle(metadata):
- """ Finds a cyclic dependency among components.
-
- This is for debugging.
- """
- visited = set()
- parent_node = dict()
- in_stack = set()
-
- # Returns a cycle if one is found
- def dfs(node):
- # visit_order[visit_time[node] - 1] == node
- nonlocal visited, parent_node, in_stack
-
- visited.add(node)
- in_stack.add(node)
- if 'deps' not in metadata[node]:
- in_stack.remove(node)
- return None
- for next in metadata[node]['deps']:
- # We found a cycle (next ~ node) if next is still in the stack
- if next in in_stack:
- cycle = [node]
- while cycle[-1] != next:
- cycle.append(parent_node[cycle[-1]])
- return cycle
-
- # Else, continue searching
- if next in visited:
- continue
-
- parent_node[next] = node
- result = dfs(next)
- if result:
- return result
-
- in_stack.remove(node)
- return None
-
- for component in metadata:
- if component in visited:
- continue
-
- result = dfs(component)
- if result:
- return result
-
- return None
-
-
-def topological_sort_components(metadata):
- """ Performs topological sort on components.
-
- If A depends on B, B appears first.
- """
- # If A depends on B, we want B to appear before A. But the graph in metadata
- # is represented as A -> B (B in metadata[A]['deps']). So we sort in the
- # reverse order, and then reverse the result again to get the desired order.
- indegree = collections.defaultdict(int)
- for component in metadata:
- if 'deps' not in metadata[component]:
- continue
- for dep in metadata[component]['deps']:
- indegree[dep] += 1
-
- component_queue = collections.deque()
- for component in metadata:
- if indegree[component] == 0:
- component_queue.append(component)
-
- result = []
- while component_queue:
- component = component_queue.popleft()
- result.append(component)
- if 'deps' not in metadata[component]:
- continue
- for dep in metadata[component]['deps']:
- indegree[dep] -= 1
- if indegree[dep] == 0:
- component_queue.append(dep)
-
- # If topological sort fails, there must be a cycle.
- if len(result) != len(metadata):
- cycle = find_cycle(metadata)
- raise RuntimeError('circular dependency found among metadata: %s' % cycle)
-
- return result[::-1]
-
-
-def add_dependency_goals(ctx, metadata, component, goals):
- """ Adds goals that given component depends on."""
- # For now, let's just add "all"
- # TODO: add detailed goals (e.g. API build rules, library build rules, etc.)
- if 'deps' not in metadata[component]:
- return
-
- for dep in metadata[component]['deps']:
- goals[dep].add('all')
-
-
-def sorted_goals_with_dependencies(ctx, metadata, parsed_goals):
- """ Analyzes the dependency graph among components, adds build commands for
-
- dependencies, and then sorts the goals.
-
- Returns a list of tuples: (component_name, set of subgoals).
- Builds should be run in the list's order.
- """
- # TODO(inseob@): after topological sort, some components may be built in
- # parallel.
-
- topological_order = topological_sort_components(metadata)
- combined_goals = copy.deepcopy(parsed_goals)
-
- # Add build rules for each component's dependencies
- # We do this in reverse order, so it can be transitive.
- # e.g. if A depends on B and B depends on C, and we build A,
- # C should also be built, in addition to B.
- for component in topological_order[::-1]:
- if component in combined_goals:
- add_dependency_goals(ctx, metadata, component, combined_goals)
-
- ret = []
- for component in ['all'] + topological_order:
- if component in combined_goals:
- ret.append((component, combined_goals[component]))
-
- return ret
-
-
-def run_build(ctx, metadata, component, subgoals):
- build_cmd = metadata[component]['build_cmd']
- out_dir = metadata[component]['out_dir']
- default_goals = ''
- if 'default_goals' in metadata[component]:
- default_goals = metadata[component]['default_goals']
-
- if 'all' in subgoals:
- goal = default_goals
- else:
- goal = ' '.join(subgoals)
-
- build_vars = ''
- if 'update-meta' in subgoals:
- build_vars = 'TARGET_MULTITREE_UPDATE_META=true'
- # TODO(inseob@): shell escape
- cmd = [
- '/bin/bash', '-c',
- 'source build/envsetup.sh && lunch %s-%s && %s %s %s' %
- (get_supported_product(ctx, metadata[component]['lunch_targets']),
- ctx.target_build_variant(), build_vars, build_cmd, goal)
- ]
- logging.debug('cwd: ' + metadata[component]['path'])
- logging.debug('running build: ' + str(cmd))
-
- subprocess.run(cmd, cwd=metadata[component]['path'], check=True)
-
-
-def run_build_all(ctx, metadata, subgoals):
- for component in metadata:
- if component in BUILD_ALL_EXEMPTION:
- continue
- run_build(ctx, metadata, component, subgoals)
-
-
-def find_components(metadata, predicate):
- for component in metadata:
- if predicate(component):
- yield component
-
-
-def import_filegroups(metadata, component, exporting_component, target_file_pairs):
- imported_filegroup_dir = os.path.join(metadata[component]['path'], 'imported', exporting_component)
-
- bp_content = ''
- for name, outpaths in target_file_pairs:
- bp_content += ('filegroup {{\n'
- ' name: "{fname}",\n'
- ' srcs: [\n'.format(fname=name))
- for outpath in outpaths:
- bp_content += ' "{outfile}",\n'.format(outfile=os.path.basename(outpath))
- bp_content += (' ],\n'
- '}\n')
-
- with tempfile.TemporaryDirectory() as tmp_dir:
- with open(os.path.join(tmp_dir, 'Android.bp'), 'w') as fout:
- fout.write(bp_content)
- for _, outpaths in target_file_pairs:
- for outpath in outpaths:
- os.symlink(os.path.join(metadata[exporting_component]['path'], outpath),
- os.path.join(tmp_dir, os.path.basename(outpath)))
- cmp_result = filecmp.dircmp(tmp_dir, imported_filegroup_dir)
- if os.path.exists(imported_filegroup_dir) and len(
- cmp_result.left_only) + len(cmp_result.right_only) + len(
- cmp_result.diff_files) == 0:
- # Files are identical, it doesn't need to be written
- logging.info(
- 'imported files exists and the contents are identical: {} -> {}'
- .format(component, exporting_component))
- continue
- logging.info('creating symlinks for imported files: {} -> {}'.format(
- component, exporting_component))
- os.makedirs(imported_filegroup_dir, exist_ok=True)
- shutil.rmtree(imported_filegroup_dir, ignore_errors=True)
- shutil.move(tmp_dir, imported_filegroup_dir)
-
-
-def prepare_build(metadata, component):
- imported_dir = os.path.join(metadata[component]['path'], 'imported')
- if utils.META_DEPS not in metadata[component]:
- if os.path.exists(imported_dir):
- logging.debug('remove {}'.format(imported_dir))
- shutil.rmtree(imported_dir)
- return
-
- imported_components = set()
- for exp_comp in metadata[component][utils.META_DEPS]:
- if utils.META_FILEGROUP in metadata[component][utils.META_DEPS][exp_comp]:
- filegroups = metadata[component][utils.META_DEPS][exp_comp][utils.META_FILEGROUP]
- target_file_pairs = []
- for name in filegroups:
- target_file_pairs.append((name, filegroups[name]))
- import_filegroups(metadata, component, exp_comp, target_file_pairs)
- imported_components.add(exp_comp)
-
- # Remove directories that are not generated this time.
- if os.path.exists(imported_dir):
- if len(imported_components) == 0:
- shutil.rmtree(imported_dir)
- else:
- for remove_target in set(os.listdir(imported_dir)) - imported_components:
- logging.info('remove unnecessary imported dir: {}'.format(remove_target))
- shutil.rmtree(os.path.join(imported_dir, remove_target))
-
-
-def main():
- utils.set_logging_config(logging.DEBUG)
- ctx = utils.get_build_context()
-
- logging.info('collecting metadata')
-
- utils.set_logging_config(True)
-
- goals = sys.argv[1:]
- if not goals:
- logging.debug('empty goals. defaults to main')
- goals = ['main']
-
- logging.debug('goals: ' + str(goals))
-
- # Force update the metadata for the 'update-meta' build
- metadata_collector = collect_metadata.MetadataCollector(
- ctx.components_top(), ctx.out_dir(),
- collect_metadata.COMPONENT_METADATA_DIR,
- collect_metadata.COMPONENT_METADATA_FILE,
- force_update='update-meta' in goals)
- metadata_collector.collect()
-
- metadata = metadata_collector.get_metadata()
- logging.debug('metadata: ' + str(metadata))
-
- parsed_goals = parse_goals(ctx, metadata, goals)
- logging.debug('parsed goals: ' + str(parsed_goals))
-
- sorted_goals = sorted_goals_with_dependencies(ctx, metadata, parsed_goals)
- logging.debug('sorted goals with deps: ' + str(sorted_goals))
-
- for component, subgoals in sorted_goals:
- if component == 'all':
- run_build_all(ctx, metadata, subgoals)
- continue
- prepare_build(metadata, component)
- run_build(ctx, metadata, component, subgoals)
-
-
-if __name__ == '__main__':
- main()
diff --git a/orchestrator/demo/collect_metadata.py b/orchestrator/demo/collect_metadata.py
deleted file mode 100755
index 148167d3eb..0000000000
--- a/orchestrator/demo/collect_metadata.py
+++ /dev/null
@@ -1,428 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2021 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import copy
-import json
-import logging
-import os
-import sys
-import yaml
-from collections import defaultdict
-from typing import (
- List,
- Set,
-)
-
-import utils
-
-# SKIP_COMPONENT_SEARCH = (
-# 'tools',
-# )
-COMPONENT_METADATA_DIR = '.repo'
-COMPONENT_METADATA_FILE = 'treeinfo.yaml'
-GENERATED_METADATA_FILE = 'metadata.json'
-COMBINED_METADATA_FILENAME = 'multitree_meta.json'
-
-
-class Dep(object):
- def __init__(self, name, component, deps_type):
- self.name = name
- self.component = component
- self.type = deps_type
- self.out_paths = list()
-
-
-class ExportedDep(Dep):
- def __init__(self, name, component, deps_type):
- super().__init__(name, component, deps_type)
-
- def setOutputPaths(self, output_paths: list):
- self.out_paths = output_paths
-
-
-class ImportedDep(Dep):
- required_type_map = {
- # import type: (required type, get imported module list)
- utils.META_FILEGROUP: (utils.META_MODULES, True),
- }
-
- def __init__(self, name, component, deps_type, import_map):
- super().__init__(name, component, deps_type)
- self.exported_deps: Set[ExportedDep] = set()
- self.imported_modules: List[str] = list()
- self.required_type = deps_type
- get_imported_module = False
- if deps_type in ImportedDep.required_type_map:
- self.required_type, get_imported_module = ImportedDep.required_type_map[deps_type]
- if get_imported_module:
- self.imported_modules = import_map[name]
- else:
- self.imported_modules.append(name)
-
- def verify_and_add(self, exported: ExportedDep):
- if self.required_type != exported.type:
- raise RuntimeError(
- '{comp} components imports {module} for {imp_type} but it is exported as {exp_type}.'
- .format(comp=self.component, module=exported.name, imp_type=self.required_type, exp_type=exported.type))
- self.exported_deps.add(exported)
- self.out_paths.extend(exported.out_paths)
- # Remove duplicates. We may not use set() which is not JSON serializable
- self.out_paths = list(dict.fromkeys(self.out_paths))
-
-
-class MetadataCollector(object):
- """Visit all component directories and collect the metadata from them.
-
-Example of metadata:
-==========
-build_cmd: m # build command for this component. 'm' if omitted
-out_dir: out # out dir of this component. 'out' if omitted
-exports:
- libraries:
- - name: libopenjdkjvm
- - name: libopenjdkjvmd
- build_cmd: mma # build command for libopenjdkjvmd if specified
- out_dir: out/soong # out dir for libopenjdkjvmd if specified
- - name: libctstiagent
- APIs:
- - api1
- - api2
-imports:
- libraries:
- - lib1
- - lib2
- APIs:
- - import_api1
- - import_api2
-lunch_targets:
- - arm64
- - x86_64
-"""
-
- def __init__(self, component_top, out_dir, meta_dir, meta_file, force_update=False):
- if not os.path.exists(out_dir):
- os.makedirs(out_dir)
-
- self.__component_top = component_top
- self.__out_dir = out_dir
- self.__metadata_path = os.path.join(meta_dir, meta_file)
- self.__combined_metadata_path = os.path.join(self.__out_dir,
- COMBINED_METADATA_FILENAME)
- self.__force_update = force_update
-
- self.__metadata = dict()
- self.__map_exports = dict()
- self.__component_set = set()
-
- def collect(self):
- """ Read precomputed combined metadata from the json file.
-
- If any components have updated their metadata, update the metadata
- information and the json file.
- """
- timestamp = self.__restore_metadata()
- if timestamp and os.path.getmtime(__file__) > timestamp:
- logging.info('Update the metadata as the orchestrator has been changed')
- self.__force_update = True
- self.__collect_from_components(timestamp)
-
- def get_metadata(self):
- """ Returns collected metadata from all components"""
- if not self.__metadata:
- logging.warning('Metadata is empty')
- return copy.deepcopy(self.__metadata)
-
- def __collect_from_components(self, timestamp):
- """ Read metadata from all components
-
- If any components have newer metadata files or are removed, update the
- combined metadata.
- """
- metadata_updated = False
- for component in os.listdir(self.__component_top):
- # if component in SKIP_COMPONENT_SEARCH:
- # continue
- if self.__read_component_metadata(timestamp, component):
- metadata_updated = True
- if self.__read_generated_metadata(timestamp, component):
- metadata_updated = True
-
- deleted_components = set()
- for meta in self.__metadata:
- if meta not in self.__component_set:
- logging.info('Component {} is removed'.format(meta))
- deleted_components.add(meta)
- metadata_updated = True
- for meta in deleted_components:
- del self.__metadata[meta]
-
- if metadata_updated:
- self.__update_dependencies()
- self.__store_metadata()
- logging.info('Metadata updated')
-
- def __read_component_metadata(self, timestamp, component):
- """ Search for the metadata file from a component.
-
- If the metadata is modified, read the file and update the metadata.
- """
- component_path = os.path.join(self.__component_top, component)
- metadata_file = os.path.join(component_path, self.__metadata_path)
- logging.info(
- 'Reading a metadata file from {} component ...'.format(component))
- if not os.path.isfile(metadata_file):
- logging.warning('Metadata file {} not found!'.format(metadata_file))
- return False
-
- self.__component_set.add(component)
- if not self.__force_update and timestamp and timestamp > os.path.getmtime(metadata_file):
- logging.info('... yaml not changed. Skip')
- return False
-
- with open(metadata_file) as f:
- meta = yaml.load(f, Loader=yaml.SafeLoader)
-
- meta['path'] = component_path
- if utils.META_BUILDCMD not in meta:
- meta[utils.META_BUILDCMD] = utils.DEFAULT_BUILDCMD
- if utils.META_OUTDIR not in meta:
- meta[utils.META_OUTDIR] = utils.DEFAULT_OUTDIR
-
- if utils.META_IMPORTS not in meta:
- meta[utils.META_IMPORTS] = defaultdict(dict)
- if utils.META_EXPORTS not in meta:
- meta[utils.META_EXPORTS] = defaultdict(dict)
-
- self.__metadata[component] = meta
- return True
-
- def __read_generated_metadata(self, timestamp, component):
- """ Read a metadata gerated by 'update-meta' build command from the soong build system
-
- Soong generate the metadata that has the information of import/export module/files.
- Build orchestrator read the generated metadata to collect the dependency information.
-
- Generated metadata has the following format:
- {
- "Imported": {
- "FileGroups": {
- "<name_of_filegroup>": [
- "<exported_module_name>",
- ...
- ],
- ...
- }
- }
- "Exported": {
- "<exported_module_name>": [
- "<output_file_path>",
- ...
- ],
- ...
- }
- }
- """
- if component not in self.__component_set:
- # skip reading generated metadata if the component metadata file was missing
- return False
- component_out = os.path.join(self.__component_top, component, self.__metadata[component][utils.META_OUTDIR])
- generated_metadata_file = os.path.join(component_out, 'soong', 'multitree', GENERATED_METADATA_FILE)
- if not os.path.isfile(generated_metadata_file):
- logging.info('... Soong did not generated the metadata file. Skip')
- return False
- if not self.__force_update and timestamp and timestamp > os.path.getmtime(generated_metadata_file):
- logging.info('... Soong generated metadata not changed. Skip')
- return False
-
- with open(generated_metadata_file, 'r') as gen_meta_json:
- try:
- gen_metadata = json.load(gen_meta_json)
- except json.decoder.JSONDecodeError:
- logging.warning('JSONDecodeError!!!: skip reading the {} file'.format(
- generated_metadata_file))
- return False
-
- if utils.SOONG_IMPORTED in gen_metadata:
- imported = gen_metadata[utils.SOONG_IMPORTED]
- if utils.SOONG_IMPORTED_FILEGROUPS in imported:
- self.__metadata[component][utils.META_IMPORTS][utils.META_FILEGROUP] = imported[utils.SOONG_IMPORTED_FILEGROUPS]
- if utils.SOONG_EXPORTED in gen_metadata:
- self.__metadata[component][utils.META_EXPORTS][utils.META_MODULES] = gen_metadata[utils.SOONG_EXPORTED]
-
- return True
-
- def __update_export_map(self):
- """ Read metadata of all components and update the export map
-
- 'libraries' and 'APIs' are special exproted types that are provided manually
- from the .yaml metadata files. These need to be replaced with the implementation
- in soong gerated metadata.
- The export type 'module' is generated from the soong build system from the modules
- with 'export: true' property. This export type includes a dictionary with module
- names as keys and their output files as values. These output files will be used as
- prebuilt sources when generating the imported modules.
- """
- self.__map_exports = dict()
- for comp in self.__metadata:
- if utils.META_EXPORTS not in self.__metadata[comp]:
- continue
- exports = self.__metadata[comp][utils.META_EXPORTS]
-
- for export_type in exports:
- for module in exports[export_type]:
- if export_type == utils.META_LIBS:
- name = module[utils.META_LIB_NAME]
- else:
- name = module
-
- if name in self.__map_exports:
- raise RuntimeError(
- 'Exported libs conflict!!!: "{name}" in the {comp} component is already exported by the {prev} component.'
- .format(name=name, comp=comp, prev=self.__map_exports[name][utils.EXP_COMPONENT]))
- exported_deps = ExportedDep(name, comp, export_type)
- if export_type == utils.META_MODULES:
- exported_deps.setOutputPaths(exports[export_type][module])
- self.__map_exports[name] = exported_deps
-
- def __verify_and_add_dependencies(self, component):
- """ Search all imported items from the export_map.
-
- If any imported items are not provided by the other components, report
- an error.
- Otherwise, add the component dependency and update the exported information to the
- import maps.
- """
- def verify_and_add_dependencies(imported_dep: ImportedDep):
- for module in imported_dep.imported_modules:
- if module not in self.__map_exports:
- raise RuntimeError(
- 'Imported item not found!!!: Imported module "{module}" in the {comp} component is not exported from any other components.'
- .format(module=module, comp=imported_dep.component))
- imported_dep.verify_and_add(self.__map_exports[module])
-
- deps = self.__metadata[component][utils.META_DEPS]
- exp_comp = self.__map_exports[module].component
- if exp_comp not in deps:
- deps[exp_comp] = defaultdict(defaultdict)
- deps[exp_comp][imported_dep.type][imported_dep.name] = imported_dep.out_paths
-
- self.__metadata[component][utils.META_DEPS] = defaultdict()
- imports = self.__metadata[component][utils.META_IMPORTS]
- for import_type in imports:
- for module in imports[import_type]:
- verify_and_add_dependencies(ImportedDep(module, component, import_type, imports[import_type]))
-
- def __check_imports(self):
- """ Search the export map to find the component to import libraries or APIs.
-
- Update the 'deps' field that includes the dependent components.
- """
- for component in self.__metadata:
- self.__verify_and_add_dependencies(component)
- if utils.META_DEPS in self.__metadata[component]:
- logging.debug('{comp} depends on {list} components'.format(
- comp=component, list=self.__metadata[component][utils.META_DEPS]))
-
- def __update_dependencies(self):
- """ Generate a dependency graph for the components
-
- Update __map_exports and the dependency graph with the maps.
- """
- self.__update_export_map()
- self.__check_imports()
-
- def __store_metadata(self):
- """ Store the __metadata dictionary as json format"""
- with open(self.__combined_metadata_path, 'w') as json_file:
- json.dump(self.__metadata, json_file, indent=2)
-
- def __restore_metadata(self):
- """ Read the stored json file and return the time stamps of the
-
- metadata file.
- """
- if not os.path.exists(self.__combined_metadata_path):
- return None
-
- with open(self.__combined_metadata_path, 'r') as json_file:
- try:
- self.__metadata = json.load(json_file)
- except json.decoder.JSONDecodeError:
- logging.warning('JSONDecodeError!!!: skip reading the {} file'.format(
- self.__combined_metadata_path))
- return None
-
- logging.info('Metadata restored from {}'.format(
- self.__combined_metadata_path))
- self.__update_export_map()
- return os.path.getmtime(self.__combined_metadata_path)
-
-
-def get_args():
-
- def check_dir(path):
- if os.path.exists(path) and os.path.isdir(path):
- return os.path.normpath(path)
- else:
- raise argparse.ArgumentTypeError('\"{}\" is not a directory'.format(path))
-
- parser = argparse.ArgumentParser()
- parser.add_argument(
- '--component-top',
- help='Scan all components under this directory.',
- default=os.path.join(os.path.dirname(__file__), '../../../components'),
- type=check_dir)
- parser.add_argument(
- '--meta-file',
- help='Name of the metadata file.',
- default=COMPONENT_METADATA_FILE,
- type=str)
- parser.add_argument(
- '--meta-dir',
- help='Each component has the metadata in this directory.',
- default=COMPONENT_METADATA_DIR,
- type=str)
- parser.add_argument(
- '--out-dir',
- help='Out dir for the outer tree. The orchestrator stores the collected metadata in this directory.',
- default=os.path.join(os.path.dirname(__file__), '../../../out'),
- type=os.path.normpath)
- parser.add_argument(
- '--force',
- '-f',
- action='store_true',
- help='Force to collect metadata',
- )
- parser.add_argument(
- '--verbose',
- '-v',
- help='Increase output verbosity, e.g. "-v", "-vv".',
- action='count',
- default=0)
- return parser.parse_args()
-
-
-def main():
- args = get_args()
- utils.set_logging_config(args.verbose)
-
- metadata_collector = MetadataCollector(args.component_top, args.out_dir,
- args.meta_dir, args.meta_file, args.force)
- metadata_collector.collect()
-
-
-if __name__ == '__main__':
- main()
diff --git a/orchestrator/demo/envsetup.sh b/orchestrator/demo/envsetup.sh
deleted file mode 100644
index 902a37c2a2..0000000000
--- a/orchestrator/demo/envsetup.sh
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/bin/bash
-
-function buffet()
-{
- local product variant selection
- if [[ $# -ne 1 ]]; then
- echo "usage: buffet [target]" >&2
- return 1
- fi
-
- selection=$1
- product=${selection%%-*} # Trim everything after first dash
- variant=${selection#*-} # Trim everything up to first dash
-
- if [ -z "$product" ]
- then
- echo
- echo "Invalid lunch combo: $selection"
- return 1
- fi
-
- if [ -z "$variant" ]
- then
- if [[ "$product" =~ .*_(eng|user|userdebug) ]]
- then
- echo "Did you mean -${product/*_/}? (dash instead of underscore)"
- fi
- return 1
- fi
-
- BUFFET_BUILD_TOP=$(pwd) python3 tools/build/orchestrator/buffet_helper.py $1 || return 1
-
- export BUFFET_BUILD_TOP=$(pwd)
- export BUFFET_COMPONENTS_TOP=$BUFFET_BUILD_TOP/components
- export BUFFET_TARGET_PRODUCT=$product
- export BUFFET_TARGET_BUILD_VARIANT=$variant
- export BUFFET_TARGET_BUILD_TYPE=release
-}
-
-function m()
-{
- if [ -z "$BUFFET_BUILD_TOP" ]
- then
- echo "Run \"buffet [target]\" first"
- return 1
- fi
- python3 $BUFFET_BUILD_TOP/tools/build/orchestrator/build_helper.py "$@"
-}
diff --git a/orchestrator/demo/hierarchy.py b/orchestrator/demo/hierarchy.py
deleted file mode 100644
index ae1825c049..0000000000
--- a/orchestrator/demo/hierarchy.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import os
-import yaml
-
-
-def parse_hierarchy(build_top):
- """Parse build hierarchy file from given build top directory, and returns a dict from child targets to parent targets.
-
- Example of hierarchy file:
- ==========
- aosp_arm64:
- - armv8
- - aosp_cf_arm64_phone
-
- armv8:
- - aosp_oriole
- - aosp_sunfish
-
- aosp_oriole:
- - oriole
-
- aosp_sunfish:
- - sunfish
-
- oriole:
- # leaf
-
- sunfish:
- # leaf
- ==========
-
- If we parse this yaml, we get a dict looking like:
-
- {
- "sunfish": "aosp_sunfish",
- "oriole": "aosp_oriole",
- "aosp_oriole": "armv8",
- "aosp_sunfish": "armv8",
- "armv8": "aosp_arm64",
- "aosp_cf_arm64_phone": "aosp_arm64",
- "aosp_arm64": None, # no parent
- }
- """
- metadata_path = os.path.join(build_top, 'tools', 'build', 'hierarchy.yaml')
- if not os.path.isfile(metadata_path):
- raise RuntimeError("target metadata file %s doesn't exist" % metadata_path)
-
- with open(metadata_path, 'r') as f:
- hierarchy_yaml = yaml.load(f, Loader=yaml.SafeLoader)
-
- hierarchy_map = dict()
-
- for parent_target, child_targets in hierarchy_yaml.items():
- if not child_targets:
- # leaf
- continue
- for child_target in child_targets:
- hierarchy_map[child_target] = parent_target
-
- for parent_target in hierarchy_yaml:
- # targets with no parent
- if parent_target not in hierarchy_map:
- hierarchy_map[parent_target] = None
-
- return hierarchy_map
diff --git a/orchestrator/demo/hierarchy.yaml b/orchestrator/demo/hierarchy.yaml
deleted file mode 100644
index cc6de4df3d..0000000000
--- a/orchestrator/demo/hierarchy.yaml
+++ /dev/null
@@ -1,37 +0,0 @@
-# hierarchy of targets
-
-aosp_arm64:
-- armv8
-- aosp_cf_arm64_phone
-
-armv8:
-- mainline_modules_arm64
-
-mainline_modules_arm64:
-- aosp_oriole
-- aosp_sunfish
-- aosp_raven
-
-aosp_oriole:
-- oriole
-
-aosp_sunfish:
-- sunfish
-
-aosp_raven:
-- raven
-
-oriole:
-# leaf
-
-sunfish:
-# leaf
-
-raven:
-# leaf
-
-aosp_cf_arm64_phone:
-- cf_arm64_phone
-
-cf_arm64_phone:
-# leaf
diff --git a/orchestrator/demo/utils.py b/orchestrator/demo/utils.py
deleted file mode 100644
index 5dbbe4aa9f..0000000000
--- a/orchestrator/demo/utils.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright (C) 2021 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import logging
-import os
-
-# default build configuration for each component
-DEFAULT_BUILDCMD = 'm'
-DEFAULT_OUTDIR = 'out'
-
-# yaml fields
-META_BUILDCMD = 'build_cmd'
-META_OUTDIR = 'out_dir'
-META_EXPORTS = 'exports'
-META_IMPORTS = 'imports'
-META_TARGETS = 'lunch_targets'
-META_DEPS = 'deps'
-# fields under 'exports' and 'imports'
-META_LIBS = 'libraries'
-META_APIS = 'APIs'
-META_FILEGROUP = 'filegroup'
-META_MODULES = 'modules'
-# fields under 'libraries'
-META_LIB_NAME = 'name'
-
-# fields for generated metadata file
-SOONG_IMPORTED = 'Imported'
-SOONG_IMPORTED_FILEGROUPS = 'FileGroups'
-SOONG_EXPORTED = 'Exported'
-
-# export map items
-EXP_COMPONENT = 'component'
-EXP_TYPE = 'type'
-EXP_OUTPATHS = 'outpaths'
-
-class BuildContext:
-
- def __init__(self):
- self._build_top = os.getenv('BUFFET_BUILD_TOP')
- self._components_top = os.getenv('BUFFET_COMPONENTS_TOP')
- self._target_product = os.getenv('BUFFET_TARGET_PRODUCT')
- self._target_build_variant = os.getenv('BUFFET_TARGET_BUILD_VARIANT')
- self._target_build_type = os.getenv('BUFFET_TARGET_BUILD_TYPE')
- self._out_dir = os.path.join(self._build_top, 'out')
-
- if not self._build_top:
- raise RuntimeError("Can't find root. Did you run buffet?")
-
- def build_top(self):
- return self._build_top
-
- def components_top(self):
- return self._components_top
-
- def target_product(self):
- return self._target_product
-
- def target_build_variant(self):
- return self._target_build_variant
-
- def target_build_type(self):
- return self._target_build_type
-
- def out_dir(self):
- return self._out_dir
-
-
-def get_build_context():
- return BuildContext()
-
-
-def set_logging_config(verbose_level):
- verbose_map = (logging.WARNING, logging.INFO, logging.DEBUG)
- verbosity = min(verbose_level, 2)
- logging.basicConfig(
- format='%(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
- level=verbose_map[verbosity])
diff --git a/orchestrator/inner_build/common.py b/orchestrator/inner_build/common.py
deleted file mode 100644
index 382844bd98..0000000000
--- a/orchestrator/inner_build/common.py
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import sys
-
-def _parse_arguments(argv):
- argv = argv[1:]
- """Return an argparse options object."""
- # Top-level parser
- parser = argparse.ArgumentParser(prog=".inner_build")
-
- parser.add_argument("--out_dir", action="store", required=True,
- help="root of the output directory for this inner tree's API contributions")
-
- parser.add_argument("--api_domain", action="append", required=True,
- help="which API domains are to be built in this inner tree")
-
- subparsers = parser.add_subparsers(required=True, dest="command",
- help="subcommands")
-
- # inner_build describe command
- describe_parser = subparsers.add_parser("describe",
- help="describe the capabilities of this inner tree's build system")
-
- # create the parser for the "b" command
- export_parser = subparsers.add_parser("export_api_contributions",
- help="export the API contributions of this inner tree")
-
- # create the parser for the "b" command
- export_parser = subparsers.add_parser("analyze",
- help="main build analysis for this inner tree")
-
- # Parse the arguments
- return parser.parse_args(argv)
-
-
-class Commands(object):
- def Run(self, argv):
- """Parse the command arguments and call the corresponding subcommand method on
- this object.
-
- Throws AttributeError if the method for the command wasn't found.
- """
- args = _parse_arguments(argv)
- return getattr(self, args.command)(args)
-
diff --git a/orchestrator/inner_build/inner_build_demo.py b/orchestrator/inner_build/inner_build_demo.py
deleted file mode 100755
index 264739b8c9..0000000000
--- a/orchestrator/inner_build/inner_build_demo.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import sys
-import textwrap
-
-sys.dont_write_bytecode = True
-import common
-
-def mkdirs(path):
- try:
- os.makedirs(path)
- except FileExistsError:
- pass
-
-
-class InnerBuildSoong(common.Commands):
- def describe(self, args):
- mkdirs(args.out_dir)
-
- with open(os.path.join(args.out_dir, "tree_info.json"), "w") as f:
- f.write(textwrap.dedent("""\
- {
- "requires_ninja": true,
- "orchestrator_protocol_version": 1
- }"""))
-
- def export_api_contributions(self, args):
- contributions_dir = os.path.join(args.out_dir, "api_contributions")
- mkdirs(contributions_dir)
-
- if "system" in args.api_domain:
- with open(os.path.join(contributions_dir, "api_a-1.json"), "w") as f:
- # 'name: android' is android.jar
- f.write(textwrap.dedent("""\
- {
- "name": "api_a",
- "version": 1,
- "api_domain": "system",
- "cc_libraries": [
- {
- "name": "libhello1",
- "headers": [
- {
- "root": "build/build/make/orchestrator/test_workspace/inner_tree_1",
- "files": [
- "hello1.h"
- ]
- }
- ],
- "api": [
- "build/build/make/orchestrator/test_workspace/inner_tree_1/libhello1"
- ]
- }
- ]
- }"""))
-
- def analyze(self, args):
- if "system" in args.api_domain:
- # Nothing to export in this demo
- # Write a fake inner_tree.ninja; what the inner tree would have generated
- with open(os.path.join(args.out_dir, "inner_tree.ninja"), "w") as f:
- # TODO: Note that this uses paths relative to the workspace not the iner tree
- # for demo purposes until we get the ninja chdir change in.
- f.write(textwrap.dedent("""\
- rule compile_c
- command = mkdir -p ${out_dir} && g++ -c ${cflags} -o ${out} ${in}
- rule link_so
- command = mkdir -p ${out_dir} && gcc -shared -o ${out} ${in}
- build %(OUT_DIR)s/libhello1/hello1.o: compile_c build/build/make/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
- out_dir = %(OUT_DIR)s/libhello1
- cflags = -Ibuild/build/make/orchestrator/test_workspace/inner_tree_1/libhello1/include
- build %(OUT_DIR)s/libhello1/libhello1.so: link_so %(OUT_DIR)s/libhello1/hello1.o
- out_dir = %(OUT_DIR)s/libhello1
- build system: phony %(OUT_DIR)s/libhello1/libhello1.so
- """ % { "OUT_DIR": args.out_dir }))
- with open(os.path.join(args.out_dir, "build_targets.json"), "w") as f:
- f.write(textwrap.dedent("""\
- {
- "staging": [
- {
- "dest": "staging/system/lib/libhello1.so",
- "obj": "libhello1/libhello1.so"
- }
- ]
- }""" % { "OUT_DIR": args.out_dir }))
-
-def main(argv):
- return InnerBuildSoong().Run(argv)
-
-
-if __name__ == "__main__":
- sys.exit(main(sys.argv))
-
-
-# vim: sts=4:ts=4:sw=4
diff --git a/orchestrator/inner_build/inner_build_soong.py b/orchestrator/inner_build/inner_build_soong.py
deleted file mode 100755
index a653dcca8c..0000000000
--- a/orchestrator/inner_build/inner_build_soong.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/python3
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import sys
-
-sys.dont_write_bytecode = True
-import common
-
-class InnerBuildSoong(common.Commands):
- def describe(self, args):
- pass
-
-
- def export_api_contributions(self, args):
- pass
-
-
-def main(argv):
- return InnerBuildSoong().Run(argv)
-
-
-if __name__ == "__main__":
- sys.exit(main(sys.argv))
diff --git a/orchestrator/multitree_combos/aosp_cf_arm64_phone.mcombo b/orchestrator/multitree_combos/aosp_cf_arm64_phone.mcombo
deleted file mode 100644
index 079022611d..0000000000
--- a/orchestrator/multitree_combos/aosp_cf_arm64_phone.mcombo
+++ /dev/null
@@ -1,16 +0,0 @@
-{
- "lunchable": true,
- "system": {
- "tree": "master",
- "product": "aosp_cf_arm64_phone"
- },
- "vendor": {
- "tree": "master",
- "product": "aosp_cf_arm64_phone"
- },
- "modules": {
- "com.android.bionic": {
- "tree": "sc-mainline-prod"
- }
- }
-}
diff --git a/orchestrator/multitree_combos/test.mcombo b/orchestrator/multitree_combos/test.mcombo
deleted file mode 100644
index 3ad0717577..0000000000
--- a/orchestrator/multitree_combos/test.mcombo
+++ /dev/null
@@ -1,16 +0,0 @@
-{
- "lunchable": true,
- "system": {
- "tree": "inner_tree_system",
- "product": "system_lunch_product"
- },
- "vendor": {
- "tree": "inner_tree_vendor",
- "product": "vendor_lunch_product"
- },
- "modules": {
- "com.android.something": {
- "tree": "inner_tree_module"
- }
- }
-}
diff --git a/orchestrator/ninja/ninja_syntax.py b/orchestrator/ninja/ninja_syntax.py
deleted file mode 100644
index df97b68f09..0000000000
--- a/orchestrator/ninja/ninja_syntax.py
+++ /dev/null
@@ -1,172 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from abc import ABC, abstractmethod
-
-from collections.abc import Iterator
-from typing import List
-
-TAB = " "
-
-class Node(ABC):
- '''An abstract class that can be serialized to a ninja file
- All other ninja-serializable classes inherit from this class'''
-
- @abstractmethod
- def stream(self) -> Iterator[str]:
- pass
-
-class Variable(Node):
- '''A ninja variable that can be reused across build actions
- https://ninja-build.org/manual.html#_variables'''
-
- def __init__(self, name:str, value:str, indent=0):
- self.name = name
- self.value = value
- self.indent = indent
-
- def stream(self) -> Iterator[str]:
- indent = TAB * self.indent
- yield f"{indent}{self.name} = {self.value}"
-
-class RuleException(Exception):
- pass
-
-# Ninja rules recognize a limited set of variables
-# https://ninja-build.org/manual.html#ref_rule
-# Keep this list sorted
-RULE_VARIABLES = ["command",
- "depfile",
- "deps",
- "description",
- "dyndep",
- "generator",
- "msvc_deps_prefix",
- "restat",
- "rspfile",
- "rspfile_content"]
-
-class Rule(Node):
- '''A shorthand for a command line that can be reused
- https://ninja-build.org/manual.html#_rules'''
-
- def __init__(self, name:str):
- self.name = name
- self.variables = []
-
- def add_variable(self, name: str, value: str):
- if name not in RULE_VARIABLES:
- raise RuleException(f"{name} is not a recognized variable in a ninja rule")
-
- self.variables.append(Variable(name=name, value=value, indent=1))
-
- def stream(self) -> Iterator[str]:
- self._validate_rule()
-
- yield f"rule {self.name}"
- # Yield rule variables sorted by `name`
- for var in sorted(self.variables, key=lambda x: x.name):
- # variables yield a single item, next() is sufficient
- yield next(var.stream())
-
- def _validate_rule(self):
- # command is a required variable in a ninja rule
- self._assert_variable_is_not_empty(variable_name="command")
-
- def _assert_variable_is_not_empty(self, variable_name: str):
- if not any(var.name == variable_name for var in self.variables):
- raise RuleException(f"{variable_name} is required in a ninja rule")
-
-class BuildActionException(Exception):
- pass
-
-class BuildAction(Node):
- '''Describes the dependency edge between inputs and output
- https://ninja-build.org/manual.html#_build_statements'''
-
- def __init__(self, output: str, rule: str, inputs: List[str]=None, implicits: List[str]=None, order_only: List[str]=None):
- self.output = output
- self.rule = rule
- self.inputs = self._as_list(inputs)
- self.implicits = self._as_list(implicits)
- self.order_only = self._as_list(order_only)
- self.variables = []
-
- def add_variable(self, name: str, value: str):
- '''Variables limited to the scope of this build action'''
- self.variables.append(Variable(name=name, value=value, indent=1))
-
- def stream(self) -> Iterator[str]:
- self._validate()
-
- build_statement = f"build {self.output}: {self.rule}"
- if len(self.inputs) > 0:
- build_statement += " "
- build_statement += " ".join(self.inputs)
- if len(self.implicits) > 0:
- build_statement += " | "
- build_statement += " ".join(self.implicits)
- if len(self.order_only) > 0:
- build_statement += " || "
- build_statement += " ".join(self.order_only)
- yield build_statement
- # Yield variables sorted by `name`
- for var in sorted(self.variables, key=lambda x: x.name):
- # variables yield a single item, next() is sufficient
- yield next(var.stream())
-
- def _validate(self):
- if not self.output:
- raise BuildActionException("Output is required in a ninja build statement")
- if not self.rule:
- raise BuildActionException("Rule is required in a ninja build statement")
-
- def _as_list(self, list_like):
- if list_like is None:
- return []
- if isinstance(list_like, list):
- return list_like
- return [list_like]
-
-class Pool(Node):
- '''https://ninja-build.org/manual.html#ref_pool'''
-
- def __init__(self, name: str, depth: int):
- self.name = name
- self.depth = Variable(name="depth", value=depth, indent=1)
-
- def stream(self) -> Iterator[str]:
- yield f"pool {self.name}"
- yield next(self.depth.stream())
-
-class Subninja(Node):
-
- def __init__(self, subninja: str, chDir: str):
- self.subninja = subninja
- self.chDir = chDir
-
- # TODO(spandandas): Update the syntax when aosp/2064612 lands
- def stream(self) -> Iterator[str]:
- yield f"subninja {self.subninja}"
-
-class Line(Node):
- '''Generic class that can be used for comments/newlines/default_target etc'''
-
- def __init__(self, value:str):
- self.value = value
-
- def stream(self) -> Iterator[str]:
- yield self.value
diff --git a/orchestrator/ninja/ninja_writer.py b/orchestrator/ninja/ninja_writer.py
deleted file mode 100644
index 9e80b4b753..0000000000
--- a/orchestrator/ninja/ninja_writer.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ninja_syntax import Variable, BuildAction, Rule, Pool, Subninja, Line
-
-# TODO: Format the output according to a configurable width variable
-# This will ensure that the generated content fits on a screen and does not
-# require horizontal scrolling
-class Writer:
-
- def __init__(self, file):
- self.file = file
- self.nodes = [] # type Node
-
- def add_variable(self, variable: Variable):
- self.nodes.append(variable)
-
- def add_rule(self, rule: Rule):
- self.nodes.append(rule)
-
- def add_build_action(self, build_action: BuildAction):
- self.nodes.append(build_action)
-
- def add_pool(self, pool: Pool):
- self.nodes.append(pool)
-
- def add_comment(self, comment: str):
- self.nodes.append(Line(value=f"# {comment}"))
-
- def add_default(self, default: str):
- self.nodes.append(Line(value=f"default {default}"))
-
- def add_newline(self):
- self.nodes.append(Line(value=""))
-
- def add_subninja(self, subninja: Subninja):
- self.nodes.append(subninja)
-
- def add_phony(self, name, deps):
- build_action = BuildAction(name, "phony", inputs=deps)
- self.add_build_action(build_action)
-
- def write(self):
- for node in self.nodes:
- for line in node.stream():
- print(line, file=self.file)
diff --git a/orchestrator/ninja/test_ninja_syntax.py b/orchestrator/ninja/test_ninja_syntax.py
deleted file mode 100644
index d922fd2298..0000000000
--- a/orchestrator/ninja/test_ninja_syntax.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from ninja_syntax import Variable, Rule, RuleException, BuildAction, BuildActionException, Pool
-
-class TestVariable(unittest.TestCase):
-
- def test_assignment(self):
- variable = Variable(name="key", value="value")
- self.assertEqual("key = value", next(variable.stream()))
- variable = Variable(name="key", value="value with spaces")
- self.assertEqual("key = value with spaces", next(variable.stream()))
- variable = Variable(name="key", value="$some_other_variable")
- self.assertEqual("key = $some_other_variable", next(variable.stream()))
-
- def test_indentation(self):
- variable = Variable(name="key", value="value", indent=0)
- self.assertEqual("key = value", next(variable.stream()))
- variable = Variable(name="key", value="value", indent=1)
- self.assertEqual(" key = value", next(variable.stream()))
-
-class TestRule(unittest.TestCase):
-
- def test_rulename_comes_first(self):
- rule = Rule(name="myrule")
- rule.add_variable("command", "/bin/bash echo")
- self.assertEqual("rule myrule", next(rule.stream()))
-
- def test_command_is_a_required_variable(self):
- rule = Rule(name="myrule")
- with self.assertRaises(RuleException):
- next(rule.stream())
-
- def test_bad_rule_variable(self):
- rule = Rule(name="myrule")
- with self.assertRaises(RuleException):
- rule.add_variable(name="unrecognize_rule_variable", value="value")
-
- def test_rule_variables_are_indented(self):
- rule = Rule(name="myrule")
- rule.add_variable("command", "/bin/bash echo")
- stream = rule.stream()
- self.assertEqual("rule myrule", next(stream)) # top-level rule should not be indented
- self.assertEqual(" command = /bin/bash echo", next(stream))
-
- def test_rule_variables_are_sorted(self):
- rule = Rule(name="myrule")
- rule.add_variable("description", "Adding description before command")
- rule.add_variable("command", "/bin/bash echo")
- stream = rule.stream()
- self.assertEqual("rule myrule", next(stream)) # rule always comes first
- self.assertEqual(" command = /bin/bash echo", next(stream))
- self.assertEqual(" description = Adding description before command", next(stream))
-
-class TestBuildAction(unittest.TestCase):
-
- def test_no_inputs(self):
- build = BuildAction(output="out", rule="phony")
- stream = build.stream()
- self.assertEqual("build out: phony", next(stream))
- # Empty output
- build = BuildAction(output="", rule="phony")
- with self.assertRaises(BuildActionException):
- next(build.stream())
- # Empty rule
- build = BuildAction(output="out", rule="")
- with self.assertRaises(BuildActionException):
- next(build.stream())
-
- def test_inputs(self):
- build = BuildAction(output="out", rule="cat", inputs=["input1", "input2"])
- self.assertEqual("build out: cat input1 input2", next(build.stream()))
- build = BuildAction(output="out", rule="cat", inputs=["input1", "input2"], implicits=["implicits1", "implicits2"], order_only=["order_only1", "order_only2"])
- self.assertEqual("build out: cat input1 input2 | implicits1 implicits2 || order_only1 order_only2", next(build.stream()))
-
- def test_variables(self):
- build = BuildAction(output="out", rule="cat", inputs=["input1", "input2"])
- build.add_variable(name="myvar", value="myval")
- stream = build.stream()
- next(stream)
- self.assertEqual(" myvar = myval", next(stream))
-
-class TestPool(unittest.TestCase):
-
- def test_pool(self):
- pool = Pool(name="mypool", depth=10)
- stream = pool.stream()
- self.assertEqual("pool mypool", next(stream))
- self.assertEqual(" depth = 10", next(stream))
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/orchestrator/ninja/test_ninja_writer.py b/orchestrator/ninja/test_ninja_writer.py
deleted file mode 100644
index 703dd4d8f6..0000000000
--- a/orchestrator/ninja/test_ninja_writer.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from io import StringIO
-
-from ninja_writer import Writer
-from ninja_syntax import Variable, Rule, BuildAction
-
-class TestWriter(unittest.TestCase):
-
- def test_simple_writer(self):
- with StringIO() as f:
- writer = Writer(f)
- writer.add_variable(Variable(name="cflags", value="-Wall"))
- writer.add_newline()
- cc = Rule(name="cc")
- cc.add_variable(name="command", value="gcc $cflags -c $in -o $out")
- writer.add_rule(cc)
- writer.add_newline()
- build_action = BuildAction(output="foo.o", rule="cc", inputs=["foo.c"])
- writer.add_build_action(build_action)
- writer.write()
- self.assertEqual('''cflags = -Wall
-
-rule cc
- command = gcc $cflags -c $in -o $out
-
-build foo.o: cc foo.c
-''', f.getvalue())
-
- def test_comment(self):
- with StringIO() as f:
- writer = Writer(f)
- writer.add_comment("This is a comment in a ninja file")
- writer.write()
- self.assertEqual("# This is a comment in a ninja file\n", f.getvalue())
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/orchestrator/test_workspace/combo.mcombo b/orchestrator/test_workspace/combo.mcombo
deleted file mode 100644
index 8200dc03ae..0000000000
--- a/orchestrator/test_workspace/combo.mcombo
+++ /dev/null
@@ -1,17 +0,0 @@
-{
- "lunchable": true,
- "system": {
- "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1",
- "product": "test_product1"
- },
- "vendor": {
- "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1",
- "product": "test_product2"
- },
- "modules": {
- "module_1": {
- "tree": "build/build/make/orchestrator/test_workspace/inner_tree_1"
- }
- }
-}
-
diff --git a/orchestrator/test_workspace/inner_tree_1/.inner_build b/orchestrator/test_workspace/inner_tree_1/.inner_build
deleted file mode 120000
index d8f235fb5e..0000000000
--- a/orchestrator/test_workspace/inner_tree_1/.inner_build
+++ /dev/null
@@ -1 +0,0 @@
-../../inner_build/inner_build_demo.py \ No newline at end of file
diff --git a/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c b/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
deleted file mode 100644
index 1415082771..0000000000
--- a/orchestrator/test_workspace/inner_tree_1/libhello1/hello1.c
+++ /dev/null
@@ -1,8 +0,0 @@
-#include <stdio.h>
-
-#include "hello1.h"
-
-void hello1(void) {
- printf("hello1");
-}
-
diff --git a/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h b/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h
deleted file mode 100644
index 0309c1c5c0..0000000000
--- a/orchestrator/test_workspace/inner_tree_1/libhello1/include/hello1.h
+++ /dev/null
@@ -1,4 +0,0 @@
-#pragma once
-
-extern "C" void hello1(void);
-
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
index 53714a8594..8c634f6a25 100644
--- a/target/board/BoardConfigGsiCommon.mk
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -80,6 +80,3 @@ BOARD_CACHEIMAGE_PARTITION_SIZE := 16777216
# Setup a vendor image to let PRODUCT_VENDOR_PROPERTIES does not affect GSI
BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
-
-# Disable 64 bit mediadrmserver
-TARGET_ENABLE_MEDIADRM_64 :=
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index 45ed3daa7c..40be80e413 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -52,6 +52,9 @@ TARGET_2ND_ARCH_VARIANT := armv8-a
TARGET_2ND_CPU_VARIANT := generic
endif
+# Include 64-bit mediaserver to support 64-bit only devices
+TARGET_DYNAMIC_64_32_MEDIASERVER := true
+
include build/make/target/board/BoardConfigGsiCommon.mk
# Some vendors still haven't cleaned up all device specific directories under
diff --git a/target/board/generic_x86_64/BoardConfig.mk b/target/board/generic_x86_64/BoardConfig.mk
index 93694f2d4f..e7f2ae0072 100755
--- a/target/board/generic_x86_64/BoardConfig.mk
+++ b/target/board/generic_x86_64/BoardConfig.mk
@@ -22,6 +22,9 @@ TARGET_2ND_CPU_ABI := x86
TARGET_2ND_ARCH := x86
TARGET_2ND_ARCH_VARIANT := x86_64
+# Include 64-bit mediaserver to support 64-bit only devices
+TARGET_DYNAMIC_64_32_MEDIASERVER := true
+
include build/make/target/board/BoardConfigGsiCommon.mk
ifndef BUILDING_GSI
diff --git a/target/board/gsi_arm64/BoardConfig.mk b/target/board/gsi_arm64/BoardConfig.mk
index db6f3f04bc..db95082e5f 100644
--- a/target/board/gsi_arm64/BoardConfig.mk
+++ b/target/board/gsi_arm64/BoardConfig.mk
@@ -27,6 +27,9 @@ TARGET_2ND_CPU_ABI := armeabi-v7a
TARGET_2ND_CPU_ABI2 := armeabi
TARGET_2ND_CPU_VARIANT := generic
+# Include 64-bit mediaserver to support 64-bit only devices
+TARGET_DYNAMIC_64_32_MEDIASERVER := true
+
# TODO(b/111434759, b/111287060) SoC specific hacks
BOARD_ROOT_EXTRA_SYMLINKS += /vendor/lib/dsp:/dsp
BOARD_ROOT_EXTRA_SYMLINKS += /mnt/vendor/persist:/persist
diff --git a/target/product/OWNERS b/target/product/OWNERS
index b3d89980ee..61f7d45f0a 100644
--- a/target/product/OWNERS
+++ b/target/product/OWNERS
@@ -3,3 +3,8 @@ per-file runtime_libart.mk = calin@google.com, mast@google.com, ngeoffray@google
# GSI
per-file gsi_release.mk = file:/target/product/gsi/OWNERS
per-file developer_gsi_keys.mk = file:/target/product/gsi/OWNERS
+
+# Android Go
+per-file go_defaults.mk = gkaiser@google.com, rajekumar@google.com
+per-file go_defaults_512.mk = gkaiser@google.com, rajekumar@google.com
+per-file go_defaults_common.mk = gkaiser@google.com, rajekumar@google.com
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 90a2577797..494c7c13d3 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -378,7 +378,6 @@ PRODUCT_PACKAGES_DEBUG := \
procrank \
profcollectd \
profcollectctl \
- remount \
servicedispatcher \
showmap \
sqlite3 \
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index 5004b85ee1..fbc6cccefa 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -42,7 +42,6 @@ PRODUCT_HOST_PACKAGES += \
# Base modules and settings for the vendor partition.
PRODUCT_PACKAGES += \
android.hardware.cas@1.2-service \
- android.hardware.media.omx@1.0-service \
boringssl_self_test_vendor \
dumpsys_vendor \
fs_config_files_nonsystem \
@@ -69,6 +68,13 @@ PRODUCT_PACKAGES += \
selinux_policy_nonsystem \
shell_and_utilities_vendor \
+# OMX not supported for 64bit_only builds
+ifneq ($(TARGET_SUPPORTS_OMX_SERVICE),false)
+ PRODUCT_PACKAGES += \
+ android.hardware.media.omx@1.0-service \
+
+endif
+
# Base module when shipping api level is less than or equal to 29
PRODUCT_PACKAGES_SHIPPING_API_LEVEL_29 += \
android.hardware.configstore@1.1-service \
diff --git a/target/product/core_64_bit.mk b/target/product/core_64_bit.mk
index 322fa80530..b9d22a6dd3 100644
--- a/target/product/core_64_bit.mk
+++ b/target/product/core_64_bit.mk
@@ -27,7 +27,11 @@ PRODUCT_COPY_FILES += system/core/rootdir/init.zygote64_32.rc:system/etc/init/hw
# Set the zygote property to select the 64-bit primary, 32-bit secondary script
# This line must be parsed before the one in core_minimal.mk
+ifeq ($(ZYGOTE_FORCE_64),true)
+PRODUCT_VENDOR_PROPERTIES += ro.zygote=zygote64
+else
PRODUCT_VENDOR_PROPERTIES += ro.zygote=zygote64_32
+endif
TARGET_SUPPORTS_32_BIT_APPS := true
TARGET_SUPPORTS_64_BIT_APPS := true
diff --git a/target/product/core_64_bit_only.mk b/target/product/core_64_bit_only.mk
index 061728f291..fc2b8e5498 100644
--- a/target/product/core_64_bit_only.mk
+++ b/target/product/core_64_bit_only.mk
@@ -31,3 +31,4 @@ PRODUCT_VENDOR_PROPERTIES += dalvik.vm.dex2oat64.enabled=true
TARGET_SUPPORTS_32_BIT_APPS := false
TARGET_SUPPORTS_64_BIT_APPS := true
+TARGET_SUPPORTS_OMX_SERVICE := false
diff --git a/tools/build-runfiles.cc b/tools/build-runfiles.cc
index d92e663b7e..b6197f0708 100644
--- a/tools/build-runfiles.cc
+++ b/tools/build-runfiles.cc
@@ -147,7 +147,7 @@ class RunfilesCreator {
info->type = FILE_TYPE_REGULAR;
} else {
info->type = FILE_TYPE_SYMLINK;
- info->symlink_target = strdup(target);
+ info->symlink_target = target;
}
FileInfo parent_info;
diff --git a/tools/canoninja/go.mod b/tools/canoninja/go.mod
index c5a924e7c3..9e668a5409 100644
--- a/tools/canoninja/go.mod
+++ b/tools/canoninja/go.mod
@@ -1 +1,3 @@
module canoninja
+
+go 1.19
diff --git a/tools/compliance/go.mod b/tools/compliance/go.mod
index 61e21583aa..088915a33f 100644
--- a/tools/compliance/go.mod
+++ b/tools/compliance/go.mod
@@ -4,9 +4,17 @@ require google.golang.org/protobuf v0.0.0
replace google.golang.org/protobuf v0.0.0 => ../../../../external/golang-protobuf
-require android/soong v0.0.0
+require (
+ android/soong v0.0.0
+ github.com/google/blueprint v0.0.0
+)
+
+require golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f // indirect
+
+replace android/soong v0.0.0 => ../../../soong
+
+replace github.com/google/blueprint => ../../../blueprint
-replace android/soong v0.0.0 => ../../../soong
// Indirect deps from golang-protobuf
exclude github.com/golang/protobuf v1.5.0
diff --git a/tools/compliance/go.sum b/tools/compliance/go.sum
new file mode 100644
index 0000000000..cbe76d9187
--- /dev/null
+++ b/tools/compliance/go.sum
@@ -0,0 +1,2 @@
+golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f h1:uF6paiQQebLeSXkrTqHqz0MXhXXS1KgF41eUdBNvxK0=
+golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index d8e34b7b33..122202b390 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -519,23 +519,6 @@ python_binary_host {
}
python_binary_host {
- name: "fsverity_manifest_generator",
- defaults: ["releasetools_binary_defaults"],
- srcs: [
- "fsverity_manifest_generator.py",
- ],
- libs: [
- "fsverity_digests_proto_python",
- "releasetools_common",
- ],
- required: [
- "aapt2",
- "apksigner",
- "fsverity",
- ],
-}
-
-python_binary_host {
name: "fsverity_metadata_generator",
defaults: ["releasetools_binary_defaults"],
srcs: [
diff --git a/tools/releasetools/fsverity_manifest_generator.py b/tools/releasetools/fsverity_manifest_generator.py
deleted file mode 100644
index b8184bce51..0000000000
--- a/tools/releasetools/fsverity_manifest_generator.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-`fsverity_manifest_generator` generates build manifest APK file containing
-digests of target files. The APK file is signed so the manifest inside the APK
-can be trusted.
-"""
-
-import argparse
-import common
-import os
-import subprocess
-import sys
-from fsverity_digests_pb2 import FSVerityDigests
-
-HASH_ALGORITHM = 'sha256'
-
-def _digest(fsverity_path, input_file):
- cmd = [fsverity_path, 'digest', input_file]
- cmd.extend(['--compact'])
- cmd.extend(['--hash-alg', HASH_ALGORITHM])
- out = subprocess.check_output(cmd, universal_newlines=True).strip()
- return bytes(bytearray.fromhex(out))
-
-if __name__ == '__main__':
- p = argparse.ArgumentParser()
- p.add_argument(
- '--output',
- help='Path to the output manifest APK',
- required=True)
- p.add_argument(
- '--fsverity-path',
- help='path to the fsverity program',
- required=True)
- p.add_argument(
- '--aapt2-path',
- help='path to the aapt2 program',
- required=True)
- p.add_argument(
- '--min-sdk-version',
- help='minimum supported sdk version of the generated manifest apk',
- required=True)
- p.add_argument(
- '--version-code',
- help='version code for the generated manifest apk',
- required=True)
- p.add_argument(
- '--version-name',
- help='version name for the generated manifest apk',
- required=True)
- p.add_argument(
- '--framework-res',
- help='path to framework-res.apk',
- required=True)
- p.add_argument(
- '--apksigner-path',
- help='path to the apksigner program',
- required=True)
- p.add_argument(
- '--apk-key-path',
- help='path to the apk key',
- required=True)
- p.add_argument(
- '--apk-manifest-path',
- help='path to AndroidManifest.xml',
- required=True)
- p.add_argument(
- '--base-dir',
- help='directory to use as a relative root for the inputs',
- required=True)
- p.add_argument(
- 'inputs',
- nargs='+',
- help='input file for the build manifest')
- args = p.parse_args(sys.argv[1:])
-
- digests = FSVerityDigests()
- for f in sorted(args.inputs):
- # f is a full path for now; make it relative so it starts with {mount_point}/
- digest = digests.digests[os.path.relpath(f, args.base_dir)]
- digest.digest = _digest(args.fsverity_path, f)
- digest.hash_alg = HASH_ALGORITHM
-
- temp_dir = common.MakeTempDir()
-
- os.mkdir(os.path.join(temp_dir, "assets"))
- metadata_path = os.path.join(temp_dir, "assets", "build_manifest.pb")
- with open(metadata_path, "wb") as f:
- f.write(digests.SerializeToString())
-
- common.RunAndCheckOutput([args.aapt2_path, "link",
- "-A", os.path.join(temp_dir, "assets"),
- "-o", args.output,
- "--min-sdk-version", args.min_sdk_version,
- "--version-code", args.version_code,
- "--version-name", args.version_name,
- "-I", args.framework_res,
- "--manifest", args.apk_manifest_path])
- common.RunAndCheckOutput([args.apksigner_path, "sign", "--in", args.output,
- "--cert", args.apk_key_path + ".x509.pem",
- "--key", args.apk_key_path + ".pk8"])
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index ef1dca232c..12acc138c7 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -16,6 +16,7 @@ import copy
import itertools
import logging
import os
+import shutil
import struct
import zipfile
@@ -119,7 +120,7 @@ def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
# Re-sign the package after updating the metadata entry.
if OPTIONS.no_signing:
- output_file = prelim_signing
+ shutil.copy(prelim_signing, output_file)
else:
SignOutput(prelim_signing, output_file)
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 40bd6a775e..ae72430aa1 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -27,7 +27,7 @@ Usage: sign_target_files_apks [flags] input_target_files output_target_files
apkcerts.txt file, or the container key for an APEX. Option may be
repeated to give multiple extra packages.
- --extra_apex_payload_key <name=key>
+ --extra_apex_payload_key <name,name,...=key>
Add a mapping for APEX package name to payload signing key, which will
override the default payload signing key in apexkeys.txt. Note that the
container key should be overridden via the `--extra_apks` flag above.
@@ -141,6 +141,12 @@ Usage: sign_target_files_apks [flags] input_target_files output_target_files
Allow the existence of the file 'userdebug_plat_sepolicy.cil' under
(/system/system_ext|/system_ext)/etc/selinux.
If not set, error out when the file exists.
+
+ --override_apk_keys <path>
+ Replace all APK keys with this private key
+
+ --override_apex_keys <path>
+ Replace all APEX keys with this private key
"""
from __future__ import print_function
@@ -197,6 +203,8 @@ OPTIONS.android_jar_path = None
OPTIONS.vendor_partitions = set()
OPTIONS.vendor_otatools = None
OPTIONS.allow_gsi_debug_sepolicy = False
+OPTIONS.override_apk_keys = None
+OPTIONS.override_apex_keys = None
AVB_FOOTER_ARGS_BY_PARTITION = {
@@ -245,6 +253,10 @@ def GetApexFilename(filename):
def GetApkCerts(certmap):
+ if OPTIONS.override_apk_keys is not None:
+ for apk in certmap.keys():
+ certmap[apk] = OPTIONS.override_apk_keys
+
# apply the key remapping to the contents of the file
for apk, cert in certmap.items():
certmap[apk] = OPTIONS.key_map.get(cert, cert)
@@ -275,6 +287,15 @@ def GetApexKeys(keys_info, key_map):
Raises:
AssertionError: On invalid container / payload key overrides.
"""
+ if OPTIONS.override_apex_keys is not None:
+ for apex in keys_info.keys():
+ keys_info[apex] = (OPTIONS.override_apex_keys, keys_info[apex][1], keys_info[apex][2])
+
+ if OPTIONS.override_apk_keys is not None:
+ key = key_map.get(OPTIONS.override_apk_keys, OPTIONS.override_apk_keys)
+ for apex in keys_info.keys():
+ keys_info[apex] = (keys_info[apex][0], key, keys_info[apex][2])
+
# Apply all the --extra_apex_payload_key options to override the payload
# signing keys in the given keys_info.
for apex, key in OPTIONS.extra_apex_payload_keys.items():
@@ -1380,8 +1401,9 @@ def main(argv):
for n in names:
OPTIONS.extra_apks[n] = key
elif o == "--extra_apex_payload_key":
- apex_name, key = a.split("=")
- OPTIONS.extra_apex_payload_keys[apex_name] = key
+ apex_names, key = a.split("=")
+ for name in apex_names.split(","):
+ OPTIONS.extra_apex_payload_keys[name] = key
elif o == "--skip_apks_with_path_prefix":
# Check the prefix, which must be in all upper case.
prefix = a.split('/')[0]
@@ -1484,6 +1506,10 @@ def main(argv):
OPTIONS.vendor_partitions = set(a.split(","))
elif o == "--allow_gsi_debug_sepolicy":
OPTIONS.allow_gsi_debug_sepolicy = True
+ elif o == "--override_apk_keys":
+ OPTIONS.override_apk_keys = a
+ elif o == "--override_apex_keys":
+ OPTIONS.override_apex_keys = a
else:
return False
return True
@@ -1537,6 +1563,8 @@ def main(argv):
"vendor_partitions=",
"vendor_otatools=",
"allow_gsi_debug_sepolicy",
+ "override_apk_keys=",
+ "override_apex_keys=",
],
extra_option_handler=option_handler)