diff --git a/core/Makefile b/core/Makefile index 5badde5587e..c5ce072079a 100644 --- a/core/Makefile +++ b/core/Makefile @@ -1,5 +1,76 @@ # Put some miscellaneous rules here +ifneq ($(BUILD_WITH_COLORS),0) + CL_RED="\033[31m" + CL_GRN="\033[32m" + CL_YLW="\033[33m" + CL_BLU="\033[34m" + CL_MAG="\033[35m" + CL_CYN="\033[36m" + CL_RST="\033[0m" + + # Let's be paranoid and make sure the 'bc' command is available. + # It would be silly to have an unimportant make error for this + # after a long build has completed successfully. If this check + # fails then all the CL_CHARn variables will be null, which will + # cause the default system color to be used. + + CHECK_BC := $(shell which bc > /dev/null; echo $$?) + ifeq ($(CHECK_BC),0) + # RANDOM_VAL gets a pseudo random based on the Linux $RANDOM + # environment variable. If that returns an empty value then we + # assume it is not supported and just use the current number of + # seconds, which works, but is not as interesting. + + ifeq ($(shell echo $${RANDOM}),) + RANDOM_VAL := $(shell echo `date +%s`) + else + RANDOM_VAL = $(shell echo $${RANDOM}) + endif + + # Random colors for use when displaying the completion logo + # that says "AICP ROM" (7 characters, excluding white space). + # In each definition we use RANDOM_VAL twice + # - The 1st gives 0 or 1 (normal or highlighted) + # - The 2nd gives 31..36 (RED, GRN, YLW, BLU, MAG, CYN) + + # Calculate the 2 random values (highlight and color) for each + # of the 7 characters we will write. The only reason for the + # first mod (%) is in case we are using seconds instead of RANDOM. + # That at least makes things slightly more interesting. 
+ CL_RANDOM_01 := $(shell echo ${RANDOM_VAL} % 2 | bc) + CL_RANDOM_02 := $(shell echo ${RANDOM_VAL} % 11 % 6 + 31 | bc ) + + CL_RANDOM_03 := $(shell echo ${RANDOM_VAL} % 2 | bc) + CL_RANDOM_04 := $(shell echo ${RANDOM_VAL} % 19 % 6 + 31 | bc ) + + CL_RANDOM_05 := $(shell echo ${RANDOM_VAL} % 2 | bc) + CL_RANDOM_06 := $(shell echo ${RANDOM_VAL} % 31 % 6 + 31 | bc ) + + CL_RANDOM_07 := $(shell echo ${RANDOM_VAL} % 2 | bc) + CL_RANDOM_08 := $(shell echo ${RANDOM_VAL} % 43 % 6 + 31 | bc ) + + CL_RANDOM_09 := $(shell echo ${RANDOM_VAL} % 2 | bc) + CL_RANDOM_10 := $(shell echo ${RANDOM_VAL} % 59 % 6 + 31 | bc ) + + CL_RANDOM_11 := $(shell echo ${RANDOM_VAL} % 2 | bc) + CL_RANDOM_12 := $(shell echo ${RANDOM_VAL} % 71 % 6 + 31 | bc ) + + CL_RANDOM_13 := $(shell echo ${RANDOM_VAL} % 2 | bc) + CL_RANDOM_14 := $(shell echo ${RANDOM_VAL} % 83 % 6 + 31 | bc ) + + + # Now we finally define the color codes for each character. + CL_CHAR1 := "\033[$(CL_RANDOM_01);$(CL_RANDOM_02)m" + CL_CHAR2 := "\033[$(CL_RANDOM_03);$(CL_RANDOM_04)m" + CL_CHAR3 := "\033[$(CL_RANDOM_05);$(CL_RANDOM_06)m" + CL_CHAR4 := "\033[$(CL_RANDOM_07);$(CL_RANDOM_08)m" + CL_CHAR5 := "\033[$(CL_RANDOM_09);$(CL_RANDOM_10)m" + CL_CHAR6 := "\033[$(CL_RANDOM_11);$(CL_RANDOM_12)m" + CL_CHAR7 := "\033[$(CL_RANDOM_13);$(CL_RANDOM_14)m" + endif +endif + # HACK: clear LOCAL_PATH from including last build target before calling # intermedites-dir-for LOCAL_PATH := $(BUILD_SYSTEM) @@ -34,7 +105,6 @@ unique_product_copy_files_destinations := $(foreach cf,$(unique_product_copy_files_pairs), \ $(eval _src := $(call word-colon,1,$(cf))) \ $(eval _dest := $(call word-colon,2,$(cf))) \ - $(call check-product-copy-files,$(cf)) \ $(if $(filter $(unique_product_copy_files_destinations),$(_dest)), \ $(info PRODUCT_COPY_FILES $(cf) ignored.), \ $(eval _fulldest := $(call append-path,$(PRODUCT_OUT),$(_dest))) \ @@ -96,7 +166,6 @@ $(INSTALLED_DEFAULT_PROP_TARGET): $(intermediate_system_build_prop) echo "#" >> $@; $(hide) echo 
ro.bootimage.build.date=`$(DATE_FROM_FILE)`>>$@ $(hide) echo ro.bootimage.build.date.utc=`$(DATE_FROM_FILE) +%s`>>$@ - $(hide) echo ro.bootimage.build.fingerprint="$(BUILD_FINGERPRINT_FROM_FILE)">>$@ $(hide) build/tools/post_process_props.py $@ # ----------------------------------------------------------------- @@ -127,8 +196,22 @@ endif BUILD_VERSION_TAGS += $(BUILD_KEYS) BUILD_VERSION_TAGS := $(subst $(space),$(comma),$(sort $(BUILD_VERSION_TAGS))) +# If the final fingerprint should be different than what was used by the build system, +# we can allow that too. +ifeq ($(TARGET_VENDOR_PRODUCT_NAME),) +TARGET_VENDOR_PRODUCT_NAME := $(TARGET_PRODUCT) +endif + +ifeq ($(TARGET_VENDOR_DEVICE_NAME),) +TARGET_VENDOR_DEVICE_NAME := $(TARGET_DEVICE) +endif + +ifeq ($(TARGET_VENDOR_RELEASE_BUILD_ID),) +TARGET_VENDOR_RELEASE_BUILD_ID := $(BUILD_NUMBER_FROM_FILE) +endif + # A human-readable string that descibes this build in detail. -build_desc := $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT) $(PLATFORM_VERSION) $(BUILD_ID) $(BUILD_NUMBER_FROM_FILE) $(BUILD_VERSION_TAGS) +build_desc := $(TARGET_VENDOR_PRODUCT_NAME)-$(TARGET_BUILD_VARIANT) $(PLATFORM_VERSION) $(BUILD_ID) $(TARGET_VENDOR_RELEASE_BUILD_ID) $(BUILD_VERSION_TAGS) $(intermediate_system_build_prop): PRIVATE_BUILD_DESC := $(build_desc) # The string used to uniquely identify the combined build and product; used by the OTA server. 
@@ -140,7 +223,10 @@ ifeq (,$(strip $(BUILD_FINGERPRINT))) else BF_BUILD_NUMBER := $(BUILD_NUMBER) endif - BUILD_FINGERPRINT := $(PRODUCT_BRAND)/$(TARGET_PRODUCT)/$(TARGET_DEVICE):$(PLATFORM_VERSION)/$(BUILD_ID)/$(BF_BUILD_NUMBER):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS) + ifneq ($(TARGET_VENDOR_RELEASE_BUILD_ID),) + BF_BUILD_NUMBER := $(TARGET_VENDOR_RELEASE_BUILD_ID) + endif + BUILD_FINGERPRINT := $(PRODUCT_BRAND)/$(TARGET_VENDOR_PRODUCT_NAME)/$(TARGET_VENDOR_DEVICE_NAME):$(PLATFORM_VERSION)/$(BUILD_ID)/$(BF_BUILD_NUMBER):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS) endif ifneq ($(words $(BUILD_FINGERPRINT)),1) $(error BUILD_FINGERPRINT cannot contain spaces: "$(BUILD_FINGERPRINT)") @@ -215,8 +301,9 @@ ifneq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_OEM_PROPERTIES),) endif $(hide) TARGET_BUILD_TYPE="$(TARGET_BUILD_VARIANT)" \ TARGET_BUILD_FLAVOR="$(TARGET_BUILD_FLAVOR)" \ - TARGET_DEVICE="$(TARGET_DEVICE)" \ - PRODUCT_NAME="$(TARGET_PRODUCT)" \ + TARGET_DEVICE="$(TARGET_VENDOR_DEVICE_NAME)" \ + AICP_DEVICE="$(TARGET_DEVICE)" \ + PRODUCT_NAME="$(TARGET_VENDOR_PRODUCT_NAME)" \ PRODUCT_BRAND="$(PRODUCT_BRAND)" \ PRODUCT_DEFAULT_LOCALE="$(call get-default-product-locale,$(PRODUCT_LOCALES))" \ PRODUCT_DEFAULT_WIFI_CHANNELS="$(PRODUCT_DEFAULT_WIFI_CHANNELS)" \ @@ -247,6 +334,10 @@ endif TARGET_CPU_ABI="$(TARGET_CPU_ABI)" \ TARGET_CPU_ABI2="$(TARGET_CPU_ABI2)" \ TARGET_AAPT_CHARACTERISTICS="$(TARGET_AAPT_CHARACTERISTICS)" \ + TARGET_UNIFIED_DEVICE="$(TARGET_UNIFIED_DEVICE)" \ + TARGET_SKIP_DEFAULT_LOCALE="$(TARGET_SKIP_DEFAULT_LOCALE)" \ + TARGET_SKIP_PRODUCT_DEVICE="$(TARGET_SKIP_PRODUCT_DEVICE)" \ + $(PRODUCT_BUILD_PROP_OVERRIDES) \ bash $(BUILDINFO_SH) >> $@ $(hide) $(foreach file,$(system_prop_file), \ if [ -f "$(file)" ]; then \ @@ -260,11 +351,12 @@ endif $(hide) echo >> $@; \ echo "#" >> $@; \ echo "# ADDITIONAL_BUILD_PROPERTIES" >> $@; \ - echo "#" >> $@; ) + echo "#" >> $@; \ + echo "# modversion this line enables Tasker root support" >> $@; ) $(hide) 
$(foreach line,$(ADDITIONAL_BUILD_PROPERTIES), \ echo "$(line)" >> $@;) $(hide) cat $(INSTALLED_ANDROID_INFO_TXT_TARGET) | grep 'require version-' | sed -e 's/require version-/ro.build.expect./g' >> $@ - $(hide) build/tools/post_process_props.py $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_PROPERTY_BLACKLIST) + $(hide) build/tools/post_process_props.py $@ "$(PRODUCT_PROPERTY_UBER_OVERRIDES)" $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_PROPERTY_BLACKLIST) build_desc := @@ -277,6 +369,7 @@ endif $(INSTALLED_BUILD_PROP_TARGET): $(intermediate_system_build_prop) $(INSTALLED_RECOVERYIMAGE_TARGET) @echo "Target build info: $@" $(hide) cat $(intermediate_system_build_prop) > $@ + $(hide) echo ro.bootimage.build.fingerprint="$(BUILD_FINGERPRINT_FROM_FILE)">>$@ ifdef INSTALLED_RECOVERYIMAGE_TARGET $(hide) echo ro.expect.recovery_id=`cat $(RECOVERYIMAGE_ID_FILE)` >> $@ endif @@ -393,6 +486,9 @@ endif # exist with the suffixes ".x509.pem" and ".pk8". DEFAULT_KEY_CERT_PAIR := $(DEFAULT_SYSTEM_DEV_CERTIFICATE) +ifneq ($(OTA_PACKAGE_SIGNING_KEY),) + DEFAULT_KEY_CERT_PAIR := $(OTA_PACKAGE_SIGNING_KEY) +endif # Rules that need to be present for the all targets, even # if they don't do anything. 
@@ -467,6 +563,18 @@ endif # ----------------------------------------------------------------- # the ramdisk +BOOT_RAMDISK_COMPRESSOR := $(MINIGZIP) +RECOVERY_RAMDISK_COMPRESSOR := $(MINIGZIP) +ifneq ($(LZMA_RAMDISK_TARGETS),) + ifneq (,$(findstring boot,$(LZMA_RAMDISK_TARGETS))) + BOOT_RAMDISK_COMPRESSOR := lzma -f -c + endif + ifneq (,$(findstring recovery,$(LZMA_RAMDISK_TARGETS))) + RECOVERY_RAMDISK_COMPRESSOR := lzma -f -c + TARGET_NOT_USE_GZIP_RECOVERY_RAMDISK := true + endif +endif + INTERNAL_RAMDISK_FILES := $(filter $(TARGET_ROOT_OUT)/%, \ $(ALL_PREBUILT) \ $(ALL_GENERATED_SOURCES) \ @@ -478,12 +586,12 @@ BUILT_RAMDISK_TARGET := $(PRODUCT_OUT)/ramdisk.img INSTALLED_RAMDISK_TARGET := $(BUILT_RAMDISK_TARGET) $(INSTALLED_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_RAMDISK_FILES) | $(MINIGZIP) $(call pretty,"Target ram disk: $@") - $(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_ROOT_OUT) | $(MINIGZIP) > $@ + $(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_ROOT_OUT) | $(BOOT_RAMDISK_COMPRESSOR) > $@ .PHONY: ramdisk-nodeps ramdisk-nodeps: $(MKBOOTFS) | $(MINIGZIP) @echo "make $@: ignoring dependencies" - $(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_ROOT_OUT) | $(MINIGZIP) > $(INSTALLED_RAMDISK_TARGET) + $(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_ROOT_OUT) | $(BOOT_RAMDISK_COMPRESSOR) > $(INSTALLED_RAMDISK_TARGET) ifneq ($(strip $(TARGET_NO_KERNEL)),true) @@ -525,6 +633,13 @@ INTERNAL_MKBOOTIMG_VERSION_ARGS := \ --os_version $(PLATFORM_VERSION) \ --os_patch_level $(PLATFORM_SECURITY_PATCH) +INSTALLED_DTIMAGE_TARGET := $(PRODUCT_OUT)/dt.img + +ifeq ($(strip $(BOARD_KERNEL_SEPARATED_DT)),true) + INTERNAL_BOOTIMAGE_ARGS += --dt $(INSTALLED_DTIMAGE_TARGET) + BOOTIMAGE_EXTRA_DEPS := $(INSTALLED_DTIMAGE_TARGET) +endif + INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img # BOARD_USES_RECOVERY_AS_BOOT = true must have BOARD_BUILD_SYSTEM_ROOT_IMAGE = true. 
@@ -540,11 +655,12 @@ ifeq ($(TARGET_BOOTIMAGE_USE_EXT2),true) $(error TARGET_BOOTIMAGE_USE_EXT2 is not supported anymore) else ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_BOOT_SIGNER)) # TARGET_BOOTIMAGE_USE_EXT2 != true -$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(BOOT_SIGNER) +$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(BOOT_SIGNER) $(BOOTIMAGE_EXTRA_DEPS) $(call pretty,"Target boot image: $@") $(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@ $(BOOT_SIGNER) /boot $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).x509.pem $@ $(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE)) + @echo "Made boot image: $@" .PHONY: bootimage-nodeps bootimage-nodeps: $(MKBOOTIMG) $(BOOT_SIGNER) @@ -552,14 +668,18 @@ bootimage-nodeps: $(MKBOOTIMG) $(BOOT_SIGNER) $(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET) $(BOOT_SIGNER) /boot $(INSTALLED_BOOTIMAGE_TARGET) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).x509.pem $(INSTALLED_BOOTIMAGE_TARGET) $(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE)) + @echo "Made boot image: $@" + +else ifndef BOARD_CUSTOM_BOOTIMG_MK -else ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT)) # PRODUCT_SUPPORTS_BOOT_SIGNER != true + ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT)) # PRODUCT_SUPPORTS_BOOT_SIGNER != true -$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(VBOOT_SIGNER) +$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(VBOOT_SIGNER) $(BOOTIMAGE_EXTRA_DEPS) $(call pretty,"Target boot image: $@") $(hide) 
$(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@.unsigned $(VBOOT_SIGNER) $(FUTILITY) $@.unsigned $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $@.keyblock $@ $(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE)) + @echo "Made boot image: $@" .PHONY: bootimage-nodeps bootimage-nodeps: $(MKBOOTIMG) $(VBOOT_SIGNER) @@ -567,21 +687,25 @@ bootimage-nodeps: $(MKBOOTIMG) $(VBOOT_SIGNER) $(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET).unsigned $(VBOOT_SIGNER) $(FUTILITY) $(INSTALLED_BOOTIMAGE_TARGET).unsigned $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(INSTALLED_BOOTIMAGE_TARGET).keyblock $(INSTALLED_BOOTIMAGE_TARGET) $(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE)) + @echo "Made boot image: $@" -else # PRODUCT_SUPPORTS_VBOOT != true + else # PRODUCT_SUPPORTS_VBOOT != true -$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) +$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(BOOTIMAGE_EXTRA_DEPS) $(call pretty,"Target boot image: $@") $(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@ $(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE)) + @echo "Made boot image: $@" .PHONY: bootimage-nodeps bootimage-nodeps: $(MKBOOTIMG) @echo "make $@: ignoring dependencies" $(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET) 
$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE)) + @echo "Made boot image: $@" -endif # TARGET_BOOTIMAGE_USE_EXT2 + endif # PRODUCT_SUPPORTS_VBOOT +endif # TARGET_BOOTIMAGE_USE_EXT2 / BOARD_CUSTOM_BOOTIMG_MK endif # BOARD_USES_RECOVERY_AS_BOOT else # TARGET_NO_KERNEL @@ -686,7 +810,7 @@ endif # TARGET_BUILD_APPS $(kernel_notice_file): \ $(BUILD_SYSTEM)/LINUX_KERNEL_COPYING \ | $(ACP) - @echo Copying: $@ + @echo "Copying:"" $@" $(hide) mkdir -p $(dir $@) $(hide) $(ACP) $< $@ @@ -745,6 +869,15 @@ INTERNAL_USERIMAGES_EXT_VARIANT := ext4 endif endif endif +ifeq ($(TARGET_USERIMAGES_USE_F2FS),true) +INTERNAL_USERIMAGES_USE_F2FS := true +ifeq ($(INTERNAL_USERIMAGES_EXT_VARIANT),) +INTERNAL_USERIMAGES_EXT_VARIANT := f2fs +endif +endif +ifeq ($(TARGET_USERIMAGES_USE_YAFFS),true) +INTERNAL_USERIMAGES_USE_YAFFS := true +endif # These options tell the recovery updater/installer how to mount the partitions writebale. # =[|]... @@ -757,12 +890,15 @@ ifneq (true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED)) INTERNAL_USERIMAGES_SPARSE_EXT_FLAG := -s endif +INTERNAL_USERIMAGES_DEPS := ifeq ($(INTERNAL_USERIMAGES_USE_EXT),true) -INTERNAL_USERIMAGES_DEPS := $(SIMG2IMG) INTERNAL_USERIMAGES_DEPS += $(MKEXTUSERIMG) $(MAKE_EXT4FS) $(E2FSCK) -ifeq ($(TARGET_USERIMAGES_USE_F2FS),true) +endif +ifeq ($(INTERNAL_USERIMAGES_USE_F2FS),true) INTERNAL_USERIMAGES_DEPS += $(MKF2FSUSERIMG) $(MAKE_F2FS) endif +ifeq ($(INTERNAL_USERIMAGES_USE_YAFFS),true) +INTERNAL_USERIMAGES_DEPS += $(MKYAFFS2) endif ifneq (true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED)) @@ -772,6 +908,8 @@ ifneq ($(filter $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE) $(BOARD_SYSTEMIMAGE_FILE_S INTERNAL_USERIMAGES_DEPS += $(MAKE_SQUASHFS) $(MKSQUASHFSUSERIMG) $(IMG2SIMG) endif +INTERNAL_USERIMAGES_DEPS += $(SIMG2IMG) + INTERNAL_USERIMAGES_BINARY_PATHS := $(sort $(dir $(INTERNAL_USERIMAGES_DEPS))) ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY)) @@ -792,6 +930,7 @@ 
define generate-userimage-prop-dictionary $(if $(INTERNAL_USERIMAGES_EXT_VARIANT),$(hide) echo "fs_type=$(INTERNAL_USERIMAGES_EXT_VARIANT)" >> $(1)) $(if $(BOARD_SYSTEMIMAGE_PARTITION_SIZE),$(hide) echo "system_size=$(BOARD_SYSTEMIMAGE_PARTITION_SIZE)" >> $(1)) $(if $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "system_fs_type=$(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE)" >> $(1)) +$(if $(BOARD_SYSTEMIMAGE_EXTFS_INODE_COUNT),$(hide) echo "system_extfs_inode_count=$(BOARD_SYSTEMIMAGE_EXTFS_INODE_COUNT)" >> $(1)) $(if $(BOARD_SYSTEMIMAGE_JOURNAL_SIZE),$(hide) echo "system_journal_size=$(BOARD_SYSTEMIMAGE_JOURNAL_SIZE)" >> $(1)) $(if $(BOARD_HAS_EXT4_RESERVED_BLOCKS),$(hide) echo "has_ext4_reserved_blocks=$(BOARD_HAS_EXT4_RESERVED_BLOCKS)" >> $(1)) $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "system_squashfs_compressor=$(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR)" >> $(1)) @@ -801,9 +940,12 @@ $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "system_squashf $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH),$(hide) echo "system_base_fs_file=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH)" >> $(1)) $(if $(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "userdata_fs_type=$(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE)" >> $(1)) $(if $(BOARD_USERDATAIMAGE_PARTITION_SIZE),$(hide) echo "userdata_size=$(BOARD_USERDATAIMAGE_PARTITION_SIZE)" >> $(1)) +$(if $(BOARD_USERDATAEXTRAIMAGE_PARTITION_SIZE),$(hide) echo "userdataextra_size=$(BOARD_USERDATAEXTRAIMAGE_PARTITION_SIZE)" >> $(1)) +$(if $(BOARD_USERDATAEXTRAIMAGE_PARTITION_NAME),$(hide) echo "userdataextra_name=$(BOARD_USERDATAEXTRAIMAGE_PARTITION_NAME)" >> $(1)) $(if $(BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "cache_fs_type=$(BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE)" >> $(1)) $(if $(BOARD_CACHEIMAGE_PARTITION_SIZE),$(hide) echo "cache_size=$(BOARD_CACHEIMAGE_PARTITION_SIZE)" >> $(1)) $(if $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE),$(hide) echo 
"vendor_fs_type=$(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE)" >> $(1)) +$(if $(BOARD_VENDORIMAGE_EXTFS_INODE_COUNT),$(hide) echo "vendor_extfs_inode_count=$(BOARD_VENDORIMAGE_EXTFS_INODE_COUNT)" >> $(1)) $(if $(BOARD_VENDORIMAGE_PARTITION_SIZE),$(hide) echo "vendor_size=$(BOARD_VENDORIMAGE_PARTITION_SIZE)" >> $(1)) $(if $(BOARD_VENDORIMAGE_JOURNAL_SIZE),$(hide) echo "vendor_journal_size=$(BOARD_VENDORIMAGE_JOURNAL_SIZE)" >> $(1)) $(if $(BOARD_VENDORIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "vendor_squashfs_compressor=$(BOARD_VENDORIMAGE_SQUASHFS_COMPRESSOR)" >> $(1)) @@ -813,7 +955,9 @@ $(if $(BOARD_VENDORIMAGE_SQUASHFS_DISABLE_4K_ALIGN),$(hide) echo "vendor_squashf $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH),$(hide) echo "vendor_base_fs_file=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH)" >> $(1)) $(if $(BOARD_OEMIMAGE_PARTITION_SIZE),$(hide) echo "oem_size=$(BOARD_OEMIMAGE_PARTITION_SIZE)" >> $(1)) $(if $(BOARD_OEMIMAGE_JOURNAL_SIZE),$(hide) echo "oem_journal_size=$(BOARD_OEMIMAGE_JOURNAL_SIZE)" >> $(1)) +$(if $(BOARD_OEMIMAGE_EXTFS_INODE_COUNT),$(hide) echo "oem_extfs_inode_count=$(BOARD_OEMIMAGE_EXTFS_INODE_COUNT)" >> $(1)) $(if $(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG),$(hide) echo "extfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG)" >> $(1)) +$(if $(mkyaffs2_extra_flags),$(hide) echo "mkyaffs2_extra_flags=$(mkyaffs2_extra_flags)" >> $(1)) $(if $(INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG),$(hide) echo "squashfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG)" >> $(1)) $(hide) echo "selinux_fc=$(SELINUX_FC)" >> $(1) $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_BOOT_SIGNER),$(hide) echo "boot_signer=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_BOOT_SIGNER)" >> $(1)) @@ -845,15 +989,19 @@ ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_ INTERNAL_RECOVERYIMAGE_FILES := $(filter $(TARGET_RECOVERY_OUT)/%, \ $(ALL_DEFAULT_INSTALLED_MODULES)) -recovery_initrc := $(call include-path-for, 
recovery)/etc/init.rc +recovery_initrc := $(call project-path-for,recovery)/etc/init.rc recovery_sepolicy := $(call intermediates-dir-for,ETC,sepolicy.recovery)/sepolicy.recovery recovery_kernel := $(INSTALLED_KERNEL_TARGET) # same as a non-recovery system recovery_ramdisk := $(PRODUCT_OUT)/ramdisk-recovery.img +recovery_uncompressed_ramdisk := $(PRODUCT_OUT)/ramdisk-recovery.cpio recovery_build_prop := $(intermediate_system_build_prop) -recovery_resources_common := $(call include-path-for, recovery)/res +recovery_resources_common := $(call project-path-for,recovery)/res # Set recovery_density to the density bucket of the device. recovery_density := unknown +ifneq (,$(TARGET_RECOVERY_DENSITY)) +recovery_density := $(filter %dpi,$(TARGET_RECOVERY_DENSITY)) +else ifneq (,$(PRODUCT_AAPT_PREF_CONFIG)) # If PRODUCT_AAPT_PREF_CONFIG includes a dpi bucket, then use that value. recovery_density := $(filter %dpi,$(PRODUCT_AAPT_PREF_CONFIG)) @@ -861,6 +1009,7 @@ else # Otherwise, use the default medium density. recovery_densities := mdpi endif +endif ifneq (,$(wildcard $(recovery_resources_common)-$(recovery_density))) recovery_resources_common := $(recovery_resources_common)-$(recovery_density) @@ -868,22 +1017,37 @@ else recovery_resources_common := $(recovery_resources_common)-xhdpi endif +ifneq (,$(RECOVERY_EXTRA_RESOURCE_DIR)) +recovery_resources_extra := $(strip \ + $(wildcard $(RECOVERY_EXTRA_RESOURCE_DIR)/res-$(recovery_density))) +endif + # Select the 18x32 font on high-density devices (xhdpi and up); and # the 12x22 font on other devices. Note that the font selected here # can be overridden for a particular device by putting a font.png in # its private recovery resources. 
ifneq (,$(filter xxxhdpi 560dpi xxhdpi 400dpi xhdpi,$(recovery_density))) -recovery_font := $(call include-path-for, recovery)/fonts/18x32.png +recovery_font := $(call project-path-for,recovery)/fonts/18x32.png +else +recovery_font := $(call project-path-for,recovery)/fonts/12x22.png +endif + +ifneq ($(TARGET_RECOVERY_DEVICE_DIRS),) +recovery_root_private := $(strip \ + $(foreach d,$(TARGET_RECOVERY_DEVICE_DIRS), $(wildcard $(d)/recovery/root))) else -recovery_font := $(call include-path-for, recovery)/fonts/12x22.png +recovery_root_private := $(strip $(wildcard $(TARGET_DEVICE_DIR)/recovery/root)) +endif +ifneq ($(recovery_root_private),) +recovery_root_deps := $(shell find $(recovery_root_private) -type f) endif ifndef TARGET_PRIVATE_RES_DIRS TARGET_PRIVATE_RES_DIRS := $(wildcard $(TARGET_DEVICE_DIR)/recovery/res) endif recovery_resource_deps := $(shell find $(recovery_resources_common) \ - $(TARGET_PRIVATE_RES_DIRS) -type f) + $(recovery_resources_extra) $(TARGET_PRIVATE_RES_DIRS) -type f) ifdef TARGET_RECOVERY_FSTAB recovery_fstab := $(TARGET_RECOVERY_FSTAB) else @@ -907,17 +1071,13 @@ endif # b) We build a single image that contains boot and recovery both # (BOARD_USES_RECOVERY_AS_BOOT = true). -ifeq (,$(filter true, $(BOARD_USES_FULL_RECOVERY_IMAGE) $(BOARD_USES_RECOVERY_AS_BOOT))) +ifeq (,$(filter true, $(BOARD_USES_FULL_RECOVERY_IMAGE) $(BOARD_USES_RECOVERY_AS_BOOT) $(TARGET_NOT_USE_GZIP_RECOVERY_RAMDISK))) # Named '.dat' so we don't attempt to use imgdiff for patching it. 
RECOVERY_RESOURCE_ZIP := $(TARGET_OUT)/etc/recovery-resource.dat else RECOVERY_RESOURCE_ZIP := endif -ifeq ($(TARGET_PRIVATE_RES_DIRS),) - $(info No private recovery resources for TARGET_DEVICE $(TARGET_DEVICE)) -endif - ifeq ($(recovery_fstab),) $(info No recovery.fstab for TARGET_DEVICE $(TARGET_DEVICE)) endif @@ -929,6 +1089,11 @@ INTERNAL_RECOVERYIMAGE_ARGS := \ # Assumes this has already been stripped ifdef BOARD_KERNEL_CMDLINE + ifdef BUILD_ENFORCE_SELINUX + ifneq (,$(filter androidboot.selinux=permissive androidboot.selinux=disabled, $(BOARD_KERNEL_CMDLINE))) + $(error "Trying to apply non-default selinux settings. Aborting") + endif + endif INTERNAL_RECOVERYIMAGE_ARGS += --cmdline "$(BOARD_KERNEL_CMDLINE)" endif ifdef BOARD_KERNEL_BASE @@ -938,12 +1103,25 @@ BOARD_KERNEL_PAGESIZE := $(strip $(BOARD_KERNEL_PAGESIZE)) ifdef BOARD_KERNEL_PAGESIZE INTERNAL_RECOVERYIMAGE_ARGS += --pagesize $(BOARD_KERNEL_PAGESIZE) endif +ifeq ($(strip $(BOARD_KERNEL_SEPARATED_DT)),true) + INTERNAL_RECOVERYIMAGE_ARGS += --dt $(INSTALLED_DTIMAGE_TARGET) + RECOVERYIMAGE_EXTRA_DEPS := $(INSTALLED_DTIMAGE_TARGET) +endif # Keys authorized to sign OTA packages this build will accept. The # build always uses dev-keys for this; release packaging tools will # substitute other keys for this one. OTA_PUBLIC_KEYS := $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem +ifneq ($(OTA_PACKAGE_SIGNING_KEY),) + OTA_PUBLIC_KEYS := $(OTA_PACKAGE_SIGNING_KEY).x509.pem + PRODUCT_EXTRA_RECOVERY_KEYS := $(DEFAULT_SYSTEM_DEV_CERTIFICATE) +else + PRODUCT_EXTRA_RECOVERY_KEYS += \ + build/target/product/security/aicp \ + build/target/product/security/aicp-devkey +endif + # Generate a file containing the keys that will be read by the # recovery binary. 
RECOVERY_INSTALL_OTA_KEYS := \ @@ -957,15 +1135,13 @@ $(RECOVERY_INSTALL_OTA_KEYS): $(OTA_PUBLIC_KEYS) $(DUMPKEY_JAR) $(extra_keys) @mkdir -p $(dir $@) java -jar $(DUMPKEY_JAR) $(PRIVATE_OTA_PUBLIC_KEYS) $(extra_keys) > $@ -RECOVERYIMAGE_ID_FILE := $(PRODUCT_OUT)/recovery.id -# $(1): output file -define build-recoveryimage-target - @echo ----- Making recovery image ------ +define build-recoveryramdisk + @echo "----- Making recovery ramdisk ------" $(hide) mkdir -p $(TARGET_RECOVERY_OUT) $(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/etc $(TARGET_RECOVERY_ROOT_OUT)/sdcard $(TARGET_RECOVERY_ROOT_OUT)/tmp - @echo Copying baseline ramdisk... - $(hide) rsync -a --exclude=etc --exclude=sdcard $(IGNORE_CACHE_LINK) $(TARGET_ROOT_OUT) $(TARGET_RECOVERY_OUT) # "cp -Rf" fails to overwrite broken symlinks on Mac. - @echo Modifying ramdisk contents... + @echo "Copying baseline ramdisk..." + $(hide) rsync -a --exclude=etc --exclude=sdcard --exclude=vendor $(TARGET_ROOT_OUT) $(TARGET_RECOVERY_OUT) # "cp -Rf" fails to overwrite broken symlinks on Mac. + @echo "Modifying ramdisk contents..." 
$(hide) rm -f $(TARGET_RECOVERY_ROOT_OUT)/init*.rc $(hide) cp -f $(recovery_initrc) $(TARGET_RECOVERY_ROOT_OUT)/ $(hide) rm -f $(TARGET_RECOVERY_ROOT_OUT)/sepolicy @@ -974,9 +1150,13 @@ define build-recoveryimage-target $(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/res $(hide) rm -rf $(TARGET_RECOVERY_ROOT_OUT)/res/* $(hide) cp -rf $(recovery_resources_common)/* $(TARGET_RECOVERY_ROOT_OUT)/res + $(hide) $(foreach item,$(recovery_resources_extra), \ + cp -rf $(item)/* $(TARGET_RECOVERY_ROOT_OUT)/res;) $(hide) cp -f $(recovery_font) $(TARGET_RECOVERY_ROOT_OUT)/res/images/font.png + $(hide) $(foreach item,$(recovery_root_private), \ + cp -rf $(item) $(TARGET_RECOVERY_OUT)/;) $(hide) $(foreach item,$(TARGET_PRIVATE_RES_DIRS), \ - cp -rf $(item) $(TARGET_RECOVERY_ROOT_OUT)/$(newline)) + cp -rf $(item) $(TARGET_RECOVERY_ROOT_OUT)/$(newline);) $(hide) $(foreach item,$(recovery_fstab), \ cp -f $(item) $(TARGET_RECOVERY_ROOT_OUT)/etc/recovery.fstab) $(if $(strip $(recovery_wipe)), \ @@ -989,7 +1169,11 @@ define build-recoveryimage-target $(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/system_root; \ rm -rf $(TARGET_RECOVERY_ROOT_OUT)/system; \ ln -sf /system_root/system $(TARGET_RECOVERY_ROOT_OUT)/system) # Mount the system_root_image to /system_root and symlink /system. 
- $(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_RECOVERY_ROOT_OUT) | $(MINIGZIP) > $(recovery_ramdisk) +endef + +RECOVERYIMAGE_ID_FILE := $(PRODUCT_OUT)/recovery.id +# $(1): output file +define build-recoveryimage-target $(if $(filter true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT)), \ $(hide) $(MKBOOTIMG) $(INTERNAL_RECOVERYIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1).unsigned, \ $(hide) $(MKBOOTIMG) $(INTERNAL_RECOVERYIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1) --id > $(RECOVERYIMAGE_ID_FILE)) @@ -1004,7 +1188,7 @@ define build-recoveryimage-target $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)), \ $(hide) $(call assert-max-image-size,$(1),$(BOARD_BOOTIMAGE_PARTITION_SIZE)), \ $(hide) $(call assert-max-image-size,$(1),$(BOARD_RECOVERYIMAGE_PARTITION_SIZE))) - @echo ----- Made recovery image: $(1) -------- + @echo "Made recovery image: $@" endef ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true) @@ -1022,20 +1206,87 @@ $(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) $(MINIGZIP) \ $(recovery_build_prop) $(recovery_resource_deps) \ $(recovery_fstab) \ $(RECOVERY_INSTALL_OTA_KEYS) - $(call pretty,"Target boot image from recovery: $@") - $(call build-recoveryimage-target, $@) + $(call pretty,"Target boot image from recovery: $@") + $(call build-recoveryramdisk) + $(hide) $(MKBOOTFS) $(TARGET_RECOVERY_ROOT_OUT) > $(recovery_uncompressed_ramdisk) + $(hide) $(RECOVERY_RAMDISK_COMPRESSOR) < $(recovery_uncompressed_ramdisk) > $(recovery_ramdisk) + $(call build-recoveryimage-target, $@) endif -$(INSTALLED_RECOVERYIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) $(MINIGZIP) \ +$(recovery_uncompressed_ramdisk): $(MKBOOTFS) \ $(INSTALLED_RAMDISK_TARGET) \ $(INSTALLED_BOOTIMAGE_TARGET) \ $(INTERNAL_RECOVERYIMAGE_FILES) \ - $(recovery_initrc) $(recovery_sepolicy) $(recovery_kernel) \ + $(recovery_initrc) $(recovery_sepolicy) \ $(INSTALLED_2NDBOOTLOADER_TARGET) \ - $(recovery_build_prop) 
$(recovery_resource_deps) \ + $(recovery_build_prop) $(recovery_resource_deps) $(recovery_root_deps) \ $(recovery_fstab) \ $(RECOVERY_INSTALL_OTA_KEYS) - $(call build-recoveryimage-target, $@) + $(call build-recoveryramdisk) + @echo "----- Making uncompressed recovery ramdisk ------" + $(hide) $(MKBOOTFS) $(TARGET_RECOVERY_ROOT_OUT) > $@ + +$(recovery_ramdisk): $(MINIGZIP) \ + $(recovery_uncompressed_ramdisk) + @echo "----- Making compressed recovery ramdisk ------" + $(hide) $(RECOVERY_RAMDISK_COMPRESSOR) < $(recovery_uncompressed_ramdisk) > $@ + +ifndef BOARD_CUSTOM_BOOTIMG_MK +$(INSTALLED_RECOVERYIMAGE_TARGET): $(MKBOOTIMG) $(recovery_ramdisk) $(recovery_kernel) \ + $(RECOVERYIMAGE_EXTRA_DEPS) + @echo "----- Making recovery image ------" + $(call build-recoveryimage-target, $@) +endif # BOARD_CUSTOM_BOOTIMG_MK + +# The system partition needs room for the recovery image as well. We +# now store the recovery image as a binary patch using the boot image +# as the source (since they are very similar). Generate the patch so +# we can see how big it's going to be, and include that in the system +# image size check calculation. 
+ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),) +intermediates := $(call intermediates-dir-for,PACKAGING,recovery_patch) +ifndef BOARD_CANT_BUILD_RECOVERY_FROM_BOOT_PATCH +RECOVERY_FROM_BOOT_PATCH := $(intermediates)/recovery_from_boot.p +else +RECOVERY_FROM_BOOT_PATCH := +endif +$(RECOVERY_FROM_BOOT_PATCH): $(INSTALLED_RECOVERYIMAGE_TARGET) \ + $(INSTALLED_BOOTIMAGE_TARGET) \ + $(HOST_OUT_EXECUTABLES)/imgdiff \ + $(HOST_OUT_EXECUTABLES)/bsdiff + @echo -e ${CL_CYN}"Construct recovery from boot"${CL_RST} + mkdir -p $(dir $@) +ifeq ($(TARGET_NOT_USE_GZIP_RECOVERY_RAMDISK),true) + PATH=$(HOST_OUT_EXECUTABLES):$$PATH $(HOST_OUT_EXECUTABLES)/bsdiff $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_RECOVERYIMAGE_TARGET) $@ +else + PATH=$(HOST_OUT_EXECUTABLES):$$PATH $(HOST_OUT_EXECUTABLES)/imgdiff $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_RECOVERYIMAGE_TARGET) $@ +endif +endif + +# The system partition needs room for the recovery image as well. We +# now store the recovery image as a binary patch using the boot image +# as the source (since they are very similar). Generate the patch so +# we can see how big it's going to be, and include that in the system +# image size check calculation. 
+ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),) +intermediates := $(call intermediates-dir-for,PACKAGING,recovery_patch) +ifndef BOARD_CANT_BUILD_RECOVERY_FROM_BOOT_PATCH +RECOVERY_FROM_BOOT_PATCH := $(intermediates)/recovery_from_boot.p +else +RECOVERY_FROM_BOOT_PATCH := +endif +$(RECOVERY_FROM_BOOT_PATCH): $(INSTALLED_RECOVERYIMAGE_TARGET) \ + $(INSTALLED_BOOTIMAGE_TARGET) \ + $(HOST_OUT_EXECUTABLES)/imgdiff \ + $(HOST_OUT_EXECUTABLES)/bsdiff + @echo -e ${CL_CYN}"Construct recovery from boot"${CL_RST} + mkdir -p $(dir $@) +ifeq ($(TARGET_NOT_USE_GZIP_RECOVERY_RAMDISK),true) + PATH=$(HOST_OUT_EXECUTABLES):$$PATH $(HOST_OUT_EXECUTABLES)/bsdiff $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_RECOVERYIMAGE_TARGET) $@ +else + PATH=$(HOST_OUT_EXECUTABLES):$$PATH $(HOST_OUT_EXECUTABLES)/imgdiff $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_RECOVERYIMAGE_TARGET) $@ +endif +endif ifdef RECOVERY_RESOURCE_ZIP $(RECOVERY_RESOURCE_ZIP): $(INSTALLED_RECOVERYIMAGE_TARGET) | $(ZIPTIME) @@ -1047,6 +1298,9 @@ endif .PHONY: recoveryimage-nodeps recoveryimage-nodeps: @echo "make $@: ignoring dependencies" + $(call build-recoveryramdisk) + $(hide) $(MKBOOTFS) $(TARGET_RECOVERY_ROOT_OUT) > $(recovery_uncompressed_ramdisk) + $(hide) $(RECOVERY_RAMDISK_COMPRESSOR) < $(recovery_uncompressed_ramdisk) > $(recovery_ramdisk) $(call build-recoveryimage-target, $(INSTALLED_RECOVERYIMAGE_TARGET)) else # INSTALLED_RECOVERYIMAGE_TARGET not defined @@ -1056,14 +1310,32 @@ endif .PHONY: recoveryimage recoveryimage: $(INSTALLED_RECOVERYIMAGE_TARGET) $(RECOVERY_RESOURCE_ZIP) -ifeq ($(BOARD_NAND_PAGE_SIZE),) +INSTALLED_RECOVERYZIP_TARGET := $(PRODUCT_OUT)/utilities/update.zip +$(INSTALLED_RECOVERYZIP_TARGET): $(INSTALLED_RECOVERYIMAGE_TARGET) $(TARGET_OUT)/bin/updater + @echo ----- Making recovery zip ----- + ./build/tools/device/mkrecoveryzip.sh $(PRODUCT_OUT) $(HOST_OUT_JAVA_LIBRARIES)/signapk.jar + +.PHONY: recoveryzip +recoveryzip: $(INSTALLED_RECOVERYZIP_TARGET) + +ifneq ($(BOARD_NAND_PAGE_SIZE),) 
+mkyaffs2_extra_flags := -c $(BOARD_NAND_PAGE_SIZE) +else +mkyaffs2_extra_flags := BOARD_NAND_PAGE_SIZE := 2048 endif -ifeq ($(BOARD_NAND_SPARE_SIZE),) +ifneq ($(BOARD_NAND_SPARE_SIZE),) +mkyaffs2_extra_flags += -s $(BOARD_NAND_SPARE_SIZE) +else BOARD_NAND_SPARE_SIZE := 64 endif +ifdef BOARD_CUSTOM_BOOTIMG_MK +include $(BOARD_CUSTOM_BOOTIMG_MK) +endif + + # ----------------------------------------------------------------- # system image # @@ -1080,7 +1352,14 @@ INTERNAL_SYSTEMIMAGE_FILES := $(filter $(TARGET_OUT)/%, \ $(RECOVERY_RESOURCE_ZIP)) -FULL_SYSTEMIMAGE_DEPS := $(INTERNAL_SYSTEMIMAGE_FILES) $(INTERNAL_USERIMAGES_DEPS) +systemimage-changelog: $(INTERNAL_SYSTEMIMAGE_FILES) + @echo -e "Making changelog!" + $(hide) APKCERTS=$(APKCERTS_FILE) ./vendor/aicp/tools/changelog + +.PHONY: systemimage-changelog + +FULL_SYSTEMIMAGE_DEPS := $(INTERNAL_SYSTEMIMAGE_FILES) $(INTERNAL_USERIMAGES_DEPS) systemimage-changelog + # ----------------------------------------------------------------- # installed file list # Depending on anything that $(BUILT_SYSTEMIMAGE) depends on. @@ -1092,7 +1371,8 @@ $(INSTALLED_FILES_FILE): $(FULL_SYSTEMIMAGE_DEPS) @echo Installed file list: $@ @mkdir -p $(dir $@) @rm -f $@ - $(hide) build/tools/fileslist.py $(TARGET_OUT) > $@ + $(hide) build/tools/fileslist.py $(TARGET_OUT) > $(@:.txt=.json) + $(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@ .PHONY: installed-file-list installed-file-list: $(INSTALLED_FILES_FILE) @@ -1118,6 +1398,18 @@ define create-system-vendor-symlink endef endif +# Only Create symlink /system/vendor to /vendor if necessary. +ifdef BOARD_NEEDS_VENDORIMAGE_SYMLINK +define create-system-vendor-symlink +$(hide) if [ -d $(TARGET_OUT)/vendor ] && [ ! -h $(TARGET_OUT)/vendor ]; then \ + echo 'Non-symlink $(TARGET_OUT)/vendor detected!' 1>&2; \ + echo 'You cannot install files to $(TARGET_OUT)/vendor while building a separate vendor.img!' 
1>&2; \ + exit 1; \ +fi +$(hide) ln -sf /vendor $(TARGET_OUT)/vendor +endef +endif + # $(1): output file define build-systemimage-target @echo "Target system fs image: $(1)" @@ -1146,30 +1438,17 @@ endef $(BUILT_SYSTEMIMAGE): $(FULL_SYSTEMIMAGE_DEPS) $(INSTALLED_FILES_FILE) $(call build-systemimage-target,$@) -INSTALLED_SYSTEMIMAGE := $(PRODUCT_OUT)/system.img -SYSTEMIMAGE_SOURCE_DIR := $(TARGET_OUT) - -# The system partition needs room for the recovery image as well. We -# now store the recovery image as a binary patch using the boot image -# as the source (since they are very similar). Generate the patch so -# we can see how big it's going to be, and include that in the system -# image size check calculation. -ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),) -ifneq ($(BOARD_USES_FULL_RECOVERY_IMAGE),true) -intermediates := $(call intermediates-dir-for,PACKAGING,recovery_patch) -RECOVERY_FROM_BOOT_PATCH := $(intermediates)/recovery_from_boot.p -$(RECOVERY_FROM_BOOT_PATCH): $(INSTALLED_RECOVERYIMAGE_TARGET) \ - $(INSTALLED_BOOTIMAGE_TARGET) \ - $(HOST_OUT_EXECUTABLES)/imgdiff \ - $(HOST_OUT_EXECUTABLES)/bsdiff - @echo "Construct recovery from boot" - mkdir -p $(dir $@) - PATH=$(HOST_OUT_EXECUTABLES):$$PATH $(HOST_OUT_EXECUTABLES)/imgdiff $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_RECOVERYIMAGE_TARGET) $@ -else # $(BOARD_USES_FULL_RECOVERY_IMAGE) == true -RECOVERY_FROM_BOOT_PATCH := $(INSTALLED_RECOVERYIMAGE_TARGET) +ifndef I_WANT_A_QUAIL_STAR +ifneq ($(WITHOUT_CHECK_API),true) +$(BUILT_SYSTEMIMAGE): checkapi +ifneq ($(TARGET_DISABLE_CMSDK),true) +$(BUILT_SYSTEMIMAGE): checkapi-cm +endif endif endif +INSTALLED_SYSTEMIMAGE := $(PRODUCT_OUT)/system.img +SYSTEMIMAGE_SOURCE_DIR := $(TARGET_OUT) $(INSTALLED_SYSTEMIMAGE): $(BUILT_SYSTEMIMAGE) $(RECOVERY_FROM_BOOT_PATCH) | $(ACP) @echo "Install system fs image: $@" @@ -1495,7 +1774,8 @@ $(INSTALLED_FILES_FILE_VENDOR) : $(INTERNAL_VENDORIMAGE_FILES) @echo Installed file list: $@ @mkdir -p $(dir $@) @rm -f $@ - $(hide) 
build/tools/fileslist.py $(TARGET_OUT_VENDOR) > $@ + $(hide) build/tools/fileslist.py $(TARGET_OUT_VENDOR) > $(@:.txt=.json) + $(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@ vendorimage_intermediates := \ $(call intermediates-dir-for,PACKAGING,vendor) @@ -1539,9 +1819,6 @@ build_ota_package := true ifeq ($(TARGET_SKIP_OTA_PACKAGE),true) build_ota_package := false endif -ifeq ($(BUILD_OS),darwin) -build_ota_package := false -endif ifneq ($(strip $(SANITIZE_TARGET)),) build_ota_package := false endif @@ -1564,9 +1841,12 @@ endif ifeq ($(build_ota_package),true) OTATOOLS := $(HOST_OUT_EXECUTABLES)/minigzip \ $(HOST_OUT_EXECUTABLES)/aapt \ + $(HOST_OUT_EXECUTABLES)/adb \ $(HOST_OUT_EXECUTABLES)/mkbootfs \ $(HOST_OUT_EXECUTABLES)/mkbootimg \ + $(HOST_OUT_EXECUTABLES)/unpackbootimg \ $(HOST_OUT_EXECUTABLES)/fs_config \ + $(HOST_OUT_EXECUTABLES)/mkyaffs2image \ $(HOST_OUT_EXECUTABLES)/zipalign \ $(HOST_OUT_EXECUTABLES)/bsdiff \ $(HOST_OUT_EXECUTABLES)/imgdiff \ @@ -1583,6 +1863,7 @@ OTATOOLS := $(HOST_OUT_EXECUTABLES)/minigzip \ $(HOST_OUT_EXECUTABLES)/e2fsck \ $(HOST_OUT_EXECUTABLES)/build_verity_tree \ $(HOST_OUT_EXECUTABLES)/verity_signer \ + $(HOST_OUT_EXECUTABLES)/verity_verifier \ $(HOST_OUT_EXECUTABLES)/append2simg \ $(HOST_OUT_EXECUTABLES)/img2simg \ $(HOST_OUT_EXECUTABLES)/boot_signer \ @@ -1608,6 +1889,7 @@ OTATOOLS += \ $(HOST_LIBRARY_PATH)/libext2_profile-host$(HOST_SHLIB_SUFFIX) \ $(HOST_LIBRARY_PATH)/libext2_quota-host$(HOST_SHLIB_SUFFIX) \ $(HOST_LIBRARY_PATH)/libext2_uuid-host$(HOST_SHLIB_SUFFIX) \ + $(HOST_LIBRARY_PATH)/libf2fs_fmt_host_dyn$(HOST_SHLIB_SUFFIX) \ $(HOST_LIBRARY_PATH)/libconscrypt_openjdk_jni$(HOST_SHLIB_SUFFIX) \ $(HOST_LIBRARY_PATH)/libbrillo$(HOST_SHLIB_SUFFIX) \ $(HOST_LIBRARY_PATH)/libbrillo-stream$(HOST_SHLIB_SUFFIX) \ @@ -1703,8 +1985,24 @@ else $(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := RECOVERY endif +ifeq ($(BOARD_USES_UBOOT_MULTIIMAGE),true) + + ZIP_SAVE_UBOOTIMG_ARGS := -A ARM -O Linux -T multi -C none 
-n Image + + BOARD_UBOOT_ENTRY := $(strip $(BOARD_UBOOT_ENTRY)) + ifdef BOARD_UBOOT_ENTRY + ZIP_SAVE_UBOOTIMG_ARGS += -e $(BOARD_UBOOT_ENTRY) + endif + BOARD_UBOOT_LOAD := $(strip $(BOARD_UBOOT_LOAD)) + ifdef BOARD_UBOOT_LOAD + ZIP_SAVE_UBOOTIMG_ARGS += -a $(BOARD_UBOOT_LOAD) + endif + +endif + # Depending on the various images guarantees that the underlying # directories are up-to-date. +include $(BUILD_SYSTEM)/tasks/oem_image.mk $(BUILT_TARGET_FILES_PACKAGE): \ $(INSTALLED_BOOTIMAGE_TARGET) \ $(INSTALLED_RADIOIMAGE_TARGET) \ @@ -1713,13 +2011,14 @@ $(BUILT_TARGET_FILES_PACKAGE): \ $(INSTALLED_USERDATAIMAGE_TARGET) \ $(INSTALLED_CACHEIMAGE_TARGET) \ $(INSTALLED_VENDORIMAGE_TARGET) \ + $(INSTALLED_OEMIMAGE_TARGET) \ $(INSTALLED_SYSTEMOTHERIMAGE_TARGET) \ $(INSTALLED_ANDROID_INFO_TXT_TARGET) \ $(SELINUX_FC) \ $(APKCERTS_FILE) \ $(HOST_OUT_EXECUTABLES)/fs_config \ | $(ACP) - @echo "Package target files: $@" + @echo "Package target files:"" $@" $(hide) rm -rf $@ $(zip_root) $(hide) mkdir -p $(dir $@) $(zip_root) ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT))) @@ -1727,6 +2026,8 @@ ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_ $(hide) mkdir -p $(zip_root)/$(PRIVATE_RECOVERY_OUT) $(hide) $(call package_files-copy-root, \ $(TARGET_RECOVERY_ROOT_OUT),$(zip_root)/$(PRIVATE_RECOVERY_OUT)/RAMDISK) + @# OTA install helpers + $(hide) $(call package_files-copy-root, $(OUT)/install, $(zip_root)/INSTALL) ifdef INSTALLED_KERNEL_TARGET $(hide) $(ACP) $(INSTALLED_KERNEL_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/kernel endif @@ -1734,6 +2035,9 @@ ifdef INSTALLED_2NDBOOTLOADER_TARGET $(hide) $(ACP) \ $(INSTALLED_2NDBOOTLOADER_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/second endif +ifdef BOARD_KERNEL_TAGS_OFFSET + $(hide) echo "$(BOARD_KERNEL_TAGS_OFFSET)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/tags_offset +endif ifdef BOARD_KERNEL_CMDLINE $(hide) echo "$(BOARD_KERNEL_CMDLINE)" > 
$(zip_root)/$(PRIVATE_RECOVERY_OUT)/cmdline endif @@ -1743,6 +2047,15 @@ endif ifdef BOARD_KERNEL_PAGESIZE $(hide) echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/pagesize endif +ifdef BOARD_KERNEL_TAGS_ADDR + $(hide) echo "$(BOARD_KERNEL_TAGS_ADDR)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/tagsaddr +endif +ifdef BOARD_RAMDISK_OFFSET + $(hide) echo "$(BOARD_RAMDISK_OFFSET)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/ramdisk_offset +endif +ifeq ($(strip $(BOARD_KERNEL_SEPARATED_DT)),true) + $(hide) $(ACP) $(INSTALLED_DTIMAGE_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/dt +endif endif # INSTALLED_RECOVERYIMAGE_TARGET defined or BOARD_USES_RECOVERY_AS_BOOT is true @# Components of the boot image $(hide) mkdir -p $(zip_root)/BOOT @@ -1763,6 +2076,10 @@ ifdef INSTALLED_2NDBOOTLOADER_TARGET $(hide) $(ACP) \ $(INSTALLED_2NDBOOTLOADER_TARGET) $(zip_root)/BOOT/second endif + +ifdef BOARD_KERNEL_TAGS_OFFSET + $(hide) echo "$(BOARD_KERNEL_TAGS_OFFSET)" > $(zip_root)/BOOT/tags_offset +endif ifdef BOARD_KERNEL_CMDLINE $(hide) echo "$(BOARD_KERNEL_CMDLINE)" > $(zip_root)/BOOT/cmdline endif @@ -1772,21 +2089,48 @@ endif ifdef BOARD_KERNEL_PAGESIZE $(hide) echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/BOOT/pagesize endif +ifdef BOARD_KERNEL_TAGS_ADDR + $(hide) echo "$(BOARD_KERNEL_TAGS_ADDR)" > $(zip_root)/BOOT/tagsaddr +endif +ifdef BOARD_RAMDISK_OFFSET + $(hide) echo "$(BOARD_RAMDISK_OFFSET)" > $(zip_root)/BOOT/ramdisk_offset +endif + +ifeq ($(strip $(BOARD_KERNEL_SEPARATED_DT)),true) + $(hide) $(ACP) $(INSTALLED_DTIMAGE_TARGET) $(zip_root)/BOOT/dt +endif +ifdef ZIP_SAVE_UBOOTIMG_ARGS + $(hide) echo "$(ZIP_SAVE_UBOOTIMG_ARGS)" > $(zip_root)/BOOT/ubootargs +endif endif # BOARD_USES_RECOVERY_AS_BOOT $(hide) $(foreach t,$(INSTALLED_RADIOIMAGE_TARGET),\ mkdir -p $(zip_root)/RADIO; \ $(ACP) $(t) $(zip_root)/RADIO/$(notdir $(t));) + $(hide) $(foreach fi,$(PRODUCT_FACTORYIMAGE_FILES),\ + mkdir -p $(zip_root)/FACTORY; \ + $(ACP) $(fi) $(zip_root)/FACTORY/$(notdir $(fi));) 
@# Contents of the system image $(hide) $(call package_files-copy-root, \ $(SYSTEMIMAGE_SOURCE_DIR),$(zip_root)/SYSTEM) @# Contents of the data image $(hide) $(call package_files-copy-root, \ $(TARGET_OUT_DATA),$(zip_root)/DATA) + @# Prebuilt boot images + $(hide) mkdir -p $(zip_root)/BOOTABLE_IMAGES + $(hide) $(ACP) $(INSTALLED_BOOTIMAGE_TARGET) $(zip_root)/BOOTABLE_IMAGES/ +ifeq ($(BOARD_USES_FULL_RECOVERY_IMAGE),true) + $(hide) $(ACP) $(INSTALLED_RECOVERYIMAGE_TARGET) $(zip_root)/BOOTABLE_IMAGES/ +endif ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE @# Contents of the vendor image $(hide) $(call package_files-copy-root, \ $(TARGET_OUT_VENDOR),$(zip_root)/VENDOR) endif +ifdef BOARD_OEMIMAGE_FILE_SYSTEM_TYPE + @# Contents of the oem image + $(call package_files-copy-root, \ + $(TARGET_OUT_OEM),$(zip_root)/OEM) +endif ifdef INSTALLED_SYSTEMOTHERIMAGE_TARGET @# Contents of the system_other image $(hide) $(call package_files-copy-root, \ @@ -1849,6 +2193,9 @@ ifneq ($(OEM_THUMBPRINT_PROPERTIES),) # OTA scripts are only interested in fingerprint related properties $(hide) echo "oem_fingerprint_properties=$(OEM_THUMBPRINT_PROPERTIES)" >> $(zip_root)/META/misc_info.txt endif +ifdef BUILD_NO + $(hide) echo "build_number=$(BUILD_NO)" >> $(zip_root)/META/misc_info.txt +endif ifneq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH),) $(hide) $(ACP) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH) \ $(zip_root)/META/$(notdir $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH)) @@ -1863,11 +2210,22 @@ ifneq ($(strip $(SANITIZE_TARGET)),) endif ifeq ($(BOARD_USES_FULL_RECOVERY_IMAGE),true) $(hide) echo "full_recovery_image=true" >> $(zip_root)/META/misc_info.txt +endif +ifeq ($(TARGET_NOT_USE_GZIP_RECOVERY_RAMDISK),true) + $(hide) echo "no_gzip_recovery_ramdisk=true" >> $(zip_root)/META/misc_info.txt +endif +ifdef TARGET_RELEASETOOL_FACTORY_FROM_TARGET_SCRIPT + $(hide) echo "factory_from_target_script=$(TARGET_RELEASETOOL_FACTORY_FROM_TARGET_SCRIPT)" >> 
$(zip_root)/META/misc_info.txt endif $(call generate-userimage-prop-dictionary, $(zip_root)/META/misc_info.txt) ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),) +ifeq ($(TARGET_RELEASETOOL_MAKE_RECOVERY_PATCH_SCRIPT),) $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \ ./build/tools/releasetools/make_recovery_patch $(zip_root) $(zip_root) +else + $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \ + $(TARGET_RELEASETOOL_MAKE_RECOVERY_PATCH_SCRIPT) $(zip_root) $(zip_root) +endif endif ifeq ($(AB_OTA_UPDATER),true) @# When using the A/B updater, include the updater config files in the zip. @@ -1890,15 +2248,28 @@ ifeq ($(BREAKPAD_GENERATE_SYMBOLS),true) @# If breakpad symbols have been generated, add them to the zip. $(hide) $(ACP) -r $(TARGET_OUT_BREAKPAD) $(zip_root)/BREAKPAD endif +ifdef PRODUCT_DEFAULT_DEV_CERTIFICATE + $(hide) build/tools/getb64key.py $(PRODUCT_DEFAULT_DEV_CERTIFICATE).x509.pem > $(zip_root)/META/releasekey.txt +else + $(hide) build/tools/getb64key.py $(DEFAULT_SYSTEM_DEV_CERTIFICATE).x509.pem > $(zip_root)/META/releasekey.txt +endif + $(hide) echo "ota_override_device=$(OTA_SCRIPT_OVERRIDE_DEVICE)" >> $(zip_root)/META/misc_info.txt + $(hide) echo "ota_override_prop=$(OTA_SCRIPT_OVERRIDE_PROP)" >> $(zip_root)/META/misc_info.txt ifdef BOARD_PREBUILT_VENDORIMAGE $(hide) mkdir -p $(zip_root)/IMAGES $(hide) cp $(INSTALLED_VENDORIMAGE_TARGET) $(zip_root)/IMAGES/ endif @# Zip everything up, preserving symlinks and placing META/ files first to @# help early validation of the .zip file while uploading it. +ifneq ($(BLOCK_BASED_OTA),false) + $(hide) (cd $(zip_root) && \ + zip -0qryX ../$(notdir $@) ./META && \ + zip -0qryXu ../$(notdir $@) .) +else $(hide) (cd $(zip_root) && \ zip -qryX ../$(notdir $@) ./META && \ zip -qryXu ../$(notdir $@) .) 
+endif @# Run fs_config on all the system, vendor, boot ramdisk, @# and recovery ramdisk files in the zip, and save the output $(hide) zipinfo -1 $@ | awk 'BEGIN { FS="SYSTEM/" } /^SYSTEM\// {print "system/" $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -D $(TARGET_OUT) -S $(SELINUX_FC) > $(zip_root)/META/filesystem_config.txt @@ -1938,20 +2309,107 @@ INTERNAL_OTA_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip $(INTERNAL_OTA_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR) +ifeq ($(TARGET_RELEASETOOL_OTA_FROM_TARGET_SCRIPT),) + OTA_FROM_TARGET_SCRIPT := ./build/tools/releasetools/ota_from_target_files +else + OTA_FROM_TARGET_SCRIPT := $(TARGET_RELEASETOOL_OTA_FROM_TARGET_SCRIPT) +endif + +ifeq ($(WITH_GMS),true) + $(INTERNAL_OTA_PACKAGE_TARGET): backuptool := false +else +ifneq ($(AICP_BUILD),) + $(INTERNAL_OTA_PACKAGE_TARGET): backuptool := true +else + $(INTERNAL_OTA_PACKAGE_TARGET): backuptool := false +endif +endif + +ifeq ($(TARGET_OTA_ASSERT_DEVICE),) + OTA_SCRIPT_OVERRIDE_DEVICE := auto +else + OTA_SCRIPT_OVERRIDE_DEVICE := $(TARGET_OTA_ASSERT_DEVICE) +endif + +ifneq ($(TARGET_UNIFIED_DEVICE),) + OTA_SCRIPT_OVERRIDE_PROP := true + ifeq ($(TARGET_OTA_ASSERT_DEVICE),) + OTA_SCRIPT_OVERRIDE_DEVICE := $(TARGET_DEVICE) + endif +endif + +ifneq ($(BLOCK_BASED_OTA),false) + $(INTERNAL_OTA_PACKAGE_TARGET): block_based := --block +endif + $(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) + @echo "$(OTA_FROM_TARGET_SCRIPT)" > $(PRODUCT_OUT)/ota_script_path @echo "Package OTA: $@" $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \ - ./build/tools/releasetools/ota_from_target_files -v \ - --block \ + $(OTA_FROM_TARGET_SCRIPT) -v \ + $(block_based) \ -p $(HOST_OUT) \ -k $(KEY_CERT_PAIR) \ + --backup=$(backuptool) \ $(if $(OEM_OTA_CONFIG), -o $(OEM_OTA_CONFIG)) \ $(BUILT_TARGET_FILES_PACKAGE) $@ -.PHONY: otapackage +AICP_TARGET_PACKAGE := $(PRODUCT_OUT)/$(AICP_VERSION).zip + +.PHONY: otapackage bacon 
otapackage: $(INTERNAL_OTA_PACKAGE_TARGET) +bacon: otapackage + $(hide) ln -f $(INTERNAL_OTA_PACKAGE_TARGET) $(AICP_TARGET_PACKAGE) + $(hide) $(MD5SUM) $(AICP_TARGET_PACKAGE) > $(AICP_TARGET_PACKAGE).md5sum + @echo "" + @echo "" + @echo -e $(CL_BLU)" █████ ████▒▒▒▒ ████████▒▒▒▒ "${CL_RST} + @echo -e $(CL_BLU)" ████████████▒▒▒▒ ██████████▒▒▒▒▒▒ "${CL_RST} + @echo -e $(CL_BLU)" ████ ████▒▒▒▒█████ ▒▒▒▒ ▒▒▒▒ "${CL_RST} + @echo -e $(CL_BLU)" ███ ███▒▒▒▒████ ▒▒▒▒ ▒▒▒ "${CL_RST} + @echo -e $(CL_BLU)" ████ ████▒▒▒▒████ ▒▒▒▒ ▒▒▒▒ "${CL_RST} + @echo -e $(CL_BLU)" ████████████▒▒▒▒ ██████████▒▒▒▒▒▒ "${CL_RST} + @echo -e $(CL_BLU)" █████ ████▒▒▒▒ ████████▒▒▒▒ "${CL_RST} + @echo -e $(CL_BLU)" ▒▒▒▒ "${CL_RST} + @echo -e $(CL_BLU)" ▒▒▒▒ "${CL_RST} + @echo -e $(CL_BLU)"www.aicp-rom.com - Get your flash ON"${CL_RST} + @echo "" + @echo -e ${CL_CYN}"===========-Package complete-==========="${CL_RST} + @echo -e ${CL_CYN}"zip: "${CL_MAG} $(AICP_TARGET_PACKAGE)${CL_RST} + @echo -e ${CL_CYN}"md5: "${CL_MAG}" `cat $(AICP_TARGET_PACKAGE).md5sum | cut -d ' ' -f 1`"${CL_RST} + @echo -e ${CL_CYN}"size:"${CL_MAG}" `ls -lah $(AICP_TARGET_PACKAGE) | cut -d ' ' -f 5`"${CL_RST} + @echo -e ${CL_CYN}"========================================"${CL_RST} + @echo -e "" endif # build_ota_package +# ----------------------------------------------------------------- +# The factory package + +name := $(TARGET_PRODUCT)-factory-$(FILE_NAME_TAG) + +INTERNAL_FACTORY_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip + +ifeq ($(TARGET_RELEASETOOLS_EXTENSIONS),) +# default to common dir for device vendor +$(INTERNAL_FACTORY_PACKAGE_TARGET): extensions := $(TARGET_DEVICE_DIR)/../common +else +$(INTERNAL_FACTORY_PACKAGE_TARGET): extensions := $(TARGET_RELEASETOOLS_EXTENSIONS) +endif + +$(INTERNAL_FACTORY_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(DISTTOOLS) + @echo "Package:"" $@" + if [ -z $(TARGET_RELEASETOOL_FACTORY_FROM_TARGET_SCRIPT) ]; then \ + echo "Error: Factory script is not defined by target"; \ + exit 1; \ 
+ fi + MKBOOTIMG=$(BOARD_CUSTOM_BOOTIMG_MK) \ + $(TARGET_RELEASETOOL_FACTORY_FROM_TARGET_SCRIPT) -v \ + -s $(extensions) \ + -p $(HOST_OUT) \ + $(BUILT_TARGET_FILES_PACKAGE) $@ + +.PHONY: factorypackage +factorypackage: $(INTERNAL_FACTORY_PACKAGE_TARGET) # ----------------------------------------------------------------- # The update package @@ -1964,10 +2422,16 @@ name := $(name)-img-$(FILE_NAME_TAG) INTERNAL_UPDATE_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip +ifeq ($(TARGET_RELEASETOOL_IMG_FROM_TARGET_SCRIPT),) + IMG_FROM_TARGET_SCRIPT := ./build/tools/releasetools/img_from_target_files +else + IMG_FROM_TARGET_SCRIPT := $(TARGET_RELEASETOOL_IMG_FROM_TARGET_SCRIPT) +endif + $(INTERNAL_UPDATE_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) - @echo "Package: $@" + @echo "Package:"" $@" $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \ - ./build/tools/releasetools/img_from_target_files -v \ + $(IMG_FROM_TARGET_SCRIPT) -v \ -p $(HOST_OUT) \ $(BUILT_TARGET_FILES_PACKAGE) $@ @@ -2007,7 +2471,7 @@ name := $(name)-apps-$(FILE_NAME_TAG) APPS_ZIP := $(PRODUCT_OUT)/$(name).zip $(APPS_ZIP): $(INSTALLED_SYSTEMIMAGE) - @echo "Package apps: $@" + @echo "Package apps:"" $@" $(hide) rm -rf $@ $(hide) mkdir -p $(dir $@) $(hide) apps_to_zip=`find $(TARGET_OUT_APPS) $(TARGET_OUT_APPS_PRIVILEGED) -mindepth 2 -maxdepth 3 -name "*.apk"`; \ @@ -2074,7 +2538,7 @@ name := $(TARGET_PRODUCT)-emulator-$(FILE_NAME_TAG) INTERNAL_EMULATOR_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip $(INTERNAL_EMULATOR_PACKAGE_TARGET): $(INTERNAL_EMULATOR_PACKAGE_FILES) - @echo "Package: $@" + @echo "Package:"" $@" $(hide) zip -qjX $@ $(INTERNAL_EMULATOR_PACKAGE_FILES) endif diff --git a/core/apicheck_msg_current.txt b/core/apicheck_msg_current.txt index 440e7f8862e..df0124bef0c 100644 --- a/core/apicheck_msg_current.txt +++ b/core/apicheck_msg_current.txt @@ -9,6 +9,77 @@ To make these errors go away, you have two choices: 2) You can update current.txt by 
executing the following command: make update-api + ^^^^^^^^^^^^^^^^^^ + CONGRATS YOU EARNED A QUAIL STAR! + + M + MM + MMM + M.MM + MM M + 7M MM + MMMMM MMMMM + MMMMM .MMMMM + MMMM MMMM + MM M + MM M .M + M+M MMMM + .M++MM .MM + MM+++MM MM + 8NNNNN MM+++++MM + NNNN $Z8. MM+++++MM MM + MM $Z8M7IMNN+++++MM MM + .$$$D ~NNMNN+++++MM MMMM + INNNNM NMNM++++++M M M + NNO:NI=MM+++++++MM MM MM + 8M$MMMMMD?+++++++MM .MMMMMMMMMMMMMMM MMMMN MMMMM + M$$NMMMMMM$++++++++MMMMMMM=+++++++++++++MM MMMMM MMMMM + M77$IMMMMMN.,+++++++++++++++++++++++++++MM .MMMMM MMMMM + .??I8,?M777OM.?+++++++++++++++++++++++++MM MM MM + O==?M7MM$MMI7$.~M+++++++++++++++++++++++MM .M M + NMMM+~M??MMMMMMMMMMMI$$++++++++++++++++++++MM MMMM + MMMM++++MM~=+I$OMMMOO?7M$Z$$$+++++++++++++++++MM MM + NMMM++++++++~~MO~7$OM8O8OMZZ$Z$M$$M++++++++++++++MM7MMM MM + MMMM++++++++++++==D~M~:8N88MMOMMZDM$$Z$$M+++++++++++MM77777MMM +MMM+++++++++++++++~MM~~M $O,NM88MOMMZ$$MM$$$+++++++++MM777777777MMMM + MMM++++++++++++M~M~IMMMO888NMOMMOZM$ZZDZ$$+++++++MM7777777777777OMMZ + MMM+++++++++++~~M~~MDOOMMO8NOOOOZZ$$Z.Z$$M++++MM77777777777777777MMM + MMM++++++++M.Z, D+ 8O88M8D,OOMDZZ$D.$$$N+++M7MMMMMD77777777777777MMM + .MM+++++++MM:.D:ZMMM8888OOOOOOZZ$ND$$$M++MM777777MMMM7777777777777MMD + MMM+++++~M.$.M~,~7M8?MON MOOZZ$$N$$$M++MD777777777MMMM77777777777MMM + MM=+++=ZMZ.MM MMZOOOO88OOZM$M.$$$$+++M7777777777777MMMM7777777777MM + MMM++MM~,,$M.+~M$OOMOOMZMI$$$$$$$++MM7777777777777777MMM777777777MM + MM++++=. 
~$$.$.M~M$MZOM7MMZ$$$$$$++MMMMMMD7777777777777MMMI7777777MMM + .M++++++MM+OMI$7M??N+OZM8MMMD$$M$$++M77777MMMMN77777777777MMM7777777MMM + M++++++++M+=?+++++++++++MNMZN$$N$$+MM777777777MMMM7777777777MMM777777MM, + M+++++M=?7$$M+++++++++++++++$NO$$$$+M7777777777777MMMM777777777MMM77777MM + M++~M$M$M+++++M++MMM++++++++++M=$$D$MMMMMMMM7777777777MMM$7777777MMM77777MM + M+M$$$M+++++++++MM MMMMM+++++++M$Z$$M MMMMMI7777777MMMM7777777MM77777MM + M++7NMIN++Z++NMM MMMMM+++N$M$M MMMM7777777MMM777777MM$777MM + M=++8+++++++MM MMMMMZ$M$M MMMM777777MMM77777MMZ777MM + MM++++++++MM MM$ MMM77777MMM77777MM7777MM + MM++++++MM MMMM7777MMM7777MM777MM + MM++++MMM .MMM7777MM7777MM77$M + MM+++MM M MMM777MMN777MM77MM + NM+MM M MMM77MMM77NMM7MM + MM MM MMM77MMM77MM77M + .MMM MMM7MMM7IMM7MM + MM M MMM7MMM7MM7MM + M MM MM7MMN7MMMM + MMMM MMMM MMMMMIMMMM + MMMM. MMMMM MMMMMMMMM + MMMMM MMMMM MMMMMMMM + MM MM OMMMMMM + M MM MMMMMM + MM M MMMMM + MMM MMM + MM MM + M + + + NO. NO. STOP BEING LAZY. SERIOUSLY. + DO NOT DO THIS in LineageOS. THIS IS A LIE. IT WILL BREAK THINGS. + To submit the revised current.txt to the main Android repository, you will need approval. ****************************** diff --git a/core/base_rules.mk b/core/base_rules.mk index 6722af4f27a..879486ab1c2 100644 --- a/core/base_rules.mk +++ b/core/base_rules.mk @@ -210,7 +210,11 @@ ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE)) # Apk and its attachments reside in its own subdir. ifeq ($(LOCAL_MODULE_CLASS),APPS) # framework-res.apk doesn't like the additional layer. - ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true) + ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true) + # Neither do Runtime Resource Overlay apks, which contain just the overlaid resources. 
+ else ifeq ($(LOCAL_IS_RUNTIME_RESOURCE_OVERLAY),true) + else ifeq ($(LOCAL_IGNORE_SUBDIR),true) + else my_module_path := $(my_module_path)/$(LOCAL_MODULE) endif endif @@ -265,7 +269,7 @@ $(cleantarget) : PRIVATE_CLEAN_FILES := \ $(LOCAL_INSTALLED_MODULE) \ $(intermediates) $(cleantarget):: - @echo "Clean: $(PRIVATE_MODULE)" + @echo "Clean:"" $(PRIVATE_MODULE)" $(hide) rm -rf $(PRIVATE_CLEAN_FILES) ########################################################### diff --git a/core/binary.mk b/core/binary.mk index 7b229032b00..3257efd3a3f 100644 --- a/core/binary.mk +++ b/core/binary.mk @@ -30,6 +30,15 @@ else endif endif +# Many qcom modules don't correctly set a dependency on the kernel headers. Fix it for them, +# but warn the user. +ifneq (,$(findstring $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include,$(LOCAL_C_INCLUDES))) + ifeq (,$(findstring $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr,$(LOCAL_ADDITIONAL_DEPENDENCIES))) + $(warning $(LOCAL_MODULE) uses kernel headers, but does not depend on them!) + LOCAL_ADDITIONAL_DEPENDENCIES += $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr + endif +endif + # The following LOCAL_ variables will be modified in this file. # Because the same LOCAL_ variables may be used to define modules for both 1st arch and 2nd arch, # we can't modify them in place. 
@@ -209,6 +218,8 @@ ifdef LOCAL_CLANG_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH) my_clang := $(strip $(LOCAL_CLANG_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))) endif +my_sdclang := $(strip $(LOCAL_SDCLANG)) + # clang is enabled by default for host builds # enable it unless we've specifically disabled clang above ifdef LOCAL_IS_HOST_MODULE @@ -251,6 +262,14 @@ endif my_cppflags := $(my_cpp_std_version) $(my_cppflags) +ifeq ($(SDCLANG),true) + ifeq ($(my_sdclang),) + ifeq ($(TARGET_USE_SDCLANG),true) + my_sdclang := true + endif + endif +endif + # arch-specific static libraries go first so that generic ones can depend on them my_static_libraries := $(LOCAL_STATIC_LIBRARIES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_STATIC_LIBRARIES_$(my_32_64_bit_suffix)) $(my_static_libraries) my_whole_static_libraries := $(LOCAL_WHOLE_STATIC_LIBRARIES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_WHOLE_STATIC_LIBRARIES_$(my_32_64_bit_suffix)) $(my_whole_static_libraries) @@ -261,8 +280,8 @@ include $(BUILD_SYSTEM)/cxx_stl_setup.mk ifdef LOCAL_HAL_STATIC_LIBRARIES $(foreach lib, $(LOCAL_HAL_STATIC_LIBRARIES), \ $(eval b_lib := $(filter $(lib).%,$(BOARD_HAL_STATIC_LIBRARIES)))\ - $(if $(b_lib), $(eval my_static_libraries += $(b_lib)),\ - $(eval my_static_libraries += $(lib).default))) + $(if $(b_lib), $(eval my_static_libraries := $(b_lib) $(my_static_libraries)),\ + $(eval my_static_libraries := $(lib).default $(my_static_libraries)))) b_lib := endif @@ -294,10 +313,32 @@ ifneq ($(filter true always, $(LOCAL_FDO_SUPPORT)),) my_cflags += $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_FDO_OPTIMIZE_CFLAGS) my_fdo_build := true endif - # Disable ccache (or other compiler wrapper) except gomacc, which - # can handle -fprofile-use properly. - my_cc_wrapper := $(filter $(GOMA_CC),$(my_cc_wrapper)) - my_cxx_wrapper := $(filter $(GOMA_CC),$(my_cxx_wrapper)) + # Disable ccache (or other compiler wrapper) except gomacc, unless + # it can handle -fprofile-use properly. 
+ + # ccache supports -fprofile-use as of version 3.2. Parse the version output + # of each wrapper to determine if it's ccache 3.2 or newer. + is_cc_ccache := $(shell if [ "`$(my_cc_wrapper) -V 2>/dev/null | head -1 | cut -d' ' -f1`" = ccache ]; then echo true; fi) + ifeq ($(is_cc_ccache),true) + cc_ccache_version := $(shell $(my_cc_wrapper) -V | head -1 | grep -o '[[:digit:]]\+\.[[:digit:]]\+') + vmajor := $(shell echo $(cc_ccache_version) | cut -d'.' -f1) + vminor := $(shell echo $(cc_ccache_version) | cut -d'.' -f2) + cc_ccache_ge_3_2 = $(shell if [ $(vmajor) -gt 3 -o $(vmajor) -eq 3 -a $(vminor) -ge 2 ]; then echo true; fi) + endif + is_cxx_ccache := $(shell if [ "`$(my_cxx_wrapper) -V 2>/dev/null | head -1 | cut -d' ' -f1`" = ccache ]; then echo true; fi) + ifeq ($(is_cxx_ccache),true) + cxx_ccache_version := $(shell $(my_cxx_wrapper) -V | head -1 | grep -o '[[:digit:]]\+\.[[:digit:]]\+') + vmajor := $(shell echo $(cxx_ccache_version) | cut -d'.' -f1) + vminor := $(shell echo $(cxx_ccache_version) | cut -d'.' 
-f2) + cxx_ccache_ge_3_2 = $(shell if [ $(vmajor) -gt 3 -o $(vmajor) -eq 3 -a $(vminor) -ge 2 ]; then echo true; fi) + endif + + ifneq ($(cc_ccache_ge_3_2),true) + my_cc_wrapper := $(filter $(GOMA_CC),$(my_cc_wrapper)) + endif + ifneq ($(cxx_ccache_ge_3_2),true) + my_cxx_wrapper := $(filter $(GOMA_CC),$(my_cxx_wrapper)) + endif endif ########################################################### @@ -326,6 +367,14 @@ my_target_global_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_TARGET_GLOBAL_CFL my_target_global_conlyflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_TARGET_GLOBAL_CONLYFLAGS) my_target_global_cppflags += $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_TARGET_GLOBAL_CPPFLAGS) my_target_global_ldflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_TARGET_GLOBAL_LDFLAGS) + ifeq ($(my_sdclang),true) + ifeq ($(strip $(my_cc)),) + my_cc := $(my_cc_wrapper) $(SDCLANG_PATH)/clang $(SDLLVM_AE_FLAG) -Wno-vectorizer-no-neon + endif + ifeq ($(strip $(my_cxx)),) + my_cxx := $(my_cxx_wrapper) $(SDCLANG_PATH)/clang++ $(SDLLVM_AE_FLAG) -Wno-vectorizer-no-neon + endif + endif else my_target_global_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_GLOBAL_CFLAGS) my_target_global_conlyflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_GLOBAL_CONLYFLAGS) @@ -655,13 +704,25 @@ proto_sources_fullpath := $(addprefix $(LOCAL_PATH)/, $(proto_sources)) proto_generated_cpps := $(addprefix $(proto_gen_dir)/, \ $(patsubst %.proto,%.pb$(my_proto_source_suffix),$(proto_sources_fullpath))) +define copy-proto-files +$(if $(PRIVATE_PROTOC_OUTPUT), \ + $(if $(call streq,$(PRIVATE_PROTOC_INPUT),$(PRIVATE_PROTOC_OUTPUT)),, \ + $(eval proto_generated_path := $(dir $(subst $(PRIVATE_PROTOC_INPUT),$(PRIVATE_PROTOC_OUTPUT),$@))) + @mkdir -p $(dir $(proto_generated_path)) + @echo "Protobuf relocation: $(basename $@).h => $(proto_generated_path)" + @cp -f $(basename $@).h $(proto_generated_path) ),) +endef + # Ensure the transform-proto-to-cc rule is only defined once in multilib build. 
ifndef $(my_host)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_proto_defined $(proto_generated_cpps): PRIVATE_PROTO_INCLUDES := $(TOP) $(proto_generated_cpps): PRIVATE_PROTOC_FLAGS := $(LOCAL_PROTOC_FLAGS) $(my_protoc_flags) +$(proto_generated_cpps): PRIVATE_PROTOC_OUTPUT := $(LOCAL_PROTOC_OUTPUT) +$(proto_generated_cpps): PRIVATE_PROTOC_INPUT := $(LOCAL_PATH) $(proto_generated_cpps): PRIVATE_RENAME_CPP_EXT := $(my_rename_cpp_ext) $(proto_generated_cpps): $(proto_gen_dir)/%.pb$(my_proto_source_suffix): %.proto $(my_protoc_deps) $(PROTOC) $(transform-proto-to-cc) + $(copy-proto-files) $(my_host)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_proto_defined := true endif @@ -830,7 +891,7 @@ y_yacc_cs := $(addprefix \ ifneq ($(y_yacc_cs),) $(y_yacc_cs): $(intermediates)/%.c: \ $(TOPDIR)$(LOCAL_PATH)/%.y \ - $(my_additional_dependencies) + $(my_additional_dependencies) | $(BISON) $(call transform-y-to-c-or-cpp) $(call track-src-file-gen,$(y_yacc_sources),$(y_yacc_cs)) @@ -843,7 +904,7 @@ yy_yacc_cpps := $(addprefix \ ifneq ($(yy_yacc_cpps),) $(yy_yacc_cpps): $(intermediates)/%$(LOCAL_CPP_EXTENSION): \ $(TOPDIR)$(LOCAL_PATH)/%.yy \ - $(my_additional_dependencies) + $(my_additional_dependencies) | $(BISON) $(call transform-y-to-c-or-cpp) $(call track-src-file-gen,$(yy_yacc_sources),$(yy_yacc_cpps)) @@ -1227,6 +1288,11 @@ my_tracked_gen_files := $(foreach f,$(my_tracked_src_files),$(eval my_src_file_obj_$(s):=)) my_tracked_src_files := +## Allow a device's own headers to take precedence over global ones +ifneq ($(TARGET_SPECIFIC_HEADER_PATH),) +my_c_includes := $(TOPDIR)$(TARGET_SPECIFIC_HEADER_PATH) $(my_c_includes) +endif + my_c_includes += $(TOPDIR)$(LOCAL_PATH) $(intermediates) $(generated_sources_dir) ifndef LOCAL_SDK_VERSION diff --git a/core/build_id.mk b/core/build_id.mk index e6dba3fae60..35b5a1b6f8f 100644 --- a/core/build_id.mk +++ b/core/build_id.mk @@ -18,4 +18,4 @@ # (like "CRB01"). It must be a single word, and is # capitalized by convention. 
-export BUILD_ID=NMF26Q +export BUILD_ID=NJH47F diff --git a/core/build_rro_package.mk b/core/build_rro_package.mk new file mode 100644 index 00000000000..9865b33fec4 --- /dev/null +++ b/core/build_rro_package.mk @@ -0,0 +1,25 @@ +############################################################################# +## Standard rules for installing runtime resource overlay APKs. +## +## Set LOCAL_RRO_THEME to the theme name if the package should apply only to +## a particular theme as set by ro.boot.vendor.overlay.theme system property. +## +## If LOCAL_RRO_THEME is not set, the package will apply always, independent +## of themes. +## +############################################################################# + +LOCAL_IS_RUNTIME_RESOURCE_OVERLAY := true + +ifneq ($(LOCAL_SRC_FILES),) + $(error runtime resource overlay package should not contain sources) +endif + +ifeq ($(LOCAL_RRO_THEME),) + LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/overlay +else + LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/overlay/$(LOCAL_RRO_THEME) +endif + +include $(BUILD_SYSTEM)/package.mk + diff --git a/core/ccache.mk b/core/ccache.mk index 5c2ae23da6f..f3bc288598c 100644 --- a/core/ccache.mk +++ b/core/ccache.mk @@ -30,14 +30,22 @@ ifneq ($(filter-out false,$(USE_CCACHE)),) # We don't really use system headers much so the rootdir is # fine; ensures these paths are relative for all Android trees # on a workstation. - export CCACHE_BASEDIR := / + ifeq ($(CCACHE_BASEDIR),) + export CCACHE_BASEDIR := $(ANDROID_BUILD_TOP) + endif # Workaround for ccache with clang.
# See http://petereisentraut.blogspot.com/2011/09/ccache-and-clang-part-2.html export CCACHE_CPP2 := true - CCACHE_HOST_TAG := $(HOST_PREBUILT_TAG) - ccache := prebuilts/misc/$(CCACHE_HOST_TAG)/ccache/ccache + # Detect if the system already has ccache installed to use instead of the prebuilt + ccache := $(shell command -v ccache) + + ifeq ($(ccache),) + CCACHE_HOST_TAG := $(HOST_PREBUILT_TAG) + ccache := prebuilts/misc/$(CCACHE_HOST_TAG)/ccache/ccache + endif + # Check that the executable is here. ccache := $(strip $(wildcard $(ccache))) ifdef ccache diff --git a/core/checktree b/core/checktree index b0b9cfab6d2..87b12335bf4 100755 --- a/core/checktree +++ b/core/checktree @@ -1,4 +1,6 @@ -#!/usr/bin/python -E +#!/usr/bin/env python + +from __future__ import print_function import sys, os, re @@ -11,7 +13,7 @@ excludes = [r'.*?/\.obj.*?', r'.*?/out/.*?', r'.*?/install/.*?'] -excludes_compiled = map(re.compile, excludes) +excludes_compiled = list(map(re.compile, excludes)) def filter_excludes(str): for e in excludes_compiled: @@ -60,9 +62,9 @@ def run(command, regex, filt): filt_compiled = re.compile(filt) if len(lines) >= 1: - lines = filter(filterit, lines) + lines = list(filter(filterit, lines)) if len(lines) >= 1: - return map(matchit, lines) + return list(map(matchit, lines)) return None try: @@ -71,24 +73,24 @@ try: elif len(sys.argv) == 2 and sys.argv[1] == "-a": do_exclude = False else: - print "usage: checktree [-a]" - print " -a don't filter common crud in the tree" + print("usage: checktree [-a]") + print(" -a don't filter common crud in the tree") sys.exit(1) have = run("p4 have ...", r'[^#]+#[0-9]+ - (.*)', r'.*') cwd = os.getcwd() files = run("find .
-not -type d", r'.(.*)', r'.*') - files = map(lambda s: cwd+s, files) + files = [cwd+s for s in files] added_depot_path = run("p4 opened ...", r'([^#]+)#.*', r'.*?#[0-9]+ - add .*'); added = [] if added_depot_path: - added_depot_path = map(quotate, added_depot_path) + added_depot_path = list(map(quotate, added_depot_path)) where = "p4 where " + " ".join(added_depot_path) added = run(where, r'(.*)', r'.*') - added = map(split_perforce_parts, added) + added = list(map(split_perforce_parts, added)) extras = [] @@ -106,8 +108,8 @@ try: extras = filter(filter_excludes, extras) for s in extras: - print s.replace(" ", "\\ ") + print(s.replace(" ", "\\ ")) -except PerforceError, e: +except PerforceError as e: sys.exit(2) diff --git a/core/clang/HOST_x86_common.mk b/core/clang/HOST_x86_common.mk index 9e71750c142..690c0f6b389 100644 --- a/core/clang/HOST_x86_common.mk +++ b/core/clang/HOST_x86_common.mk @@ -13,7 +13,8 @@ endif ifeq ($(HOST_OS),linux) CLANG_CONFIG_x86_LINUX_HOST_EXTRA_ASFLAGS := \ --gcc-toolchain=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG) \ - --sysroot $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot + --sysroot $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot \ + -B$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/bin CLANG_CONFIG_x86_LINUX_HOST_EXTRA_CFLAGS := \ --gcc-toolchain=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG) diff --git a/core/cleanbuild.mk b/core/cleanbuild.mk index e46d9341539..db4f4794eb8 100644 --- a/core/cleanbuild.mk +++ b/core/cleanbuild.mk @@ -58,7 +58,7 @@ INTERNAL_CLEAN_STEPS := $(strip $(INTERNAL_CLEAN_STEPS)) # If the clean_steps.mk file is missing (usually after a clean build) # then we won't do anything. -CURRENT_CLEAN_BUILD_VERSION := $(INTERNAL_CLEAN_BUILD_VERSION) +CURRENT_CLEAN_BUILD_VERSION := MISSING CURRENT_CLEAN_STEPS := $(INTERNAL_CLEAN_STEPS) # Read the current state from the file, if present. 
@@ -67,7 +67,9 @@ CURRENT_CLEAN_STEPS := $(INTERNAL_CLEAN_STEPS) clean_steps_file := $(PRODUCT_OUT)/clean_steps.mk -include $(clean_steps_file) -ifneq ($(CURRENT_CLEAN_BUILD_VERSION),$(INTERNAL_CLEAN_BUILD_VERSION)) +ifeq ($(CURRENT_CLEAN_BUILD_VERSION),MISSING) + # Do nothing +else ifneq ($(CURRENT_CLEAN_BUILD_VERSION),$(INTERNAL_CLEAN_BUILD_VERSION)) # The major clean version is out-of-date. Do a full clean, and # don't even bother with the clean steps. $(info *** A clean build is required because of a recent change.) @@ -109,36 +111,19 @@ endif # Write the new state to the file. # -rewrite_clean_steps_file := ifneq ($(CURRENT_CLEAN_BUILD_VERSION)-$(CURRENT_CLEAN_STEPS),$(INTERNAL_CLEAN_BUILD_VERSION)-$(INTERNAL_CLEAN_STEPS)) -rewrite_clean_steps_file := true -endif -ifeq ($(wildcard $(clean_steps_file)),) -# This is the first build. -rewrite_clean_steps_file := true -endif -ifeq ($(rewrite_clean_steps_file),true) -$(shell \ - mkdir -p $(dir $(clean_steps_file)) && \ - echo "CURRENT_CLEAN_BUILD_VERSION := $(INTERNAL_CLEAN_BUILD_VERSION)" > \ - $(clean_steps_file) ;\ - echo "CURRENT_CLEAN_STEPS := $(wordlist 1,500,$(INTERNAL_CLEAN_STEPS))" >> $(clean_steps_file) \ - ) -define -cs-write-clean-steps-if-arg1-not-empty -$(if $(1),$(shell echo "CURRENT_CLEAN_STEPS += $(1)" >> $(clean_steps_file))) -endef -$(call -cs-write-clean-steps-if-arg1-not-empty,$(wordlist 501,1000,$(INTERNAL_CLEAN_STEPS))) -$(call -cs-write-clean-steps-if-arg1-not-empty,$(wordlist 1001,1500,$(INTERNAL_CLEAN_STEPS))) -$(call -cs-write-clean-steps-if-arg1-not-empty,$(wordlist 1501,2000,$(INTERNAL_CLEAN_STEPS))) -$(call -cs-write-clean-steps-if-arg1-not-empty,$(wordlist 2001,2500,$(INTERNAL_CLEAN_STEPS))) -$(call -cs-write-clean-steps-if-arg1-not-empty,$(wordlist 2501,3000,$(INTERNAL_CLEAN_STEPS))) -$(call -cs-write-clean-steps-if-arg1-not-empty,$(wordlist 3001,99999,$(INTERNAL_CLEAN_STEPS))) +$(shell mkdir -p $(dir $(clean_steps_file))) +$(file >$(clean_steps_file).tmp,CURRENT_CLEAN_BUILD_VERSION 
:= $(INTERNAL_CLEAN_BUILD_VERSION)$(newline)CURRENT_CLEAN_STEPS := $(INTERNAL_CLEAN_STEPS)$(newline)) +$(shell if ! cmp -s $(clean_steps_file).tmp $(clean_steps_file); then \ + mv $(clean_steps_file).tmp $(clean_steps_file); \ + else \ + rm $(clean_steps_file).tmp; \ + fi) endif CURRENT_CLEAN_BUILD_VERSION := CURRENT_CLEAN_STEPS := clean_steps_file := -rewrite_clean_steps_file := INTERNAL_CLEAN_STEPS := INTERNAL_CLEAN_BUILD_VERSION := @@ -234,6 +219,7 @@ installclean_files := \ $(PRODUCT_OUT)/*.xlb \ $(PRODUCT_OUT)/*.zip \ $(PRODUCT_OUT)/kernel \ + $(PRODUCT_OUT)/*.zip.md5sum \ $(PRODUCT_OUT)/data \ $(PRODUCT_OUT)/skin \ $(PRODUCT_OUT)/obj/APPS \ diff --git a/core/clear_vars.mk b/core/clear_vars.mk index 58866105888..09eddce0de4 100644 --- a/core/clear_vars.mk +++ b/core/clear_vars.mk @@ -106,6 +106,7 @@ LOCAL_RES_LIBRARIES:= LOCAL_MANIFEST_INSTRUMENTATION_FOR:= LOCAL_AIDL_INCLUDES:= LOCAL_VTS_INCLUDES:= +LOCAL_AIDL_FLAGS:= LOCAL_JARJAR_RULES:= LOCAL_ADDITIONAL_JAVA_DIR:= LOCAL_ALLOW_UNDEFINED_SYMBOLS:= @@ -156,6 +157,9 @@ LOCAL_COMPATIBILITY_SUPPORT_FILES:= LOCAL_CTS_TEST_PACKAGE:= LOCAL_CTS_TEST_RUNNER:= LOCAL_CLANG:= +LOCAL_SDCLANG:= +LOCAL_SDCLANG_LTO:= +LOCAL_SDCLANG_EXTRA_FLAGS_32:= LOCAL_JAR_EXCLUDE_FILES:= LOCAL_JAR_PACKAGES:= LOCAL_JAR_EXCLUDE_PACKAGES:= @@ -363,6 +367,13 @@ LOCAL_CLANG_64:= LOCAL_INIT_RC_32:= LOCAL_INIT_RC_64:= LOCAL_JAVA_LANGUAGE_VERSION:= +LOCAL_IS_RUNTIME_RESOURCE_OVERLAY:= +LOCAL_RRO_THEME:= + +LOCAL_PROTOC_OUTPUT:= + +# Include any vendor specific clear_vars.mk file +-include $(TOPDIR)vendor/*/build/core/clear_vars.mk # Trim MAKEFILE_LIST so that $(call my-dir) doesn't need to # iterate over thousands of entries every time. 
diff --git a/core/combo/HOST_darwin-x86.mk b/core/combo/HOST_darwin-x86.mk index fc56e52996f..4ba05ce3178 100644 --- a/core/combo/HOST_darwin-x86.mk +++ b/core/combo/HOST_darwin-x86.mk @@ -47,7 +47,13 @@ $(combo_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG := $($(combo_2nd_arch_prefix)HO $(combo_2nd_arch_prefix)HOST_AR := $(AR) $(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -isysroot $(mac_sdk_root) -mmacosx-version-min=$(mac_sdk_version) -DMACOSX_DEPLOYMENT_TARGET=$(mac_sdk_version) +ifeq (,$(wildcard $(mac_sdk_path)/Toolchains/XcodeDefault.xctoolchain/usr/include/c++/v1)) +# libc++ header locations for XCode CLT 7.1+ +$(combo_2nd_arch_prefix)HOST_GLOBAL_CPPFLAGS += -isystem $(mac_sdk_path)/usr/include/c++/v1 +else +# libc++ header locations for pre-XCode CLT 7.1+ $(combo_2nd_arch_prefix)HOST_GLOBAL_CPPFLAGS += -isystem $(mac_sdk_path)/Toolchains/XcodeDefault.xctoolchain/usr/include/c++/v1 +endif $(combo_2nd_arch_prefix)HOST_GLOBAL_LDFLAGS += -isysroot $(mac_sdk_root) -Wl,-syslibroot,$(mac_sdk_root) -mmacosx-version-min=$(mac_sdk_version) $(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -fPIC -funwind-tables @@ -102,5 +108,10 @@ endef # $(1): The file to check define get-file-size -stat -f "%z" $(1) +GSTAT=$$(which gstat) ; \ +if [ !
-z "$$GSTAT" ]; then \ +gstat -c "%s" $(1) ; \ +else \ +stat -f "%z" $(1) ; \ +fi endef diff --git a/core/combo/HOST_darwin-x86_64.mk b/core/combo/HOST_darwin-x86_64.mk index 251455f0bf6..ba984d23577 100644 --- a/core/combo/HOST_darwin-x86_64.mk +++ b/core/combo/HOST_darwin-x86_64.mk @@ -47,7 +47,13 @@ HOST_TOOLCHAIN_FOR_CLANG := $(HOST_TOOLCHAIN_ROOT) HOST_AR := $(AR) HOST_GLOBAL_CFLAGS += -isysroot $(mac_sdk_root) -mmacosx-version-min=$(mac_sdk_version) -DMACOSX_DEPLOYMENT_TARGET=$(mac_sdk_version) +ifeq (,$(wildcard $(mac_sdk_path)/Toolchains/XcodeDefault.xctoolchain/usr/include/c++/v1)) +# libc++ header locations for XCode CLT 7.1+ +HOST_GLOBAL_CPPFLAGS += -isystem $(mac_sdk_path)/usr/include/c++/v1 +else +# libc++ header locations for pre-XCode CLT 7.1+ HOST_GLOBAL_CPPFLAGS += -isystem $(mac_sdk_path)/Toolchains/XcodeDefault.xctoolchain/usr/include/c++/v1 +endif HOST_GLOBAL_LDFLAGS += -isysroot $(mac_sdk_root) -Wl,-syslibroot,$(mac_sdk_root) -mmacosx-version-min=$(mac_sdk_version) HOST_GLOBAL_CFLAGS += -fPIC -funwind-tables diff --git a/core/combo/arch/arm/armv7-a-neon.mk b/core/combo/arch/arm/armv7-a-neon.mk index 5d5b050098a..8dee4eaedc6 100644 --- a/core/combo/arch/arm/armv7-a-neon.mk +++ b/core/combo/arch/arm/armv7-a-neon.mk @@ -8,36 +8,46 @@ ARCH_ARM_HAVE_NEON := true local_arch_has_lpae := false -ifneq (,$(filter cortex-a15 krait denver,$(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT))) +ifneq (,$(filter cortex-a15 denver krait,$(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT))) # TODO: krait is not a cortex-a15, we set the variant to cortex-a15 so that # hardware divide operations are generated. This should be removed and a # krait CPU variant added to GCC. For clang we specify -mcpu for krait in # core/clang/arm.mk.
- arch_variant_cflags := -mcpu=cortex-a15 + arch_variant_cflags := -mcpu=cortex-a15 -mfpu=neon-vfpv4 local_arch_has_lpae := true arch_variant_ldflags := \ -Wl,--no-fix-cortex-a8 else -ifeq ($(strip $(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT)),cortex-a8) - arch_variant_cflags := -mcpu=cortex-a8 +ifeq ($(strip $(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT)),cortex-a9) + arch_variant_cflags := -mcpu=cortex-a9 -mfpu=neon +else +ifneq (,$(filter cortex-a8 scorpion,$(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT))) + arch_variant_cflags := -mcpu=cortex-a8 -mfpu=neon arch_variant_ldflags := \ -Wl,--fix-cortex-a8 else ifneq (,$(filter cortex-a7 cortex-a53 cortex-a53.a57,$(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT))) - arch_variant_cflags := -mcpu=cortex-a7 + arch_variant_cflags := -mcpu=cortex-a7 -mfpu=neon-vfpv4 local_arch_has_lpae := true arch_variant_ldflags := \ -Wl,--no-fix-cortex-a8 else - arch_variant_cflags := -march=armv7-a +ifeq ($(strip $(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT)),cortex-a5) + arch_variant_cflags := -mcpu=cortex-a7 -mfpu=neon-vfpv4 + arch_variant_ldflags := \ + -Wl,--no-fix-cortex-a8 +else + arch_variant_cflags := -march=armv7-a -mfpu=neon # Generic ARM might be a Cortex A8 -- better safe than sorry arch_variant_ldflags := \ -Wl,--fix-cortex-a8 endif endif endif +endif +endif ifeq (true,$(local_arch_has_lpae)) # Fake an ARM compiler flag as these processors support LPAE which GCC/clang @@ -50,5 +60,4 @@ endif local_arch_has_lpae := arch_variant_cflags += \ - -mfloat-abi=softfp \ - -mfpu=neon + -mfloat-abi=softfp diff --git a/core/combo/mac_version.mk b/core/combo/mac_version.mk index 51394c64b16..8fa08c6e655 100644 --- a/core/combo/mac_version.mk +++ b/core/combo/mac_version.mk @@ -9,7 +9,7 @@ ifndef build_mac_version build_mac_version := $(shell sw_vers -productVersion) -mac_sdk_versions_supported := 10.8 10.9 10.10 10.11 +mac_sdk_versions_supported := 10.8 10.9 10.10 10.11 10.12 ifneq ($(strip $(MAC_SDK_VERSION)),) mac_sdk_version := 
$(MAC_SDK_VERSION) ifeq ($(filter $(mac_sdk_version),$(mac_sdk_versions_supported)),) diff --git a/core/combo/select.mk b/core/combo/select.mk index df12e7e3845..97d62c6845f 100644 --- a/core/combo/select.mk +++ b/core/combo/select.mk @@ -33,9 +33,9 @@ $(combo_var_prefix)CXX := $(CXX) $(combo_var_prefix)AR := $(AR) $(combo_var_prefix)STRIP := $(STRIP) -$(combo_var_prefix)GLOBAL_CFLAGS := -fno-exceptions -Wno-multichar -$(combo_var_prefix)RELEASE_CFLAGS := -O2 -g -fno-strict-aliasing -$(combo_var_prefix)GLOBAL_CPPFLAGS := +$(combo_var_prefix)GLOBAL_CFLAGS := -fno-exceptions -Wno-multichar $(BOARD_GLOBAL_CFLAGS) +$(combo_var_prefix)RELEASE_CFLAGS := -O2 -g -fno-strict-aliasing $(BOARD_RELEASE_CFLAGS) +$(combo_var_prefix)GLOBAL_CPPFLAGS := $(BOARD_GLOBAL_CPPFLAGS) $(combo_var_prefix)GLOBAL_LDFLAGS := $(combo_var_prefix)GLOBAL_ARFLAGS := crsPD $(combo_var_prefix)GLOBAL_LD_DIRS := diff --git a/core/config.mk b/core/config.mk index 94c880f7a41..8a283b2471f 100644 --- a/core/config.mk +++ b/core/config.mk @@ -3,6 +3,24 @@ # current configuration and platform, which # are not specific to what is being built. +# These may be used to trace makefile issues without interfering with +# envsetup.sh. Usage: +# $(call ainfo,some info message) +# $(call aerror,some error message) +ifdef CALLED_FROM_SETUP +define ainfo +endef +define aerror +endef +else +define ainfo +$(info $(1)) +endef +define aerror +$(error $(1)) +endef +endif + # Only use ANDROID_BUILD_SHELL to wrap around bash. # DO NOT use other shells such as zsh. ifdef ANDROID_BUILD_SHELL @@ -40,6 +58,8 @@ $(warning The build system needs unmodified output of grep.) $(error Please remove --color=always from your $$GREP_OPTIONS) endif +UNAME := $(shell uname -sm) + # Standard source directories. 
SRC_DOCS:= $(TOPDIR)docs # TODO: Enforce some kind of layering; only add include paths @@ -50,7 +70,6 @@ SRC_HEADERS := \ $(TOPDIR)system/media/audio/include \ $(TOPDIR)hardware/libhardware/include \ $(TOPDIR)hardware/libhardware_legacy/include \ - $(TOPDIR)hardware/ril/include \ $(TOPDIR)libnativehelper/include \ $(TOPDIR)frameworks/native/include \ $(TOPDIR)frameworks/native/opengl/include \ @@ -85,6 +104,7 @@ BUILD_EXECUTABLE:= $(BUILD_SYSTEM)/executable.mk BUILD_HOST_EXECUTABLE:= $(BUILD_SYSTEM)/host_executable.mk BUILD_PACKAGE:= $(BUILD_SYSTEM)/package.mk BUILD_PHONY_PACKAGE:= $(BUILD_SYSTEM)/phony_package.mk +BUILD_RRO_PACKAGE:= $(BUILD_SYSTEM)/build_rro_package.mk BUILD_HOST_PREBUILT:= $(BUILD_SYSTEM)/host_prebuilt.mk BUILD_PREBUILT:= $(BUILD_SYSTEM)/prebuilt.mk BUILD_MULTI_PREBUILT:= $(BUILD_SYSTEM)/multi_prebuilt.mk @@ -144,6 +164,9 @@ else JAVA_TMPDIR_ARG := endif +# Default shell is mksh. Other possible value is ash. +TARGET_SHELL := mksh + # ############################################################### # Include sub-configuration files # ############################################################### @@ -165,7 +188,14 @@ include $(BUILD_SYSTEM)/envsetup.mk # Pruned directory options used when using findleaves.py # See envsetup.mk for a description of SCAN_EXCLUDE_DIRS -FIND_LEAVES_EXCLUDES := $(addprefix --prune=, $(OUT_DIR) $(SCAN_EXCLUDE_DIRS) .repo .git) +FIND_LEAVES_EXCLUDES := $(addprefix --prune=, $(SCAN_EXCLUDE_DIRS) .repo .git) + +# General entries for project pathmap. Any entries listed here should +# be device and hardware independent. 
+$(call project-set-path-variant,recovery,RECOVERY_VARIANT,bootable/recovery) + +-include vendor/extra/BoardConfigExtra.mk +-include vendor/aicp/configs/BoardConfigCM.mk # The build system exposes several variables for where to find the kernel # headers: @@ -455,7 +485,6 @@ BUILD_PLATFORM_ZIP := $(filter platform platform-java,$(MAKECMDGOALS)) # Tools that are prebuilts for TARGET_BUILD_APPS # -ACP := $(HOST_OUT_EXECUTABLES)/acp AIDL := $(HOST_OUT_EXECUTABLES)/aidl AAPT := $(HOST_OUT_EXECUTABLES)/aapt AAPT2 := $(HOST_OUT_EXECUTABLES)/aapt2 @@ -468,14 +497,23 @@ BCC_COMPAT := $(HOST_OUT_EXECUTABLES)/bcc_compat DX := $(HOST_OUT_EXECUTABLES)/dx MAINDEXCLASSES := $(HOST_OUT_EXECUTABLES)/mainDexClasses +# Always use prebuilts for ckati and makeparallel +prebuilt_build_tools := prebuilts/build-tools +ifeq ($(filter address,$(SANITIZE_HOST)),) +prebuilt_build_tools_bin := $(prebuilt_build_tools)/$(HOST_PREBUILT_TAG)/bin +else +prebuilt_build_tools_bin := $(prebuilt_build_tools)/$(HOST_PREBUILT_TAG)/asan/bin +endif +ACP := $(prebuilt_build_tools_bin)/acp +CKATI := $(prebuilt_build_tools_bin)/ckati +IJAR := $(prebuilt_build_tools_bin)/ijar +MAKEPARALLEL := $(prebuilt_build_tools_bin)/makeparallel +ZIPTIME := $(prebuilt_build_tools_bin)/ziptime + USE_PREBUILT_SDK_TOOLS_IN_PLACE := true # Override the definitions above for unbundled and PDK builds ifneq (,$(TARGET_BUILD_APPS)$(filter true,$(TARGET_BUILD_PDK))) -prebuilt_sdk_tools := prebuilts/sdk/tools -prebuilt_sdk_tools_bin := $(prebuilt_sdk_tools)/$(HOST_OS)/bin - -ACP := $(prebuilt_sdk_tools_bin)/acp AIDL := $(prebuilt_sdk_tools_bin)/aidl AAPT := $(prebuilt_sdk_tools_bin)/aapt AAPT2 := $(prebuilt_sdk_tools_bin)/aapt2 @@ -499,13 +537,21 @@ endif # TARGET_BUILD_APPS || TARGET_BUILD_PDK # Generic tools. 
JACK := $(HOST_OUT_EXECUTABLES)/jack +ifeq ($(USE_HOST_LEX),yes) +LEX := flex +else LEX := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/flex/flex-2.5.39 +endif # The default PKGDATADIR built in the prebuilt bison is a relative path # external/bison/data. # To run bison from elsewhere you need to set up enviromental variable # BISON_PKGDATADIR. BISON_PKGDATADIR := $(PWD)/external/bison/data +ifeq ($(USE_HOST_BISON),yes) +BISON := $(HOST_OUT_EXECUTABLES)/bison +else BISON := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/bison/bison +endif YACC := $(BISON) -d YASM := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/yasm/yasm @@ -528,7 +574,9 @@ MKBOOTIMG := $(HOST_OUT_EXECUTABLES)/mkbootimg$(HOST_EXECUTABLE_SUFFIX) else MKBOOTIMG := $(BOARD_CUSTOM_MKBOOTIMG) endif +MKYAFFS2 := $(HOST_OUT_EXECUTABLES)/mkyaffs2image$(HOST_EXECUTABLE_SUFFIX) APICHECK := $(HOST_OUT_EXECUTABLES)/apicheck$(HOST_EXECUTABLE_SUFFIX) +MKIMAGE := $(HOST_OUT_EXECUTABLES)/mkimage$(HOST_EXECUTABLE_SUFFIX) FS_GET_STATS := $(HOST_OUT_EXECUTABLES)/fs_get_stats$(HOST_EXECUTABLE_SUFFIX) MAKE_EXT4FS := $(HOST_OUT_EXECUTABLES)/make_ext4fs$(HOST_EXECUTABLE_SUFFIX) BLK_ALLOC_TO_BASE_FS := $(HOST_OUT_EXECUTABLES)/blk_alloc_to_base_fs$(HOST_EXECUTABLE_SUFFIX) @@ -570,13 +618,6 @@ FUTILITY := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/futility/futility VBOOT_SIGNER := prebuilts/misc/scripts/vboot_signer/vboot_signer.sh FEC := $(HOST_OUT_EXECUTABLES)/fec -ifndef TARGET_BUILD_APPS -ZIPTIME := $(HOST_OUT_EXECUTABLES)/ziptime$(HOST_EXECUTABLE_SUFFIX) -endif - -# ijar converts a .jar file to a smaller .jar file which only has its -# interfaces. 
-IJAR := $(HOST_OUT_EXECUTABLES)/ijar$(BUILD_EXECUTABLE_SUFFIX) DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump2$(BUILD_EXECUTABLE_SUFFIX) # relocation packer @@ -600,12 +641,6 @@ ifeq ($(wildcard $(HOST_JDK_TOOLS_JAR)),) $(error Error: could not find jdk tools.jar at $(HOST_JDK_TOOLS_JAR), please check if your JDK was installed correctly) endif endif - -# Is the host JDK 64-bit version? -HOST_JDK_IS_64BIT_VERSION := -ifneq ($(filter 64-Bit, $(shell java -version 2>&1)),) -HOST_JDK_IS_64BIT_VERSION := true -endif endif # CALLED_FROM_SETUP not true # It's called md5 on Mac OS and md5sum on Linux @@ -627,6 +662,12 @@ else DEFAULT_SYSTEM_DEV_CERTIFICATE := build/target/product/security/testkey endif +# Rules for QCOM targets +include $(BUILD_SYSTEM)/qcom_target.mk + +# Rules for MTK targets +include $(BUILD_SYSTEM)/mtk_target.mk + # ############################################################### # Set up final options. # ############################################################### @@ -708,7 +749,8 @@ HOST_GLOBAL_LD_DIRS += -L$(HOST_OUT_INTERMEDIATE_LIBRARIES) TARGET_GLOBAL_LD_DIRS += -L$(TARGET_OUT_INTERMEDIATE_LIBRARIES) HOST_PROJECT_INCLUDES:= $(SRC_HEADERS) $(SRC_HOST_HEADERS) $(HOST_OUT_HEADERS) -TARGET_PROJECT_INCLUDES:= $(SRC_HEADERS) $(TARGET_OUT_HEADERS) \ +TARGET_PROJECT_INCLUDES:= $(SRC_HEADERS) $(TOPDIR)$(call project-path-for,ril)/include \ + $(TARGET_OUT_HEADERS) \ $(TARGET_DEVICE_KERNEL_HEADERS) $(TARGET_BOARD_KERNEL_HEADERS) \ $(TARGET_PRODUCT_KERNEL_HEADERS) @@ -867,4 +909,28 @@ endif RSCOMPAT_32BIT_ONLY_API_LEVELS := 8 9 10 11 12 13 14 15 16 17 18 19 20 RSCOMPAT_NO_USAGEIO_API_LEVELS := 8 9 10 11 12 13 +# We might want to skip items listed in PRODUCT_COPY_FILES based on +# various target flags. This is useful for replacing a binary module with one +# built from source. 
This should be a list of destination files under $OUT +# +TARGET_COPY_FILES_OVERRIDES := \ + $(addprefix %:, $(strip $(TARGET_COPY_FILES_OVERRIDES))) + +ifneq ($(TARGET_COPY_FILES_OVERRIDES),) + PRODUCT_COPY_FILES := $(filter-out $(TARGET_COPY_FILES_OVERRIDES), $(PRODUCT_COPY_FILES)) +endif + +ifneq ($(AICP_BUILD),) +## We need to be sure the global selinux policies are included +## last, to avoid accidental resetting by device configs +$(eval include vendor/aicp/sepolicy/sepolicy.mk) + +# Include any vendor specific config.mk file +-include $(TOPDIR)vendor/*/build/core/config.mk + +# Include any vendor specific apicheck.mk file +-include $(TOPDIR)vendor/*/build/core/apicheck.mk + +endif + include $(BUILD_SYSTEM)/dumpvar.mk diff --git a/core/definitions.mk b/core/definitions.mk index 84ea80197e1..cb4b7dddc10 100644 --- a/core/definitions.mk +++ b/core/definitions.mk @@ -426,7 +426,7 @@ endef define find-subdir-assets $(sort $(if $(1),$(patsubst ./%,%, \ $(shell if [ -d $(1) ] ; then cd $(1) ; find -L ./ -not -name '.*' -and -type f -and -not -type l ; fi)), \ - $(warning Empty argument supplied to find-subdir-assets) \ + $(warning Empty argument supplied to find-subdir-assets in $(LOCAL_PATH)) \ )) endef @@ -438,6 +438,10 @@ define find-other-java-files $(call all-java-files-under,$(1)) endef +define find-other-aidl-files + $(call find-subdir-files,$(1) -name "*.aidl" -and -not -name ".*") +endef + define find-other-html-files $(call all-html-files-under,$(1)) endef @@ -723,12 +727,6 @@ define jack-lib-files $(foreach lib,$(1),$(call _jack-lib-full-classes,$(lib),$(2))) endef -# $(1): library name list -# $(2): Non-empty if IS_HOST_MODULE -define jack-lib-deps -$(call jack-lib-files,$(1),$(2)) -endef - ########################################################### ## Run rot13 on a string ## $(1): the string. Must be one line. 
@@ -1205,7 +1203,7 @@ endef ########################################################### define transform-cpp-to-o -@echo "target $(PRIVATE_ARM_MODE) C++: $(PRIVATE_MODULE) <= $<" +@echo "target $(PRIVATE_ARM_MODE) C++:"" $(PRIVATE_MODULE) <= $<" @mkdir -p $(dir $@) $(hide) $(RELATIVE_PWD) $(PRIVATE_CXX) \ $(addprefix -I , $(PRIVATE_C_INCLUDES)) \ @@ -1258,7 +1256,7 @@ $(hide) $(RELATIVE_PWD) $(PRIVATE_CC) \ endef define transform-c-to-o-no-deps -@echo "target $(PRIVATE_ARM_MODE) C: $(PRIVATE_MODULE) <= $<" +@echo "target $(PRIVATE_ARM_MODE) C:"" $(PRIVATE_MODULE) <= $<" $(call transform-c-or-s-to-o-no-deps, \ $(PRIVATE_CFLAGS) \ $(PRIVATE_CONLYFLAGS) \ @@ -1267,7 +1265,7 @@ $(call transform-c-or-s-to-o-no-deps, \ endef define transform-s-to-o-no-deps -@echo "target asm: $(PRIVATE_MODULE) <= $<" +@echo "target asm:"" $(PRIVATE_MODULE) <= $<" $(call transform-c-or-s-to-o-no-deps, $(PRIVATE_ASFLAGS)) endef @@ -1298,7 +1296,7 @@ endef ########################################################### define transform-m-to-o-no-deps -@echo "target ObjC: $(PRIVATE_MODULE) <= $<" +@echo "target ObjC:"" $(PRIVATE_MODULE) <= $<" $(call transform-c-or-s-to-o-no-deps, $(PRIVATE_CFLAGS) $(PRIVATE_DEBUG_CFLAGS)) endef @@ -1312,7 +1310,7 @@ endef ########################################################### define transform-host-cpp-to-o -@echo "$($(PRIVATE_PREFIX)DISPLAY) C++: $(PRIVATE_MODULE) <= $<" +@echo "$($(PRIVATE_PREFIX)DISPLAY) C++:"" $(PRIVATE_MODULE) <= $<" @mkdir -p $(dir $@) $(hide) $(RELATIVE_PWD) $(PRIVATE_CXX) \ $(addprefix -I , $(PRIVATE_C_INCLUDES)) \ @@ -1363,12 +1361,12 @@ $(hide) $(RELATIVE_PWD) $(PRIVATE_CC) \ endef define transform-host-c-to-o-no-deps -@echo "$($(PRIVATE_PREFIX)DISPLAY) C: $(PRIVATE_MODULE) <= $<" +@echo "$($(PRIVATE_PREFIX)DISPLAY) C:"" $(PRIVATE_MODULE) <= $<" $(call transform-host-c-or-s-to-o-no-deps, $(PRIVATE_CFLAGS) $(PRIVATE_CONLYFLAGS) $(PRIVATE_DEBUG_CFLAGS)) endef define transform-host-s-to-o-no-deps -@echo "$($(PRIVATE_PREFIX)DISPLAY) 
asm: $(PRIVATE_MODULE) <= $<" +@echo "$($(PRIVATE_PREFIX)DISPLAY) asm:"" $(PRIVATE_MODULE) <= $<" $(call transform-host-c-or-s-to-o-no-deps, $(PRIVATE_ASFLAGS)) endef @@ -1387,7 +1385,7 @@ endef ########################################################### define transform-host-m-to-o-no-deps -@echo "$($(PRIVATE_PREFIX)DISPLAY) ObjC: $(PRIVATE_MODULE) <= $<" +@echo "$($(PRIVATE_PREFIX)DISPLAY) ObjC:"" $(PRIVATE_MODULE) <= $<" $(call transform-host-c-or-s-to-o-no-deps, $(PRIVATE_CFLAGS) $(PRIVATE_DEBUG_CFLAGS)) endef @@ -1527,7 +1525,7 @@ endef # Explicitly delete the archive first so that ar doesn't # try to add to an existing archive. define transform-o-to-static-lib -@echo "target StaticLib: $(PRIVATE_MODULE) ($@)" +@echo "target StaticLib:"" $(PRIVATE_MODULE) ($@)" @mkdir -p $(dir $@) @rm -f $@ $(extract-and-include-target-whole-static-libs) @@ -1575,7 +1573,7 @@ endef # Explicitly delete the archive first so that ar doesn't # try to add to an existing archive. define transform-host-o-to-static-lib -@echo "$($(PRIVATE_PREFIX)DISPLAY) StaticLib: $(PRIVATE_MODULE) ($@)" +@echo "$($(PRIVATE_PREFIX)DISPLAY) StaticLib:"" $(PRIVATE_MODULE) ($@)" @mkdir -p $(dir $@) @rm -f $@ $(extract-and-include-host-whole-static-libs) @@ -1619,13 +1617,13 @@ endef endif define transform-host-o-to-shared-lib -@echo "$($(PRIVATE_PREFIX)DISPLAY) SharedLib: $(PRIVATE_MODULE) ($@)" +@echo "$($(PRIVATE_PREFIX)DISPLAY) SharedLib:"" $(PRIVATE_MODULE) ($@)" @mkdir -p $(dir $@) $(transform-host-o-to-shared-lib-inner) endef define transform-host-o-to-package -@echo "$($(PRIVATE_PREFIX)DISPLAY) Package: $(PRIVATE_MODULE) ($@)" +@echo "$($(PRIVATE_PREFIX)DISPLAY) Package:"" $(PRIVATE_MODULE) ($@)" @mkdir -p $(dir $@) $(transform-host-o-to-shared-lib-inner) endef @@ -1661,7 +1659,7 @@ $(hide) $(PRIVATE_CXX) \ endef define transform-o-to-shared-lib -@echo "target SharedLib: $(PRIVATE_MODULE) ($@)" +@echo "target SharedLib:"" $(PRIVATE_MODULE) ($@)" @mkdir -p $(dir $@) 
$(transform-o-to-shared-lib-inner) endef @@ -1676,14 +1674,14 @@ ifneq ($(TARGET_BUILD_VARIANT),user) endif define transform-to-stripped -@echo "target Strip: $(PRIVATE_MODULE) ($@)" +@echo "target Strip:"" $(PRIVATE_MODULE) ($@)" @mkdir -p $(dir $@) $(hide) $(PRIVATE_STRIP) --strip-all $< -o $@ \ $(if $(PRIVATE_NO_DEBUGLINK),,$(TARGET_STRIP_EXTRA)) endef define transform-to-stripped-keep-symbols -@echo "target Strip (keep symbols): $(PRIVATE_MODULE) ($@)" +@echo "target Strip (keep symbols):"" $(PRIVATE_MODULE) ($@)" @mkdir -p $(dir $@) $(hide) $(PRIVATE_OBJCOPY) \ `$(PRIVATE_READELF) -S $< | awk '/.debug_/ {print "-R " $$2}' | xargs` \ @@ -1732,7 +1730,7 @@ $(hide) $(PRIVATE_CXX) -pie \ endef define transform-o-to-executable -@echo "target Executable: $(PRIVATE_MODULE) ($@)" +@echo "target Executable:"" $(PRIVATE_MODULE) ($@)" @mkdir -p $(dir $@) $(transform-o-to-executable-inner) endef @@ -1775,7 +1773,7 @@ $(hide) $(PRIVATE_CXX) \ endef define transform-o-to-static-executable -@echo "target StaticExecutable: $(PRIVATE_MODULE) ($@)" +@echo "target StaticExecutable:"" $(PRIVATE_MODULE) ($@)" @mkdir -p $(dir $@) $(transform-o-to-static-executable-inner) endef @@ -1821,7 +1819,7 @@ endef endif define transform-host-o-to-executable -@echo "$($(PRIVATE_PREFIX)DISPLAY) Executable: $(PRIVATE_MODULE) ($@)" +@echo "$($(PRIVATE_PREFIX)DISPLAY) Executable:"" $(PRIVATE_MODULE) ($@)" @mkdir -p $(dir $@) $(transform-host-o-to-executable-inner) endef @@ -2053,7 +2051,13 @@ $(hide) if [ -s $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq ] ; the -extdirs "" -d $(PRIVATE_CLASS_INTERMEDIATES_DIR) \ $(PRIVATE_JAVACFLAGS) \ \@$(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq \ - || ( rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR) ; exit 41 ) \ + 2>$(PRIVATE_CLASS_INTERMEDIATES_DIR)/stderr \ + && ( [ -s $(PRIVATE_CLASS_INTERMEDIATES_DIR)/stderr ] && \ + echo "`cat $(PRIVATE_CLASS_INTERMEDIATES_DIR)/stderr`" 1>&2; \ + rm -f $(PRIVATE_CLASS_INTERMEDIATES_DIR)/stderr ) \ + || 
( [ -s $(PRIVATE_CLASS_INTERMEDIATES_DIR)/stderr ] && \ + echo "`cat $(PRIVATE_CLASS_INTERMEDIATES_DIR)/stderr`" 1>&2; \ + rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR); exit 41 ) \ fi $(if $(PRIVATE_JAVA_LAYERS_FILE), $(hide) build/tools/java-layers.py \ $(PRIVATE_JAVA_LAYERS_FILE) \@$(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq,) @@ -2082,7 +2086,7 @@ $(if $(PRIVATE_EXTRA_JAR_ARGS),$(call add-java-resources-to,$@)) endef define transform-java-to-classes.jar -@echo "target Java: $(PRIVATE_MODULE) ($(PRIVATE_CLASS_INTERMEDIATES_DIR))" +@echo "target Java:"" $(PRIVATE_MODULE) ($(PRIVATE_CLASS_INTERMEDIATES_DIR))" $(call compile-java,$(TARGET_JAVAC),$(PRIVATE_BOOTCLASSPATH)) endef @@ -2129,7 +2133,7 @@ $(call call-jack) \ $(if $(PRIVATE_RMTYPEDEFS), \ -D jack.android.remove-typedef="true") \ $(addprefix --classpath ,$(strip \ - $(call normalize-path-list,$(PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES) $(PRIVATE_ALL_JACK_LIBRARIES)))) \ + $(call normalize-path-list,$(PRIVATE_JACK_SHARED_LIBRARIES)))) \ $(addprefix --import ,$(call reverse-list,$(PRIVATE_STATIC_JACK_LIBRARIES))) \ $(if $(PRIVATE_EXTRA_JAR_ARGS),--import-resource $@.res.tmp) \ -D jack.android.min-api-level=$(PRIVATE_JACK_MIN_SDK_VERSION) \ @@ -2174,7 +2178,7 @@ $(hide) if [ -s $@.java-source-list-uniq ] ; then \ $(strip $(PRIVATE_JACK_FLAGS)) \ $(strip $(PRIVATE_JACK_DEBUG_FLAGS)) \ $(addprefix --classpath ,$(strip \ - $(call normalize-path-list,$(call reverse-list,$(PRIVATE_STATIC_JACK_LIBRARIES)) $(PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES) $(PRIVATE_ALL_JACK_LIBRARIES)))) \ + $(call normalize-path-list,$(call reverse-list,$(PRIVATE_STATIC_JACK_LIBRARIES)) $(PRIVATE_JACK_SHARED_LIBRARIES)))) \ -D jack.import.resource.policy=keep-first \ -D jack.android.min-api-level=$(PRIVATE_JACK_MIN_SDK_VERSION) \ -D jack.import.type.policy=keep-first \ @@ -2304,7 +2308,7 @@ $(call call-jack) \ $(if $(NO_OPTIMIZE_DX), \ -D jack.dex.optimize="false") \ $(addprefix --classpath ,$(strip \ - $(call 
normalize-path-list,$(PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES) $(PRIVATE_ALL_JACK_LIBRARIES)))) \ + $(call normalize-path-list,$(PRIVATE_JACK_SHARED_LIBRARIES)))) \ $(addprefix --import ,$(call reverse-list,$(PRIVATE_STATIC_JACK_LIBRARIES))) \ $(if $(PRIVATE_EXTRA_JAR_ARGS),--import-resource $@.res.tmp) \ -D jack.import.resource.policy=keep-first \ @@ -2495,11 +2499,9 @@ endef # Remove dynamic timestamps from packages # -ifndef TARGET_BUILD_APPS define remove-timestamps-from-package $(hide) $(ZIPTIME) $@ endef -endif # Uncompress shared libraries embedded in an apk. # @@ -2533,7 +2535,7 @@ endef # Note: we intentionally don't clean PRIVATE_CLASS_INTERMEDIATES_DIR # in transform-java-to-classes for the sake of vm-tests. define transform-host-java-to-package -@echo "$($(PRIVATE_PREFIX)DISPLAY) Java: $(PRIVATE_MODULE) ($(PRIVATE_CLASS_INTERMEDIATES_DIR))" +@echo "$($(PRIVATE_PREFIX)DISPLAY) Java:"" $(PRIVATE_MODULE) ($(PRIVATE_CLASS_INTERMEDIATES_DIR))" $(call compile-java,$(HOST_JAVAC),$(PRIVATE_BOOTCLASSPATH)) endef @@ -2546,7 +2548,7 @@ endef # $(2): destination header define copy-one-header $(2): $(1) - @echo "Header: $$@" + @echo "Header:"" $$@" $$(copy-file-to-new-target-with-cp) endef @@ -2555,7 +2557,7 @@ endef # $(2): destination file define copy-one-file $(2): $(1) | $(ACP) - @echo "Copy: $$@" + @echo "Copy:"" $$@" $$(copy-file-to-target) endef @@ -2576,7 +2578,7 @@ endef # $(2): destination file, must end with .xml. define copy-xml-file-checked $(2): $(1) | $(ACP) - @echo "Copy xml: $$@" + @echo "Copy xml:"" $$@" $(hide) xmllint $$< >/dev/null # Don't print the xml file to stdout. $$(copy-file-to-target) endef @@ -2634,19 +2636,19 @@ endef # Copy a prebuilt file to a target location. 
define transform-prebuilt-to-target -@echo "$(if $(PRIVATE_IS_HOST_MODULE),host,target) Prebuilt: $(PRIVATE_MODULE) ($@)" +@echo "$(if $(PRIVATE_IS_HOST_MODULE),host,target) Prebuilt:"" $(PRIVATE_MODULE) ($@)" $(copy-file-to-target) endef # Copy a prebuilt file to a target location, using zipalign on it. define transform-prebuilt-to-target-with-zipalign -@echo "$(if $(PRIVATE_IS_HOST_MODULE),host,target) Prebuilt APK: $(PRIVATE_MODULE) ($@)" +@echo "$(if $(PRIVATE_IS_HOST_MODULE),host,target) Prebuilt APK:"" $(PRIVATE_MODULE) ($@)" $(copy-file-to-target-with-zipalign) endef # Copy a prebuilt file to a target location, stripping "# comment" comments. define transform-prebuilt-to-target-strip-comments -@echo "$(if $(PRIVATE_IS_HOST_MODULE),host,target) Prebuilt: $(PRIVATE_MODULE) ($@)" +@echo "$(if $(PRIVATE_IS_HOST_MODULE),host,target) Prebuilt:"" $(PRIVATE_MODULE) ($@)" $(copy-file-to-target-strip-comments) endef @@ -2665,8 +2667,8 @@ endef ########################################################### ## Commands to call Proguard ########################################################### define transform-jar-to-proguard -@echo Proguard: $@ +@echo "Proguard:"" $@" $(hide) $(PROGUARD) -injars $< -outjars $@ $(PRIVATE_PROGUARD_FLAGS) \ $(addprefix -injars , $(PRIVATE_EXTRA_INPUT_JAR)) endef @@ -2676,7 +2679,7 @@ endef ########################################################### define transform-generated-source -@echo "target Generated: $(PRIVATE_MODULE) <= $<" +@echo "target Generated:"" $(PRIVATE_MODULE) <= $<" @mkdir -p $(dir $@) $(hide) $(PRIVATE_CUSTOM_TOOL) endef diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk index 5df9dc30a90..d86f1d2ffed 100644 --- a/core/dex_preopt.mk +++ b/core/dex_preopt.mk @@ -23,21 +23,24 @@ DEX_PREOPT_DEFAULT ?= true # being used). To bundle everything one should set this to '%' SYSTEM_OTHER_ODEX_FILTER ?= app/% priv-app/% +# Method returning whether the install path $(1) should be for system_other.
+install-on-system-other = $(filter-out $(PRODUCT_SYSTEM_SERVER_APPS),$(basename $(notdir $(filter $(foreach f,$(SYSTEM_OTHER_ODEX_FILTER),$(TARGET_OUT)/$(f)),$(1))))) + # The default values for pre-opting: always preopt PIC. # Conditional to building on linux, as dex2oat currently does not work on darwin. -ifeq ($(HOST_OS),linux) - WITH_DEXPREOPT_PIC ?= true - WITH_DEXPREOPT ?= true +#ifeq ($(HOST_OS),linux) +# WITH_DEXPREOPT_PIC ?= true +# WITH_DEXPREOPT ?= true # For an eng build only pre-opt the boot image. This gives reasonable performance and still # allows a simple workflow: building in frameworks/base and syncing. - ifeq (eng,$(TARGET_BUILD_VARIANT)) - WITH_DEXPREOPT_BOOT_IMG_ONLY ?= true - endif +# ifeq (eng,$(TARGET_BUILD_VARIANT)) +# WITH_DEXPREOPT_BOOT_IMG_ONLY ?= true +# endif # Add mini-debug-info to the boot classpath unless explicitly asked not to. - ifneq (false,$(WITH_DEXPREOPT_DEBUG_INFO)) - PRODUCT_DEX_PREOPT_BOOT_FLAGS += --generate-mini-debug-info - endif -endif +# ifneq (false,$(WITH_DEXPREOPT_DEBUG_INFO)) +# PRODUCT_DEX_PREOPT_BOOT_FLAGS += --generate-mini-debug-info +# endif +#endif GLOBAL_DEXPREOPT_FLAGS := ifeq ($(WITH_DEXPREOPT_PIC),true) diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk index acd4a02dd95..944f800635e 100644 --- a/core/dex_preopt_libart.mk +++ b/core/dex_preopt_libart.mk @@ -27,7 +27,12 @@ COMPILED_CLASSES := $(call word-colon,1,$(firstword \ # start of image reserved address space LIBART_IMG_HOST_BASE_ADDRESS := 0x60000000 + +ifneq ($(LIBART_IMG_BASE),) +LIBART_IMG_TARGET_BASE_ADDRESS := $(LIBART_IMG_BASE) +else LIBART_IMG_TARGET_BASE_ADDRESS := 0x70000000 +endif define get-product-default-property $(strip $(patsubst $(1)=%,%,$(filter $(1)=%,$(PRODUCT_DEFAULT_PROPERTY_OVERRIDES)))) @@ -66,7 +71,7 @@ endef # $(2): the full install path (including file name) of the corresponding .apk. 
ifeq ($(BOARD_USES_SYSTEM_OTHER_ODEX),true) define get-odex-installed-file-path -$(if $(filter $(foreach f,$(SYSTEM_OTHER_ODEX_FILTER),$(TARGET_OUT)/$(f)),$(2)), +$(if $(call install-on-system-other, $(2)), $(call get-odex-file-path,$(1),$(patsubst $(TARGET_OUT)/%,$(TARGET_OUT_SYSTEM_OTHER)/%,$(2))), $(call get-odex-file-path,$(1),$(2))) endef diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk index b05d4da3d77..cbea522b4f9 100644 --- a/core/dex_preopt_odex_install.mk +++ b/core/dex_preopt_odex_install.mk @@ -42,7 +42,7 @@ endif # if installing into system, and odex are being installed into system_other, don't strip ifeq ($(BOARD_USES_SYSTEM_OTHER_ODEX),true) ifeq ($(LOCAL_DEX_PREOPT),true) -ifneq ($(filter $(foreach f,$(SYSTEM_OTHER_ODEX_FILTER),$(TARGET_OUT)/$(f)),$(my_module_path)),) +ifneq ($(call install-on-system-other, $(my_module_path)),) LOCAL_DEX_PREOPT := nostripping endif endif @@ -107,6 +107,17 @@ LOCAL_DEX_PREOPT_FLAGS := $(PRODUCT_DEX_PREOPT_DEFAULT_FLAGS) endif endif +ifneq (,$(filter $(PRODUCT_SYSTEM_SERVER_JARS) $(PRODUCT_SYSTEM_SERVER_APPS),$(LOCAL_MODULE))) + # Jars of system server, and apps loaded into system server should be + # compiled with the 'speed' compiler filter. + LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=speed +else + # If no compiler filter is specified, default to 'interpret-only' to save on storage. 
+ ifeq (,$(filter --compiler-filter=%, $(LOCAL_DEX_PREOPT_FLAGS))) + LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=interpret-only + endif +endif + $(built_odex): PRIVATE_DEX_PREOPT_FLAGS := $(LOCAL_DEX_PREOPT_FLAGS) endif diff --git a/core/distdir.mk b/core/distdir.mk index 51ec46efeff..b058dc26429 100644 --- a/core/distdir.mk +++ b/core/distdir.mk @@ -37,7 +37,7 @@ ifdef dist_goal define copy-one-dist-file $(3): $(2) $(2): $(1) - @echo "Dist: $$@" + @echo "Dist:"" $$@" $$(copy-file-to-new-target-with-cp) endef diff --git a/core/droiddoc.mk b/core/droiddoc.mk index f1435790f21..d29cbf4f060 100644 --- a/core/droiddoc.mk +++ b/core/droiddoc.mk @@ -167,7 +167,7 @@ $(full_target): \ $(full_java_lib_deps) \ $(LOCAL_MODULE_MAKEFILE_DEP) \ $(LOCAL_ADDITIONAL_DEPENDENCIES) - @echo Docs droiddoc: $(PRIVATE_OUT_DIR) + @echo "Docs droiddoc:"" $(PRIVATE_OUT_DIR)" $(hide) mkdir -p $(dir $@) $(addprefix $(hide) rm -rf ,$(PRIVATE_STUB_OUT_DIR)) $(call prepare-doc-source-list,$(PRIVATE_SRC_LIST_FILE),$(PRIVATE_JAVA_FILES), \ @@ -204,7 +204,7 @@ else ## ## $(full_target): $(full_src_files) $(full_java_lib_deps) - @echo Docs javadoc: $(PRIVATE_OUT_DIR) + @echo "Docs javadoc:"" $(PRIVATE_OUT_DIR)" @mkdir -p $(dir $@) $(call prepare-doc-source-list,$(PRIVATE_SRC_LIST_FILE),$(PRIVATE_JAVA_FILES), \ $(PRIVATE_SOURCE_INTERMEDIATES_DIR) $(PRIVATE_ADDITIONAL_JAVA_DIR)) @@ -245,7 +245,7 @@ ifeq ($(strip $(LOCAL_UNINSTALLABLE_MODULE)),) out_zip := $(OUT_DOCS)/$(LOCAL_MODULE)-docs.zip $(out_zip): PRIVATE_DOCS_DIR := $(out_dir) $(out_zip): $(full_target) - @echo Package docs: $@ + @echo "Package docs:"" $@" @rm -f $@ @mkdir -p $(dir $@) $(hide) ( F=$$(pwd)/$@ ; cd $(PRIVATE_DOCS_DIR) && zip -rqX $$F * ) diff --git a/core/dumpvar.mk b/core/dumpvar.mk index 1bd477765d4..b667c8665bb 100644 --- a/core/dumpvar.mk +++ b/core/dumpvar.mk @@ -3,6 +3,7 @@ print_build_config_vars := \ PLATFORM_VERSION_CODENAME \ PLATFORM_VERSION \ + AICP_VERSION \ TARGET_PRODUCT \ TARGET_BUILD_VARIANT \ TARGET_BUILD_TYPE \ 
@@ -24,6 +25,16 @@ print_build_config_vars := \ BUILD_ID \ OUT_DIR +ifeq ($(WITH_SU),true) +print_build_config_vars += \ + WITH_SU +endif + +ifeq ($(WITH_GMS),true) +print_build_config_vars += \ + WITH_GMS +endif + ifeq ($(TARGET_BUILD_PDK),true) print_build_config_vars += \ TARGET_BUILD_PDK \ diff --git a/core/dynamic_binary.mk b/core/dynamic_binary.mk index 91fd27131f1..f7fb2068fe0 100644 --- a/core/dynamic_binary.mk +++ b/core/dynamic_binary.mk @@ -62,6 +62,14 @@ ifeq ($(LOCAL_MODULE_CLASS),EXECUTABLES) my_pack_module_relocations := false endif +# Likewise for recovery and utility executables +ifeq ($(LOCAL_MODULE_CLASS),RECOVERY_EXECUTABLES) + my_pack_module_relocations := false +endif +ifeq ($(LOCAL_MODULE_CLASS),UTILITY_EXECUTABLES) + my_pack_module_relocations := false +endif + # TODO (dimitry): Relocation packer is not yet available for darwin ifneq ($(HOST_OS),linux) my_pack_module_relocations := false @@ -88,7 +96,7 @@ endif symbolic_input := $(relocation_packer_output) symbolic_output := $(my_unstripped_path)/$(my_installed_module_stem) $(symbolic_output) : $(symbolic_input) | $(ACP) - @echo "target Symbolic: $(PRIVATE_MODULE) ($@)" + @echo "target Symbolic:"" $(PRIVATE_MODULE) ($@)" $(copy-file-to-target) ########################################################### @@ -153,11 +161,11 @@ else # use cp(1) instead. 
ifneq ($(LOCAL_ACP_UNAVAILABLE),true) $(strip_output): $(strip_input) | $(ACP) - @echo "target Unstripped: $(PRIVATE_MODULE) ($@)" + @echo "target Unstripped:"" $(PRIVATE_MODULE) ($@)" $(copy-file-to-target) else $(strip_output): $(strip_input) - @echo "target Unstripped: $(PRIVATE_MODULE) ($@)" + @echo "target Unstripped:"" $(PRIVATE_MODULE) ($@)" $(copy-file-to-target-with-cp) endif endif # my_strip_module diff --git a/core/envsetup.mk b/core/envsetup.mk index 6cfb6a1555a..2a4d0641fcf 100644 --- a/core/envsetup.mk +++ b/core/envsetup.mk @@ -54,7 +54,15 @@ ifneq (,$(findstring Macintosh,$(UNAME))) HOST_OS := darwin endif -HOST_OS_EXTRA:=$(shell python -c "import platform; print(platform.platform())") +HOST_OS_EXTRA := $(shell uname -rsm) +ifeq ($(HOST_OS),linux) + ifneq ($(wildcard /etc/os-release),) + HOST_OS_EXTRA += $(shell source /etc/os-release; echo $$PRETTY_NAME) + endif +else ifeq ($(HOST_OS),darwin) + HOST_OS_EXTRA += $(shell sw_vers -productVersion) +endif +HOST_OS_EXTRA := $(subst $(space),-,$(HOST_OS_EXTRA)) # BUILD_OS is the real host doing the build. BUILD_OS := $(HOST_OS) @@ -169,8 +177,8 @@ board_config_mk := # Now we can substitute with the real value of TARGET_COPY_OUT_VENDOR ifeq ($(TARGET_COPY_OUT_VENDOR),$(_vendor_path_placeholder)) TARGET_COPY_OUT_VENDOR := system/vendor -else ifeq ($(filter vendor system/vendor,$(TARGET_COPY_OUT_VENDOR)),) -$(error TARGET_COPY_OUT_VENDOR must be either 'vendor' or 'system/vendor', seeing '$(TARGET_COPY_OUT_VENDOR)'.) +else ifeq ($(filter vendor system/vendor system,$(TARGET_COPY_OUT_VENDOR)),) +$(error TARGET_COPY_OUT_VENDOR must be either 'vendor', 'system/vendor' or 'system', seeing '$(TARGET_COPY_OUT_VENDOR)'.) 
endif PRODUCT_COPY_FILES := $(subst $(_vendor_path_placeholder),$(TARGET_COPY_OUT_VENDOR),$(PRODUCT_COPY_FILES)) @@ -211,9 +219,13 @@ endif ifeq (,$(strip $(OUT_DIR))) ifeq (,$(strip $(OUT_DIR_COMMON_BASE))) +ifneq ($(TOPDIR),) OUT_DIR := $(TOPDIR)out else -OUT_DIR := $(OUT_DIR_COMMON_BASE)/$(notdir $(PWD)) +OUT_DIR := $(CURDIR)/out +endif +else +OUT_DIR := $(OUT_DIR_COMMON_BASE:/=)/$(notdir $(PWD)) endif endif diff --git a/core/executable.mk b/core/executable.mk index e22ea0ec00d..5a33db7bbbb 100644 --- a/core/executable.mk +++ b/core/executable.mk @@ -42,6 +42,10 @@ endif my_skip_non_preferred_arch := +ifeq ($(LOCAL_SDCLANG), true) +include $(SDCLANG_FLAG_DEFS) +endif + # check if preferred arch is supported include $(BUILD_SYSTEM)/module_arch_supported.mk ifeq ($(my_module_arch_supported),true) @@ -80,4 +84,10 @@ LOCAL_NO_2ND_ARCH_MODULE_SUFFIX := my_module_arch_supported := +ifeq ($(LOCAL_SDCLANG), true) +ifeq ($(LOCAL_SDCLANG_LTO), true) +include $(SDCLANG_LTO_DEFS) +endif +endif + endif diff --git a/core/generate_extra_images.mk b/core/generate_extra_images.mk new file mode 100644 index 00000000000..664b963f13e --- /dev/null +++ b/core/generate_extra_images.mk @@ -0,0 +1,114 @@ +# This makefile is used to generate extra images for QCOM targets +# persist, device tree & NAND images required for different QCOM targets. + +# These variables are required to make sure that the required +# files/targets are available before generating NAND images. +# This file is included from device/qcom/<target>/AndroidBoard.mk +# and gets parsed before build/core/Makefile, which has these +# variables defined. build/core/Makefile will overwrite these +# variables again.
+INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img +INSTALLED_RECOVERYIMAGE_TARGET := $(PRODUCT_OUT)/recovery.img +INSTALLED_USERDATAIMAGE_TARGET := $(PRODUCT_OUT)/userdata.img + +#---------------------------------------------------------------------- +# Generate secure boot & recovery image +#---------------------------------------------------------------------- +ifeq ($(TARGET_BOOTIMG_SIGNED),true) +INSTALLED_SEC_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img.secure +INSTALLED_SEC_RECOVERYIMAGE_TARGET := $(PRODUCT_OUT)/recovery.img.secure + +intermediates := $(call intermediates-dir-for,PACKAGING,recovery_patch) +RECOVERY_FROM_BOOT_PATCH := $(intermediates)/recovery_from_boot.p + +ifndef TARGET_SHA_TYPE + TARGET_SHA_TYPE := sha256 +endif + +define build-sec-image + $(hide) mv -f $(1) $(1).nonsecure + $(hide) openssl dgst -$(TARGET_SHA_TYPE) -binary $(1).nonsecure > $(1).$(TARGET_SHA_TYPE) + $(hide) openssl rsautl -sign -in $(1).$(TARGET_SHA_TYPE) -inkey $(PRODUCT_PRIVATE_KEY) -out $(1).sig + $(hide) dd if=/dev/zero of=$(1).sig.padded bs=$(BOARD_KERNEL_PAGESIZE) count=1 + $(hide) dd if=$(1).sig of=$(1).sig.padded conv=notrunc + $(hide) cat $(1).nonsecure $(1).sig.padded > $(1).secure + $(hide) rm -rf $(1).$(TARGET_SHA_TYPE) $(1).sig $(1).sig.padded + $(hide) mv -f $(1).secure $(1) +endef + +$(INSTALLED_SEC_BOOTIMAGE_TARGET): $(INSTALLED_BOOTIMAGE_TARGET) $(RECOVERY_FROM_BOOT_PATCH) + $(hide) $(call build-sec-image,$(INSTALLED_BOOTIMAGE_TARGET)) + +ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_SEC_BOOTIMAGE_TARGET) +ALL_MODULES.$(LOCAL_MODULE).INSTALLED += $(INSTALLED_SEC_BOOTIMAGE_TARGET) + +$(INSTALLED_SEC_RECOVERYIMAGE_TARGET): $(INSTALLED_RECOVERYIMAGE_TARGET) $(RECOVERY_FROM_BOOT_PATCH) + $(hide) $(call build-sec-image,$(INSTALLED_RECOVERYIMAGE_TARGET)) + +ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_SEC_RECOVERYIMAGE_TARGET) +ALL_MODULES.$(LOCAL_MODULE).INSTALLED += $(INSTALLED_SEC_RECOVERYIMAGE_TARGET) +endif # TARGET_BOOTIMG_SIGNED + 
+#---------------------------------------------------------------------- +# Generate persist image (persist.img) +#---------------------------------------------------------------------- +TARGET_OUT_PERSIST := $(PRODUCT_OUT)/persist + +INTERNAL_PERSISTIMAGE_FILES := \ + $(filter $(TARGET_OUT_PERSIST)/%,$(ALL_DEFAULT_INSTALLED_MODULES)) + +INSTALLED_PERSISTIMAGE_TARGET := $(PRODUCT_OUT)/persist.img + +define build-persistimage-target + $(call pretty,"Target persist fs image: $(INSTALLED_PERSISTIMAGE_TARGET)") + @mkdir -p $(TARGET_OUT_PERSIST) + $(hide) $(MKEXTUSERIMG) -s $(TARGET_OUT_PERSIST) $@ ext4 persist $(BOARD_PERSISTIMAGE_PARTITION_SIZE) + $(hide) chmod a+r $@ + $(hide) $(call assert-max-image-size,$@,$(BOARD_PERSISTIMAGE_PARTITION_SIZE),yaffs) +endef + +$(INSTALLED_PERSISTIMAGE_TARGET): $(MKEXTUSERIMG) $(MAKE_EXT4FS) $(INTERNAL_PERSISTIMAGE_FILES) + $(build-persistimage-target) + +ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_PERSISTIMAGE_TARGET) +ALL_MODULES.$(LOCAL_MODULE).INSTALLED += $(INSTALLED_PERSISTIMAGE_TARGET) + +#---------------------------------------------------------------------- +# Generate extra userdata images (for variants with multiple mmc sizes) +#---------------------------------------------------------------------- +ifneq ($(BOARD_USERDATAEXTRAIMAGE_PARTITION_SIZE),) + +ifndef BOARD_USERDATAEXTRAIMAGE_PARTITION_NAME + BOARD_USERDATAEXTRAIMAGE_PARTITION_NAME := extra +endif + +BUILT_USERDATAEXTRAIMAGE_TARGET := $(PRODUCT_OUT)/userdata_$(BOARD_USERDATAEXTRAIMAGE_PARTITION_NAME).img + +define build-userdataextraimage-target + $(call pretty,"Target EXTRA userdata fs image: $(INSTALLED_USERDATAEXTRAIMAGE_TARGET)") + @mkdir -p $(TARGET_OUT_DATA) + $(hide) $(MKEXTUSERIMG) -s $(TARGET_OUT_DATA) $@ ext4 data $(BOARD_USERDATAEXTRAIMAGE_PARTITION_SIZE) + $(hide) chmod a+r $@ + $(hide) $(call assert-max-image-size,$@,$(BOARD_USERDATAEXTRAIMAGE_PARTITION_SIZE),yaffs) +endef + +INSTALLED_USERDATAEXTRAIMAGE_TARGET := $(BUILT_USERDATAEXTRAIMAGE_TARGET) 
+$(INSTALLED_USERDATAEXTRAIMAGE_TARGET): $(INSTALLED_USERDATAIMAGE_TARGET) + $(build-userdataextraimage-target) + +ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_USERDATAEXTRAIMAGE_TARGET) +ALL_MODULES.$(LOCAL_MODULE).INSTALLED += $(INSTALLED_USERDATAEXTRAIMAGE_TARGET) + +endif + +.PHONY: aboot +aboot: $(INSTALLED_BOOTLOADER_MODULE) + +.PHONY: sec_bootimage +sec_bootimage: $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_SEC_BOOTIMAGE_TARGET) + +.PHONY: sec_recoveryimage +sec_recoveryimage: $(INSTALLED_RECOVERYIMAGE_TARGET) $(INSTALLED_SEC_RECOVERYIMAGE_TARGET) + +.PHONY: persistimage +persistimage: $(INSTALLED_PERSISTIMAGE_TARGET) diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk index 83047d4c3f2..2a55c056c47 100644 --- a/core/host_dalvik_java_library.mk +++ b/core/host_dalvik_java_library.mk @@ -28,7 +28,7 @@ include $(BUILD_SYSTEM)/host_java_library_common.mk ####################################### ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true) - LOCAL_JAVA_LIBRARIES += core-oj-hostdex core-libart-hostdex + LOCAL_JAVA_LIBRARIES := core-oj-hostdex core-libart-hostdex $(LOCAL_JAVA_LIBRARIES) endif full_classes_compiled_jar := $(intermediates.COMMON)/classes-full-debug.jar diff --git a/core/host_dalvik_static_java_library.mk b/core/host_dalvik_static_java_library.mk index c296be359e6..1783d6fc267 100644 --- a/core/host_dalvik_static_java_library.mk +++ b/core/host_dalvik_static_java_library.mk @@ -55,6 +55,5 @@ $(full_classes_jack): $(java_sources) $(java_resource_sources) $(full_jack_deps) @echo Building with Jack: $@ $(java-to-jack) -USE_CORE_LIB_BOOTCLASSPATH := LOCAL_IS_STATIC_JAVA_LIBRARY := endif diff --git a/core/host_java_library.mk b/core/host_java_library.mk index 97079fd3784..84cff42ef2f 100644 --- a/core/host_java_library.mk +++ b/core/host_java_library.mk @@ -98,7 +98,7 @@ $(full_classes_emma_jar) : $(full_classes_jarjar_jar) | $(EMMA_JAR) $(transform-classes.jar-to-emma) $(built_javalib_jar) : $(full_classes_emma_jar) - @echo 
Copying: $@ + @echo "Copying:"" $@" $(hide) $(ACP) -fp $< $@ else # LOCAL_EMMA_INSTRUMENT diff --git a/core/java.mk b/core/java.mk index 2602daf53ac..5fa1d4b050b 100644 --- a/core/java.mk +++ b/core/java.mk @@ -323,7 +323,7 @@ else # build against the platform. LOCAL_AIDL_INCLUDES += $(FRAMEWORKS_BASE_JAVA_SRC_DIRS) endif # LOCAL_SDK_VERSION -$(aidl_java_sources): PRIVATE_AIDL_FLAGS := -b $(addprefix -p,$(aidl_preprocess_import)) -I$(LOCAL_PATH) -I$(LOCAL_PATH)/src $(addprefix -I,$(LOCAL_AIDL_INCLUDES)) +$(aidl_java_sources): PRIVATE_AIDL_FLAGS := -b $(addprefix -p,$(aidl_preprocess_import)) -I$(LOCAL_PATH) -I$(LOCAL_PATH)/src $(addprefix -I,$(LOCAL_AIDL_INCLUDES)) $(LOCAL_AIDL_FLAGS) $(aidl_java_sources): $(intermediates.COMMON)/src/%.java: \ $(LOCAL_PATH)/%.aidl \ @@ -412,7 +412,7 @@ ifdef full_classes_jar # PRIVATE_ vars to be preserved. $(full_classes_stubs_jar): PRIVATE_SOURCE_FILE := $(full_classes_jar) $(full_classes_stubs_jar) : $(full_classes_jar) | $(ACP) - @echo Copying $(PRIVATE_SOURCE_FILE) + @echo "Copying"" $(PRIVATE_SOURCE_FILE)" $(hide) $(ACP) -fp $(PRIVATE_SOURCE_FILE) $@ ALL_MODULES.$(LOCAL_MODULE).STUBS := $(full_classes_stubs_jar) @@ -451,11 +451,11 @@ $(full_classes_compiled_jar): \ ifneq ($(strip $(LOCAL_JARJAR_RULES)),) $(full_classes_jarjar_jar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES) $(full_classes_jarjar_jar): $(full_classes_compiled_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR) - @echo JarJar: $@ + @echo "JarJar:"" $@" $(hide) java -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@ else $(full_classes_jarjar_jar): $(full_classes_compiled_jar) | $(ACP) - @echo Copying: $@ + @echo "Copying:"" $@" $(hide) $(ACP) -fp $< $@ endif @@ -483,7 +483,7 @@ endif # Keep a copy of the jar just before proguard processing. 
$(full_classes_jar): $(full_classes_jar_source) | $(ACP) - @echo Copying: $@ + @echo "Copying:"" $@" $(hide) $(ACP) -fp $< $@ $(call define-jar-to-toc-rule, $(full_classes_jar)) @@ -619,7 +619,7 @@ endif endif # LOCAL_JACK_ENABLED is disabled $(built_dex): $(built_dex_intermediate) | $(ACP) - @echo Copying: $@ + @echo "Copying:"" $@" $(hide) mkdir -p $(dir $@) $(hide) rm -f $(dir $@)/classes*.dex $(hide) $(ACP) -fp $(dir $<)/classes*.dex $(dir $@) diff --git a/core/java_common.mk b/core/java_common.mk index 9b7d10fba2c..efa7038c6f1 100644 --- a/core/java_common.mk +++ b/core/java_common.mk @@ -317,54 +317,16 @@ full_static_jack_libs := \ $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_STATIC_JACK_LIBRARIES := $(full_static_jack_libs) -ifndef LOCAL_IS_HOST_MODULE -ifeq ($(LOCAL_SDK_VERSION),) -ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true) -my_bootclasspath := -else -my_bootclasspath := $(call jack-lib-files,core-oj core-libart) -endif -else # LOCAL_SDK_VERSION -ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),current) -# LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS. 
-my_bootclasspath := $(call jack-lib-files,android_stubs_current) -else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),system_current) -my_bootclasspath := $(call jack-lib-files,android_system_stubs_current) -else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),test_current) -my_bootclasspath := $(call jack-lib-files,android_test_stubs_current) -else -my_bootclasspath :=$(call jack-lib-files,sdk_v$(LOCAL_SDK_VERSION)) -endif # current, system_current, or test_current -endif # LOCAL_SDK_VERSION -$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES := $(my_bootclasspath) - full_shared_jack_libs := $(call jack-lib-files,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE)) -full_jack_deps := $(call jack-lib-deps,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE)) +full_jack_deps := $(full_shared_jack_libs) + +ifndef LOCAL_IS_HOST_MODULE # Turn off .toc optimization for apps build as we cannot build dexdump. ifeq (,$(TARGET_BUILD_APPS)) full_jack_deps := $(patsubst %.jack, %.dex.toc, $(full_jack_deps)) endif - -else # LOCAL_IS_HOST_MODULE - -ifeq ($(USE_CORE_LIB_BOOTCLASSPATH),true) -ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true) -my_bootclasspath := -else -my_bootclasspath := $(call jack-lib-files,core-oj-hostdex core-libart-hostdex,$(LOCAL_IS_HOST_MODULE)) -endif -$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES := $(my_bootclasspath) -# Compiling against the final jack library. If we want to add support for obfuscated library -# we'll need to change that to compile against the not obfuscated jack library. 
-full_shared_jack_libs := $(call jack-lib-files,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE)) -full_jack_deps := $(call jack-lib-deps,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE)) -else -$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES := -full_shared_jack_libs := $(call jack-lib-deps,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE)) -full_jack_deps := $(full_shared_jack_libs) -endif # USE_CORE_LIB_BOOTCLASSPATH endif # !LOCAL_IS_HOST_MODULE -full_jack_libs := $(full_shared_jack_libs) $(full_static_jack_libs) $(LOCAL_JACK_CLASSPATH) +full_shared_jack_libs += $(LOCAL_JACK_CLASSPATH) full_jack_deps += $(full_static_jack_libs) $(LOCAL_JACK_CLASSPATH) ifndef LOCAL_IS_HOST_MODULE @@ -378,7 +340,6 @@ ifneq ($(apk_libraries),) # link against the jar with full original names (before proguard processing). full_shared_jack_libs += $(link_apk_jack_libraries) - full_jack_libs += $(link_apk_jack_libraries) full_jack_deps += $(link_apk_jack_libraries) endif @@ -388,14 +349,14 @@ endif ifdef LOCAL_INSTRUMENTATION_FOR # link against the jar with full original names (before proguard processing). link_instr_classes_jack := $(link_instr_intermediates_dir.COMMON)/classes.noshrob.jack - full_jack_libs += $(link_instr_classes_jack) + full_shared_jack_libs += $(link_instr_classes_jack) full_jack_deps += $(link_instr_classes_jack) endif # LOCAL_INSTRUMENTATION_FOR endif # !LOCAL_IS_HOST_MODULE # Propagate local configuration options to this target. 
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ALL_JACK_LIBRARIES:= $(full_jack_libs) -$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES) +$(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_JACK_SHARED_LIBRARIES:= $(full_shared_jack_libs) +$(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES) endif # need_compile_java endif # LOCAL_JACK_ENABLED diff --git a/core/java_library.mk b/core/java_library.mk index 81a4a6a30ee..2e11bfb6c03 100644 --- a/core/java_library.mk +++ b/core/java_library.mk @@ -71,7 +71,7 @@ $(common_javalib.jar) : $(full_classes_proguard_jar) else $(common_javalib.jar) : $(full_classes_jar) endif - @echo "target Static Jar: $(PRIVATE_MODULE) ($@)" + @echo "target Static Jar:"" $(PRIVATE_MODULE) ($@)" $(copy-file-to-target) ifdef LOCAL_JACK_ENABLED @@ -87,7 +87,7 @@ $(common_javalib.jar): PRIVATE_DEX_FILE := $(built_dex) $(common_javalib.jar): PRIVATE_SOURCE_ARCHIVE := $(full_classes_jarjar_jar) $(common_javalib.jar): PRIVATE_DONT_DELETE_JAR_DIRS := $(LOCAL_DONT_DELETE_JAR_DIRS) $(common_javalib.jar) : $(built_dex) $(java_resource_sources) | $(ZIPTIME) - @echo "target Jar: $(PRIVATE_MODULE) ($@)" + @echo "target Jar:"" $(PRIVATE_MODULE) ($@)" ifdef LOCAL_JACK_ENABLED $(create-empty-package) else @@ -111,7 +111,7 @@ else # ! boot jar $(built_odex): PRIVATE_MODULE := $(LOCAL_MODULE) # Use pattern rule - we may have multiple built odex files. 
$(built_odex) : $(dir $(LOCAL_BUILT_MODULE))% : $(common_javalib.jar) - @echo "Dexpreopt Jar: $(PRIVATE_MODULE) ($@)" + @echo "Dexpreopt Jar:"" $(PRIVATE_MODULE) ($@)" $(call dexpreopt-one-file,$<,$@) $(LOCAL_BUILT_MODULE) : $(common_javalib.jar) | $(ACP) diff --git a/core/main.mk b/core/main.mk index a612f835d5d..d6d9ff16e4d 100644 --- a/core/main.mk +++ b/core/main.mk @@ -123,8 +123,8 @@ endif $(shell mkdir -p $(OUT_DIR) && \ echo -n $(BUILD_NUMBER) > $(OUT_DIR)/build_number.txt && \ echo -n $(BUILD_DATETIME) > $(OUT_DIR)/build_date.txt) -BUILD_NUMBER_FROM_FILE := $$(cat $(OUT_DIR)/build_number.txt) -BUILD_DATETIME_FROM_FILE := $$(cat $(OUT_DIR)/build_date.txt) +BUILD_NUMBER_FROM_FILE := $(shell cat $(OUT_DIR)/build_number.txt) +BUILD_DATETIME_FROM_FILE := $(shell cat $(OUT_DIR)/build_date.txt) ifeq ($(HOST_OS),darwin) DATE_FROM_FILE := date -r $(BUILD_DATETIME_FROM_FILE) else @@ -136,6 +136,9 @@ endif # VTS-specific config. -include test/vts/tools/vts-tradefed/build/config.mk +# CMTS-specific config. +-include vendor/cmts/build/config.mk + # This allows us to force a clean build - included after the config.mk # environment setup is done, but before we generate any dependencies. This # file does the rm -rf inline so the deps which are all done below will @@ -228,7 +231,6 @@ $(info ************************************************************) $(info You asked for an OpenJDK based build but your version is) $(info $(java_version_str).) $(info ************************************************************) -$(error stop) endif # java version is not OpenJdk else # if requires_openjdk ifneq ($(shell echo '$(java_version_str)' | grep -i openjdk),) @@ -239,7 +241,6 @@ $(info You use OpenJDK but only Sun/Oracle JDK is supported.) 
$(info Please follow the machine setup instructions at) $(info $(space)$(space)$(space)$(space)https://source.android.com/source/download.html) $(info ************************************************************) -$(error stop) endif # java version is not Sun Oracle JDK endif # if requires_openjdk @@ -516,7 +517,12 @@ endif ifneq ($(ONE_SHOT_MAKEFILE),) # We've probably been invoked by the "mm" shell function # with a subdirectory's makefile. + +# No Makefiles to include if we are performing a mms/short-circuit build. Only +# the targets mentioned by main.mk and tasks/* are built (kernel, boot.img etc) +ifneq ($(ONE_SHOT_MAKEFILE),__none__) include $(ONE_SHOT_MAKEFILE) +endif # Change CUSTOM_MODULES to include only modules that were # defined by this makefile; this will install all of those # modules as a side-effect. Do this after including ONE_SHOT_MAKEFILE @@ -552,7 +558,7 @@ ifeq ($(USE_SOONG),true) subdir_makefiles := $(SOONG_ANDROID_MK) $(call filter-soong-makefiles,$(subdir_makefiles)) endif -$(foreach mk, $(subdir_makefiles),$(info including $(mk) ...)$(eval include $(mk))) +$(foreach mk, $(subdir_makefiles), $(eval include $(mk))) ifdef PDK_FUSION_PLATFORM_ZIP # Bring in the PDK platform.zip modules. @@ -1102,7 +1108,7 @@ $(foreach module,$(sample_MODULES),$(eval $(call \ sample_ADDITIONAL_INSTALLED := \ $(filter-out $(modules_to_install) $(modules_to_check) $(ALL_PREBUILT),$(sample_MODULES)) samplecode: $(sample_APKS_COLLECTION) - @echo "Collect sample code apks: $^" + @echo "Collect sample code apks:"" $^" # remove apks that are not intended to be installed. rm -f $(sample_ADDITIONAL_INSTALLED) endif # samplecode in $(MAKECMDGOALS) @@ -1112,7 +1118,7 @@ findbugs: $(INTERNAL_FINDBUGS_HTML_TARGET) $(INTERNAL_FINDBUGS_XML_TARGET) .PHONY: clean clean: - @rm -rf $(OUT_DIR)/* + @rm -rf $(OUT_DIR)/* $(OUT_DIR)/..?* $(OUT_DIR)/.[!.]* @echo "Entire build directory removed." 
.PHONY: clobber diff --git a/core/mtk_target.mk b/core/mtk_target.mk new file mode 100644 index 00000000000..02f7a2c67dd --- /dev/null +++ b/core/mtk_target.mk @@ -0,0 +1,16 @@ +# Bring in Mediatek helper macros too +include $(BUILD_SYSTEM)/mtk_utils.mk + +ifeq ($(BOARD_USES_MTK_HARDWARE),true) + mtk_flags := -DMTK_HARDWARE + + TARGET_GLOBAL_CFLAGS += $(mtk_flags) + TARGET_GLOBAL_CPPFLAGS += $(mtk_flags) + CLANG_TARGET_GLOBAL_CFLAGS += $(mtk_flags) + CLANG_TARGET_GLOBAL_CPPFLAGS += $(mtk_flags) + + 2ND_TARGET_GLOBAL_CFLAGS += $(mtk_flags) + 2ND_TARGET_GLOBAL_CPPFLAGS += $(mtk_flags) + 2ND_CLANG_TARGET_GLOBAL_CFLAGS += $(mtk_flags) + 2ND_CLANG_TARGET_GLOBAL_CPPFLAGS += $(mtk_flags) +endif diff --git a/core/mtk_utils.mk b/core/mtk_utils.mk new file mode 100755 index 00000000000..48fd6605443 --- /dev/null +++ b/core/mtk_utils.mk @@ -0,0 +1,5 @@ +# Board platforms lists to be used for +# TARGET_BOARD_PLATFORM specific featurization +MTK_BOARD_PLATFORMS := mt6592 +MTK_BOARD_PLATFORMS += mt6582 +MTK_BOARD_PLATFORMS += mt6572 diff --git a/core/ninja.mk b/core/ninja.mk index 9d0ff9a70b2..49c709eebd6 100644 --- a/core/ninja.mk +++ b/core/ninja.mk @@ -1,4 +1,8 @@ -NINJA ?= prebuilts/ninja/$(HOST_PREBUILT_TAG)/ninja +ifeq ($(filter address,$(SANITIZE_HOST)),) +NINJA ?= prebuilts/build-tools/$(HOST_PREBUILT_TAG)/bin/ninja +else +NINJA ?= prebuilts/build-tools/$(HOST_PREBUILT_TAG)/asan/bin/ninja +endif ifeq ($(USE_SOONG),true) USE_SOONG_FOR_KATI := true @@ -93,7 +97,7 @@ KATI_NINJA_SUFFIX := $(KATI_NINJA_SUFFIX)-mmma-$(call replace_space_and_slash,$( endif my_checksum_suffix := -my_ninja_suffix_too_long := $(filter 1, $(shell v='$(KATI_NINJA_SUFFIX)' && echo $$(($${$(pound)v} > 64)))) +my_ninja_suffix_too_long := $(filter 1, $(shell v='$(KATI_NINJA_SUFFIX)' && echo $$(($${$(pound)v} > 240)))) ifneq ($(my_ninja_suffix_too_long),) # Replace the suffix with a checksum if it gets too long. 
my_checksum_suffix := $(KATI_NINJA_SUFFIX) @@ -127,15 +131,24 @@ NINJA_REMOTE_NUM_JOBS ?= 500 NINJA_ARGS += -j$(NINJA_REMOTE_NUM_JOBS) else NINJA_MAKEPARALLEL := $(MAKEPARALLEL) --ninja + +# We never want Kati to see MAKEFLAGS, as forcefully overriding variables is +# terrible. The variables in MAKEFLAGS are still available in the environment, +# so if part of the build wants input from the user, it should be explicitly +# checking for an environment variable or using ?= +# +# makeparallel already clears MAKEFLAGS, so it's not necessary in the GOMA case +KATI_MAKEPARALLEL := MAKEFLAGS= endif ifeq ($(USE_SOONG),true) COMBINED_BUILD_NINJA := $(OUT_DIR)/combined$(KATI_NINJA_SUFFIX).ninja -$(COMBINED_BUILD_NINJA): $(KATI_BUILD_NINJA) $(SOONG_ANDROID_MK) +$(COMBINED_BUILD_NINJA): $(KATI_BUILD_NINJA) $(SOONG_ANDROID_MK) FORCE $(hide) echo "builddir = $(OUT_DIR)" > $(COMBINED_BUILD_NINJA) $(hide) echo "subninja $(SOONG_BUILD_NINJA)" >> $(COMBINED_BUILD_NINJA) $(hide) echo "subninja $(KATI_BUILD_NINJA)" >> $(COMBINED_BUILD_NINJA) + $(hide) echo "build $(COMBINED_BUILD_NINJA): phony $(SOONG_BUILD_NINJA)" >> $(COMBINED_BUILD_NINJA) else COMBINED_BUILD_NINJA := $(KATI_BUILD_NINJA) endif @@ -146,7 +159,7 @@ $(sort $(DEFAULT_GOAL) $(ANDROID_GOALS)) : ninja_wrapper .PHONY: ninja_wrapper ninja_wrapper: $(COMBINED_BUILD_NINJA) $(MAKEPARALLEL) @echo Starting build with ninja - +$(hide) export NINJA_STATUS="$(NINJA_STATUS)" && source $(KATI_ENV_SH) && $(NINJA_MAKEPARALLEL) $(NINJA) $(NINJA_GOALS) -C $(TOP) -f $(COMBINED_BUILD_NINJA) $(NINJA_ARGS) + +$(hide) export NINJA_STATUS="$(NINJA_STATUS)" && source $(KATI_ENV_SH) && exec $(NINJA_MAKEPARALLEL) $(NINJA) -d keepdepfile $(NINJA_GOALS) -C $(TOP) -f $(COMBINED_BUILD_NINJA) $(NINJA_ARGS) # Dummy Android.mk and CleanSpec.mk files so that kati won't recurse into the # out directory diff --git a/core/package_internal.mk b/core/package_internal.mk index 551f18e57b1..70acd301f45 100644 --- a/core/package_internal.mk +++ 
b/core/package_internal.mk @@ -140,9 +140,9 @@ my_overlay_resources := $(strip \ $(addprefix $(d)/, \ $(call find-subdir-assets,$(d))))) -my_res_resources := $(strip \ +my_res_resources := $(if $(my_res_dir),$(strip \ $(addprefix $(my_res_dir)/, \ - $(call find-subdir-assets,$(my_res_dir)))) + $(call find-subdir-assets,$(my_res_dir))))) all_resources := $(strip $(my_res_resources) $(my_overlay_resources)) @@ -436,11 +436,28 @@ framework_res_package_export_deps := $(framework_res_package_export) else # LOCAL_SDK_RES_VERSION framework_res_package_export := \ $(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk + +ifneq ($(TARGET_DISABLE_CMSDK), true) +# Avoid possible circular dependency with our platform-res +ifneq ($(LOCAL_IGNORE_SUBDIR), true) +cm_plat_res_package_export := \ + $(call intermediates-dir-for,APPS,org.cyanogenmod.platform-res,,COMMON)/package-export.apk +endif # LOCAL_IGNORE_SUBDIR +endif + # We can't depend directly on the export.apk file; it won't get its # PRIVATE_ vars set up correctly if we do. Instead, depend on the # corresponding R.stamp file, which lists the export.apk as a dependency. 
framework_res_package_export_deps := \ $(dir $(framework_res_package_export))src/R.stamp + +ifneq ($(TARGET_DISABLE_CMSDK), true) +ifneq ($(LOCAL_IGNORE_SUBDIR), true) +cm_plat_res_package_export_deps := \ + $(dir $(cm_plat_res_package_export))src/R.stamp +endif # LOCAL_IGNORE_SUBDIR +endif + endif # LOCAL_SDK_RES_VERSION all_library_res_package_exports := \ $(framework_res_package_export) \ @@ -451,6 +468,16 @@ all_library_res_package_export_deps := \ $(framework_res_package_export_deps) \ $(foreach lib,$(LOCAL_RES_LIBRARIES),\ $(call intermediates-dir-for,APPS,$(lib),,COMMON)/src/R.stamp) + +ifneq ($(TARGET_DISABLE_CMSDK), true) +ifneq ($(LOCAL_IGNORE_SUBDIR), true) +all_library_res_package_exports += \ + $(cm_plat_res_package_export) +all_library_res_package_export_deps += \ + $(cm_plat_res_package_export_deps) +endif # LOCAL_IGNORE_SUBDIR +endif + $(resource_export_package) $(R_file_stamp) $(LOCAL_BUILT_MODULE): $(all_library_res_package_export_deps) $(LOCAL_INTERMEDIATE_TARGETS): \ PRIVATE_AAPT_INCLUDES := $(all_library_res_package_exports) diff --git a/core/pathmap.mk b/core/pathmap.mk index effc8785685..803b4d9be6a 100644 --- a/core/pathmap.mk +++ b/core/pathmap.mk @@ -41,7 +41,6 @@ pathmap_INCL := \ libhardware_legacy:hardware/libhardware_legacy/include \ libril:hardware/ril/include \ opengl-tests-includes:frameworks/native/opengl/tests/include \ - recovery:bootable/recovery \ system-core:system/core/include \ audio:system/media/audio/include \ audio-effects:system/media/audio_effects/include \ @@ -63,6 +62,36 @@ define include-path-for $(foreach n,$(1),$(patsubst $(n):%,%,$(filter $(n):%,$(pathmap_INCL)))) endef +# Enter project path into pathmap +# +# $(1): name +# $(2): path +# +define project-set-path +$(eval pathmap_PROJ += $(1):$(2)) +endef + +# Enter variant project path into pathmap +# +# $(1): name +# $(2): variable to check +# $(3): base path +# +define project-set-path-variant + $(call project-set-path,$(1),$(strip \ + $(if $($(2)), \ + 
$(3)-$($(2)), \ + $(3)))) +endef + +# Returns the path to the requested module's include directory, +# relative to the root of the source tree. +# +# $(1): a list of modules (or other named entities) to find the projects for +define project-path-for +$(foreach n,$(1),$(patsubst $(n):%,%,$(filter $(n):%,$(pathmap_PROJ)))) +endef + # # Many modules expect to be able to say "#include ", # so make it easy for them to find the correct path. diff --git a/core/pdk_config.mk b/core/pdk_config.mk index c0aaacd9285..33a64b4fd76 100644 --- a/core/pdk_config.mk +++ b/core/pdk_config.mk @@ -71,7 +71,7 @@ endif endif $(_pdk_fusion_stamp) : $(PDK_FUSION_PLATFORM_ZIP) - @echo "Unzip $(dir $@) <- $<" + @echo "Unzip"" $(dir $@) <- $<" $(hide) rm -rf $(dir $@) && mkdir -p $(dir $@) $(hide) unzip -qo $< -d $(dir $@) $(call split-long-arguments,-touch,$(_pdk_fusion_files)) @@ -87,7 +87,7 @@ $(_pdk_fusion_files) : $(_pdk_fusion_stamp) # That's desired by us: we want only absent files from the platform zip package. # Copy with the last-modified time preserved, never follow symbolic links. $(PRODUCT_OUT)/% : $(_pdk_fusion_intermediates)/% $(_pdk_fusion_stamp) - @mkdir -p $(dir $@) + $(hide) mkdir -p $(dir $@) $(hide) rm -rf $@ $(hide) cp -fpPR $< $@ diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk index b6727fa6337..507ef22813b 100644 --- a/core/prebuilt_internal.mk +++ b/core/prebuilt_internal.mk @@ -237,6 +237,11 @@ LOCAL_DEX_PREOPT := false endif endif +# Disable dex-preopt of specific prebuilts to save space, if requested. 
+ifneq ($(filter $(DEXPREOPT_BLACKLIST),$(LOCAL_MODULE)),) +LOCAL_DEX_PREOPT := false +endif + ####################################### # defines built_odex along with rule to install odex include $(BUILD_SYSTEM)/dex_preopt_odex_install.mk @@ -263,7 +268,9 @@ $(built_module): PRIVATE_EMBEDDED_JNI_LIBS := $(embedded_prebuilt_jni_libs) $(built_module) : $(my_prebuilt_src_file) | $(ACP) $(ZIPALIGN) $(SIGNAPK_JAR) $(AAPT) $(transform-prebuilt-to-target) +ifneq ($(LOCAL_MODULE_PATH),$(TARGET_OUT_VENDOR)/bundled-app) $(uncompress-shared-libs) +endif ifdef LOCAL_DEX_PREOPT ifneq ($(BUILD_PLATFORM_ZIP),) @# Keep a copy of apk with classes.dex unstripped diff --git a/core/product-graph.mk b/core/product-graph.mk index 36e903723aa..1030c6aab04 100644 --- a/core/product-graph.mk +++ b/core/product-graph.mk @@ -70,7 +70,7 @@ $(products_graph): PRIVATE_PRODUCTS := $(really_all_products) $(products_graph): PRIVATE_PRODUCTS_FILTER := $(products_list) $(products_graph): $(this_makefile) - @echo Product graph DOT: $@ for $(PRIVATE_PRODUCTS_FILTER) + @echo "Product graph DOT:"" $@ for $(PRIVATE_PRODUCTS_FILTER)" $(hide) echo 'digraph {' > $@.in $(hide) echo 'graph [ ratio=.5 ];' >> $@.in $(hide) $(foreach p,$(PRIVATE_PRODUCTS), \ @@ -89,7 +89,7 @@ endef # $(1) product file define transform-product-debug $(OUT_DIR)/products/$(strip $(1)).txt: $(this_makefile) - @echo Product debug info file: $$@ + @echo "Product debug info file:"" $$@" $(hide) rm -f $$@ $(hide) mkdir -p $$(dir $$@) $(hide) echo 'FILE=$(strip $(1))' >> $$@ @@ -105,6 +105,7 @@ $(OUT_DIR)/products/$(strip $(1)).txt: $(this_makefile) $(hide) echo 'PRODUCT_DEFAULT_PROPERTY_OVERRIDES=$$(PRODUCTS.$(strip $(1)).PRODUCT_DEFAULT_PROPERTY_OVERRIDES)' >> $$@ $(hide) echo 'PRODUCT_CHARACTERISTICS=$$(PRODUCTS.$(strip $(1)).PRODUCT_CHARACTERISTICS)' >> $$@ $(hide) echo 'PRODUCT_COPY_FILES=$$(PRODUCTS.$(strip $(1)).PRODUCT_COPY_FILES)' >> $$@ + $(hide) echo 'PRODUCT_COPY_FILES_OVERRIDES=$$(PRODUCTS.$(strip 
$(1)).PRODUCT_COPY_FILES_OVERRIDES)' >> $$@ $(hide) echo 'PRODUCT_OTA_PUBLIC_KEYS=$$(PRODUCTS.$(strip $(1)).PRODUCT_OTA_PUBLIC_KEYS)' >> $$@ $(hide) echo 'PRODUCT_EXTRA_RECOVERY_KEYS=$$(PRODUCTS.$(strip $(1)).PRODUCT_EXTRA_RECOVERY_KEYS)' >> $$@ $(hide) echo 'PRODUCT_PACKAGE_OVERLAYS=$$(PRODUCTS.$(strip $(1)).PRODUCT_PACKAGE_OVERLAYS)' >> $$@ @@ -122,7 +123,7 @@ $(call product-debug-filename, $(p)): \ $(OUT_DIR)/products/$(strip $(1)).txt \ build/tools/product_debug.py \ $(this_makefile) - @echo Product debug html file: $$@ + @echo "Product debug html file:"" $$@" $(hide) mkdir -p $$(dir $$@) $(hide) cat $$< | build/tools/product_debug.py > $$@ endef @@ -134,11 +135,11 @@ $(foreach p,$(really_all_products), \ ) $(products_pdf): $(products_graph) - @echo Product graph PDF: $@ + @echo "Product graph PDF:"" $@" dot -Tpdf -Nshape=box -o $@ $< $(products_svg): $(products_graph) $(product_debug_files) - @echo Product graph SVG: $@ + @echo "Product graph SVG:"" $@" dot -Tsvg -Nshape=box -o $@ $< product-graph: $(products_pdf) $(products_svg) diff --git a/core/product.mk b/core/product.mk index 332b015fd8f..c3af3b20484 100644 --- a/core/product.mk +++ b/core/product.mk @@ -73,6 +73,8 @@ endef # _product_var_list := \ + PRODUCT_BOOTANIMATION \ + PRODUCT_BUILD_PROP_OVERRIDES \ PRODUCT_NAME \ PRODUCT_MODEL \ PRODUCT_LOCALES \ @@ -90,6 +92,7 @@ _product_var_list := \ PRODUCT_DEFAULT_PROPERTY_OVERRIDES \ PRODUCT_CHARACTERISTICS \ PRODUCT_COPY_FILES \ + PRODUCT_COPY_FILES_OVERRIDES \ PRODUCT_OTA_PUBLIC_KEYS \ PRODUCT_EXTRA_RECOVERY_KEYS \ PRODUCT_PACKAGE_OVERLAYS \ @@ -111,6 +114,7 @@ _product_var_list := \ PRODUCT_SUPPORTS_VERITY_FEC \ PRODUCT_OEM_PROPERTIES \ PRODUCT_SYSTEM_PROPERTY_BLACKLIST \ + PRODUCT_SYSTEM_SERVER_APPS \ PRODUCT_SYSTEM_SERVER_JARS \ PRODUCT_VBOOT_SIGNING_KEY \ PRODUCT_VBOOT_SIGNING_SUBKEY \ @@ -139,19 +143,37 @@ $(foreach p,$(PRODUCTS),$(call dump-product,$(p))) endef # -# $(1): product to inherit +# Internal function. 
Appends inherited product variables to an existing one. # -# Does three things: -# 1. Inherits all of the variables from $1. -# 2. Records the inheritance in the .INHERITS_FROM variable -# 3. Records that we've visited this node, in ALL_PRODUCTS +# $(1): Product variable to operate on +# $(2): Value to append # -define inherit-product +define inherit-product_append-var + $(if $(findstring ../,$(2)),\ + $(eval np := $(call normalize-paths,$(2))),\ + $(eval np := $(strip $(2))))\ + $(eval $(1) := $($(1)) $(INHERIT_TAG)$(np)) +endef + +# +# Internal function. Prepends inherited product variables to an existing one. +# +# $(1): Product variable to operate on +# $(2): Value to prepend +# +define inherit-product_prepend-var + $(eval $(1) := $(INHERIT_TAG)$(strip $(2)) $($(1))) +endef + +# +# Internal function. Tracks visited notes during inheritance resolution. +# +# $(1): Product being inherited +# +define inherit-product_track-node $(if $(findstring ../,$(1)),\ $(eval np := $(call normalize-paths,$(1))),\ $(eval np := $(strip $(1))))\ - $(foreach v,$(_product_var_list), \ - $(eval $(v) := $($(v)) $(INHERIT_TAG)$(np))) \ $(eval inherit_var := \ PRODUCTS.$(strip $(word 1,$(_include_stack))).INHERITS_FROM) \ $(eval $(inherit_var) := $(sort $($(inherit_var)) $(np))) \ @@ -159,6 +181,34 @@ define inherit-product $(eval ALL_PRODUCTS := $(sort $(ALL_PRODUCTS) $(word 1,$(_include_stack)))) endef +# +# $(1): product to inherit +# +# Does three things: +# 1. Inherits all of the variables from $1, prioritizing existing settings. +# 2. Records the inheritance in the .INHERITS_FROM variable +# 3. Records that we've visited this node, in ALL_PRODUCTS +# + +define inherit-product + $(foreach v,$(_product_var_list), \ + $(call inherit-product_append-var,$(v),$(1))) \ + $(call inherit-product_track-node,$(1)) +endef + +# +# $(1): product to inherit +# +# Does three things: +# 1. Inherits all of the variables from $1, prioritizing inherited settings. +# 2. 
Records the inheritance in the .INHERITS_FROM variable +# 3. Records that we've visited this node, in ALL_PRODUCTS +# +define prepend-product + $(foreach v,$(_product_var_list), \ + $(call inherit-product_prepend-var,$(v),$(1))) \ + $(call inherit-product_track-node,$(1)) +endef # # Do inherit-product only if $(1) exists @@ -167,6 +217,13 @@ define inherit-product-if-exists $(if $(wildcard $(1)),$(call inherit-product,$(1)),) endef +# +# Do inherit-product-prepend only if $(1) exists +# +define prepend-product-if-exists + $(if $(wildcard $(1)),$(call prepend-product,$(1)),) +endef + # # $(1): product makefile list # @@ -292,6 +349,10 @@ _product_stash_var_list += \ GLOBAL_CPPFLAGS_NO_OVERRIDE \ GLOBAL_CLANG_CFLAGS_NO_OVERRIDE \ +_product_stash_var_list += \ + TARGET_SKIP_DEFAULT_LOCALE \ + TARGET_SKIP_PRODUCT_DEVICE \ + # # Stash values of the variables in _product_stash_var_list. # $(1): Renamed prefix diff --git a/core/product_config.mk b/core/product_config.mk index 6438d51bee9..f66ab84db5a 100644 --- a/core/product_config.mk +++ b/core/product_config.mk @@ -179,16 +179,26 @@ include $(BUILD_SYSTEM)/node_fns.mk include $(BUILD_SYSTEM)/product.mk include $(BUILD_SYSTEM)/device.mk -ifneq ($(strip $(TARGET_BUILD_APPS)),) -# An unbundled app build needs only the core product makefiles. -all_product_configs := $(call get-product-makefiles,\ - $(SRC_TARGET_DIR)/product/AndroidProducts.mk) +# A AICP build needs only the AICP product makefiles. +ifneq ($(AICP_BUILD),) + all_product_configs := $(shell ls vendor/aicp/products/${AICP_BUILD}.mk 2>/dev/null) + # easy to port AICP to unoffcial devices. + ifeq ($(strip $(all_product_configs)),) + all_product_configs := $(shell find device -path "*/${AICP_BUILD}/aicp.mk") + endif else -# Read in all of the product definitions specified by the AndroidProducts.mk -# files in the tree. 
-all_product_configs := $(get-all-product-makefiles) -endif + ifneq ($(strip $(TARGET_BUILD_APPS)),) + # An unbundled app build needs only the core product makefiles. + all_product_configs := $(call get-product-makefiles,\ + $(SRC_TARGET_DIR)/product/AndroidProducts.mk) + else + # Read in all of the product definitions specified by the AndroidProducts.mk + # files in the tree. + all_product_configs := $(get-all-product-makefiles) + endif # TARGET_BUILD_APPS +endif # AICP_BUILD +ifeq ($(AICP_BUILD),) # Find the product config makefile for the current product. # all_product_configs consists items like: # : @@ -207,9 +217,14 @@ $(foreach f, $(all_product_configs),\ $(eval all_product_makefiles += $(f))\ $(if $(filter $(TARGET_PRODUCT),$(basename $(notdir $(f)))),\ $(eval current_product_makefile += $(f)),))) + _cpm_words := _cpm_word1 := _cpm_word2 := +else + current_product_makefile := $(strip $(all_product_configs)) + all_product_makefiles := $(strip $(all_product_configs)) +endif current_product_makefile := $(strip $(current_product_makefile)) all_product_makefiles := $(strip $(all_product_makefiles)) @@ -264,6 +279,7 @@ all_product_configs := # A list of module names of BOOTCLASSPATH (jar files) PRODUCT_BOOT_JARS := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_BOOT_JARS)) PRODUCT_SYSTEM_SERVER_JARS := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_SERVER_JARS)) +PRODUCT_SYSTEM_SERVER_APPS := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_SERVER_APPS)) # Find the device that this product maps to. TARGET_DEVICE := $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEVICE) @@ -345,6 +361,28 @@ endif # The optional : is used to indicate the owner of a vendor file. 
PRODUCT_COPY_FILES := \ $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_COPY_FILES)) +_boot_animation := $(strip $(lastword $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_BOOTANIMATION))) +ifneq ($(_boot_animation),) +PRODUCT_COPY_FILES += \ + $(_boot_animation):system/media/bootanimation.zip +endif +_boot_animation := + +# We might want to skip items listed in PRODUCT_COPY_FILES for +# various reasons. This is useful for replacing a binary module with one +# built from source. This should be a list of destination files under $OUT +PRODUCT_COPY_FILES_OVERRIDES := \ + $(addprefix %:, $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_COPY_FILES_OVERRIDES))) + +ifneq ($(PRODUCT_COPY_FILES_OVERRIDES),) + PRODUCT_COPY_FILES := $(filter-out $(PRODUCT_COPY_FILES_OVERRIDES), $(PRODUCT_COPY_FILES)) +endif + +.PHONY: listcopies +listcopies: + @echo "Copy files: $(PRODUCT_COPY_FILES)" + @echo "Overrides: $(PRODUCT_COPY_FILES_OVERRIDES)" + # A list of property assignments, like "key = value", with zero or more # whitespace characters on either side of the '='. @@ -363,6 +401,9 @@ endif PRODUCT_DEFAULT_PROPERTY_OVERRIDES := \ $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEFAULT_PROPERTY_OVERRIDES)) +PRODUCT_BUILD_PROP_OVERRIDES := \ + $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_BUILD_PROP_OVERRIDES)) + # Should we use the default resources or add any product specific overlays PRODUCT_PACKAGE_OVERLAYS := \ $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGE_OVERLAYS)) diff --git a/core/qcom_target.mk b/core/qcom_target.mk new file mode 100644 index 00000000000..94492281fb2 --- /dev/null +++ b/core/qcom_target.mk @@ -0,0 +1,134 @@ +# Target-specific configuration + +# Bring in Qualcomm helper macros +include $(BUILD_SYSTEM)/qcom_utils.mk + +# Populate the qcom hardware variants in the project pathmap. 
+define ril-set-path-variant +$(call project-set-path-variant,ril,TARGET_RIL_VARIANT,hardware/$(1)) +endef +define wlan-set-path-variant +$(call project-set-path-variant,wlan,TARGET_WLAN_VARIANT,hardware/qcom/$(1)) +endef +define bt-vendor-set-path-variant +$(call project-set-path-variant,bt-vendor,TARGET_BT_VENDOR_VARIANT,hardware/qcom/$(1)) +endef + +# Set device-specific HALs into project pathmap +define set-device-specific-path +$(if $(USE_DEVICE_SPECIFIC_$(1)), \ + $(if $(DEVICE_SPECIFIC_$(1)_PATH), \ + $(eval path := $(DEVICE_SPECIFIC_$(1)_PATH)), \ + $(eval path := $(TARGET_DEVICE_DIR)/$(2))), \ + $(eval path := $(3))) \ +$(call project-set-path,qcom-$(2),$(strip $(path))) +endef + +ifeq ($(BOARD_USES_QCOM_HARDWARE),true) + B_FAMILY := msm8226 msm8610 msm8974 + B64_FAMILY := msm8992 msm8994 + BR_FAMILY := msm8909 msm8916 + UM_FAMILY := msm8937 msm8953 msm8996 + + qcom_flags := -DQCOM_HARDWARE + qcom_flags += -DQCOM_BSP + qcom_flags += -DQTI_BSP + + BOARD_USES_ADRENO := true + + TARGET_USES_QCOM_BSP := true + + # Tell HALs that we're compiling an AOSP build with an in-line kernel + TARGET_COMPILE_WITH_MSM_KERNEL := true + + ifneq ($(filter msm7x27a msm7x30 msm8660 msm8960,$(TARGET_BOARD_PLATFORM)),) + # Enable legacy graphics functions + qcom_flags += -DQCOM_BSP_LEGACY + # Enable legacy audio functions + ifeq ($(BOARD_USES_LEGACY_ALSA_AUDIO),true) + USE_CUSTOM_AUDIO_POLICY := 1 + qcom_flags += -DLEGACY_ALSA_AUDIO + endif + endif + + # Allow building audio encoders + TARGET_USES_QCOM_MM_AUDIO := true + + # Enable extra offloading for post-805 targets + ifneq ($(filter msm8992 msm8994,$(TARGET_BOARD_PLATFORM)),) + qcom_flags += -DHAS_EXTRA_FLAC_METADATA + endif + + # Enable color metadata for modern UM targets + ifneq ($(filter msm8996 msm8998 sdm660,$(TARGET_BOARD_PLATFORM)),) + TARGET_USES_COLOR_METADATA := true + endif + + # List of targets that use master side content protection + MASTER_SIDE_CP_TARGET_LIST := msm8996 msm8998 sdm660 + + TARGET_GLOBAL_CFLAGS 
+= $(qcom_flags) + TARGET_GLOBAL_CPPFLAGS += $(qcom_flags) + CLANG_TARGET_GLOBAL_CFLAGS += $(qcom_flags) + CLANG_TARGET_GLOBAL_CPPFLAGS += $(qcom_flags) + + # Multiarch needs these too.. + 2ND_TARGET_GLOBAL_CFLAGS += $(qcom_flags) + 2ND_TARGET_GLOBAL_CPPFLAGS += $(qcom_flags) + 2ND_CLANG_TARGET_GLOBAL_CFLAGS += $(qcom_flags) + 2ND_CLANG_TARGET_GLOBAL_CPPFLAGS += $(qcom_flags) + + ifeq ($(call is-board-platform-in-list, $(B_FAMILY)),true) + MSM_VIDC_TARGET_LIST := $(B_FAMILY) + QCOM_HARDWARE_VARIANT := msm8974 + else + ifeq ($(call is-board-platform-in-list, $(B64_FAMILY)),true) + MSM_VIDC_TARGET_LIST := $(B64_FAMILY) + QCOM_HARDWARE_VARIANT := msm8994 + else + ifeq ($(call is-board-platform-in-list, $(BR_FAMILY)),true) + MSM_VIDC_TARGET_LIST := $(BR_FAMILY) + QCOM_HARDWARE_VARIANT := msm8916 + else + ifeq ($(call is-board-platform-in-list, $(UM_FAMILY)),true) + MSM_VIDC_TARGET_LIST := $(UM_FAMILY) + QCOM_HARDWARE_VARIANT := msm8996 + else + MSM_VIDC_TARGET_LIST := $(TARGET_BOARD_PLATFORM) + QCOM_HARDWARE_VARIANT := $(TARGET_BOARD_PLATFORM) + endif + endif + endif + endif + +$(call set-device-specific-path,AUDIO,audio,hardware/qcom/audio-caf/$(QCOM_HARDWARE_VARIANT)) +$(call set-device-specific-path,DISPLAY,display,hardware/qcom/display-caf/$(QCOM_HARDWARE_VARIANT)) +$(call set-device-specific-path,MEDIA,media,hardware/qcom/media-caf/$(QCOM_HARDWARE_VARIANT)) + +$(call set-device-specific-path,CAMERA,camera,hardware/qcom/camera) +$(call set-device-specific-path,GPS,gps,hardware/qcom/gps) +$(call set-device-specific-path,SENSORS,sensors,hardware/qcom/sensors) +$(call set-device-specific-path,LOC_API,loc-api,vendor/qcom/opensource/location) +$(call set-device-specific-path,DATASERVICES,dataservices,vendor/qcom/opensource/dataservices) + +$(call ril-set-path-variant,ril) +$(call wlan-set-path-variant,wlan-caf) +$(call bt-vendor-set-path-variant,bt-caf) + +else + +$(call project-set-path,qcom-audio,hardware/qcom/audio/default) +$(call 
project-set-path,qcom-display,hardware/qcom/display/$(TARGET_BOARD_PLATFORM)) +$(call project-set-path,qcom-media,hardware/qcom/media/$(TARGET_BOARD_PLATFORM)) + +$(call project-set-path,qcom-camera,hardware/qcom/camera) +$(call project-set-path,qcom-gps,hardware/qcom/gps) +$(call project-set-path,qcom-sensors,hardware/qcom/sensors) +$(call project-set-path,qcom-loc-api,vendor/qcom/opensource/location) +$(call project-set-path,qcom-dataservices,$(TARGET_DEVICE_DIR)/dataservices) + +$(call ril-set-path-variant,ril) +$(call wlan-set-path-variant,wlan) +$(call bt-vendor-set-path-variant,bt) + +endif diff --git a/core/qcom_utils.mk b/core/qcom_utils.mk new file mode 100755 index 00000000000..6da6f614738 --- /dev/null +++ b/core/qcom_utils.mk @@ -0,0 +1,244 @@ +# Board platforms lists to be used for +# TARGET_BOARD_PLATFORM specific featurization + +# A Family +QCOM_BOARD_PLATFORMS += msm7x27a +QCOM_BOARD_PLATFORMS += msm7x30 +QCOM_BOARD_PLATFORMS += msm8660 + +QCOM_BOARD_PLATFORMS += msm8960 + +# B Family +QCOM_BOARD_PLATFORMS += msm8226 +QCOM_BOARD_PLATFORMS += msm8610 +QCOM_BOARD_PLATFORMS += msm8974 + +QCOM_BOARD_PLATFORMS += apq8084 + +# B64 Family +QCOM_BOARD_PLATFORMS += msm8992 +QCOM_BOARD_PLATFORMS += msm8994 + +# BR Family +QCOM_BOARD_PLATFORMS += msm8909 +QCOM_BOARD_PLATFORMS += msm8916 + +QCOM_BOARD_PLATFORMS += msm8952 + +# UM Family +QCOM_BOARD_PLATFORMS += msm8937 +QCOM_BOARD_PLATFORMS += msm8953 +QCOM_BOARD_PLATFORMS += msm8996 +QCOM_BOARD_PLATFORMS += msm8998 + +QCOM_BOARD_PLATFORMS += sdm660 + +MSM7K_BOARD_PLATFORMS := msm7x30 +MSM7K_BOARD_PLATFORMS += msm7x27 +MSM7K_BOARD_PLATFORMS += msm7x27a +MSM7K_BOARD_PLATFORMS += msm7k + +QSD8K_BOARD_PLATFORMS := qsd8k + + +# vars for use by utils +empty := +space := $(empty) $(empty) +colon := $(empty):$(empty) +underscore := $(empty)_$(empty) + +# $(call match-word,w1,w2) +# checks if w1 == w2 +# How it works +# if (w1-w2 not empty or w2-w1 not empty) then not_match else match +# +# returns true or empty 
+#$(warning :$(1): :$(2): :$(subst $(1),,$(2)):) \ +#$(warning :$(2): :$(1): :$(subst $(2),,$(1)):) \ +# +define match-word +$(strip \ + $(if $(or $(subst $(1),$(empty),$(2)),$(subst $(2),$(empty),$(1))),,true) \ +) +endef + +# $(call find-word-in-list,w,wlist) +# finds an exact match of word w in word list wlist +# +# How it works +# fill wlist spaces with colon +# wrap w with colon +# search word w in list wl, if found match m, return stripped word w +# +# returns stripped word or empty +define find-word-in-list +$(strip \ + $(eval wl:= $(colon)$(subst $(space),$(colon),$(strip $(2)))$(colon)) \ + $(eval w:= $(colon)$(strip $(1))$(colon)) \ + $(eval m:= $(findstring $(w),$(wl))) \ + $(if $(m),$(1),) \ +) +endef + +# $(call match-word-in-list,w,wlist) +# does an exact match of word w in word list wlist +# How it works +# if the input word is not empty +# return output of an exact match of word w in wordlist wlist +# else +# return empty +# returns true or empty +define match-word-in-list +$(strip \ + $(if $(strip $(1)), \ + $(call match-word,$(call find-word-in-list,$(1),$(2)),$(strip $(1))), \ + ) \ +) +endef + +# $(call match-prefix,p,delim,w/wlist) +# matches prefix p in wlist using delimiter delim +# +# How it works +# trim the words in wlist w +# if find-word-in-list returns not empty +# return true +# else +# return empty +# +define match-prefix +$(strip \ + $(eval w := $(strip $(1)$(strip $(2)))) \ + $(eval text := $(patsubst $(w)%,$(1),$(3))) \ + $(if $(call match-word-in-list,$(1),$(text)),true,) \ +) +endef + +# ---- +# The following utilities are meant for board platform specific +# featurisation + +# $(call get-vendor-board-platforms,v) +# returns list of board platforms for vendor v +define get-vendor-board-platforms +$(if $(call match-word,$(BOARD_USES_$(1)_HARDWARE),true),$($(1)_BOARD_PLATFORMS)) +endef + +# $(call is-board-platform,bp) +# returns true or empty +define is-board-platform +$(call match-word,$(1),$(TARGET_BOARD_PLATFORM)) +endef + +# 
$(call is-not-board-platform,bp) +# returns true or empty +define is-not-board-platform +$(if $(call match-word,$(1),$(TARGET_BOARD_PLATFORM)),,true) +endef + +# $(call is-board-platform-in-list,bpl) +# returns true or empty +define is-board-platform-in-list +$(call match-word-in-list,$(TARGET_BOARD_PLATFORM),$(1)) +endef + +# $(call is-vendor-board-platform,vendor) +# returns true or empty +define is-vendor-board-platform +$(strip \ + $(call match-word-in-list,$(TARGET_BOARD_PLATFORM),\ + $(call get-vendor-board-platforms,$(1)) \ + ) \ +) +endef + +# $(call is-chipset-in-board-platform,chipset) +# does a prefix match of chipset in TARGET_BOARD_PLATFORM +# uses underscore as a delimiter +# +# returns true or empty +define is-chipset-in-board-platform +$(call match-prefix,$(1),$(underscore),$(TARGET_BOARD_PLATFORM)) +endef + +# $(call is-chipset-prefix-in-board-platform,prefix) +# does a chipset prefix match in TARGET_BOARD_PLATFORM +# assumes '_' and 'a' as the delimiter to the chipset prefix +# +# How it works +# if ($(prefix)_ or $(prefix)a match in board platform) +# return true +# else +# return empty +# +define is-chipset-prefix-in-board-platform +$(strip \ + $(eval delim_a := $(empty)a$(empty)) \ + $(if \ + $(or \ + $(call match-prefix,$(1),$(delim_a),$(TARGET_BOARD_PLATFORM)), \ + $(call match-prefix,$(1),$(underscore),$(TARGET_BOARD_PLATFORM)), \ + ), \ + true, \ + ) \ +) +endef + +#---- +# The following utilities are meant for Android Code Name +# specific featurisation +# +# refer http://source.android.com/source/build-numbers.html +# for code names and associated sdk versions +CUPCAKE_SDK_VERSIONS := 3 +DONUT_SDK_VERSIONS := 4 +ECLAIR_SDK_VERSIONS := 5 6 7 +FROYO_SDK_VERSIONS := 8 +GINGERBREAD_SDK_VERSIONS := 9 10 +HONEYCOMB_SDK_VERSIONS := 11 12 13 +ICECREAM_SANDWICH_SDK_VERSIONS := 14 15 +JELLY_BEAN_SDK_VERSIONS := 16 17 18 + +# $(call is-platform-sdk-version-at-least,version) +# version is a numeric SDK_VERSION defined above +define 
is-platform-sdk-version-at-least +$(strip \ + $(if $(filter 1,$(shell echo "$$(( $(PLATFORM_SDK_VERSION) >= $(1) ))" )), \ + true, \ + ) \ +) +endef + +# $(call is-android-codename,codename) +# codename is one of cupcake,donut,eclair,froyo,gingerbread,icecream +# please refer the $(codename)_SDK_VERSIONS declared above +define is-android-codename +$(strip \ + $(if \ + $(call match-word-in-list,$(PLATFORM_SDK_VERSION),$($(1)_SDK_VERSIONS)), \ + true, \ + ) \ +) +endef + +# $(call is-android-codename-in-list,cnlist) +# cnlist is combination/list of android codenames +define is-android-codename-in-list +$(strip \ + $(eval acn := $(empty)) \ + $(foreach \ + i,$(1),\ + $(eval acn += \ + $(if \ + $(call \ + match-word-in-list,\ + $(PLATFORM_SDK_VERSION),\ + $($(i)_SDK_VERSIONS)\ + ),\ + true,\ + )\ + )\ + ) \ + $(if $(strip $(acn)),true,) \ +) +endef diff --git a/core/shared_library.mk b/core/shared_library.mk index 2f483414a20..26c455da467 100644 --- a/core/shared_library.mk +++ b/core/shared_library.mk @@ -21,6 +21,10 @@ endif endif # my_module_multilib == both +ifeq ($(LOCAL_SDCLANG), true) +include $(SDCLANG_FLAG_DEFS) +endif + LOCAL_2ND_ARCH_VAR_PREFIX := include $(BUILD_SYSTEM)/module_arch_supported.mk @@ -48,6 +52,12 @@ LOCAL_2ND_ARCH_VAR_PREFIX := endif # TARGET_2ND_ARCH +ifeq ($(LOCAL_SDCLANG), true) +ifeq ($(LOCAL_SDCLANG_LTO), true) +include $(SDCLANG_LTO_DEFS) +endif +endif + my_module_arch_supported := ########################################################### diff --git a/core/static_java_library.mk b/core/static_java_library.mk index 1279878b592..d5ab8553e2e 100644 --- a/core/static_java_library.mk +++ b/core/static_java_library.mk @@ -160,7 +160,7 @@ include $(BUILD_SYSTEM)/aapt2.mk $(my_res_package) : $(framework_res_package_export_deps) else $(R_file_stamp) : $(all_resources) $(full_android_manifest) $(AAPT) $(framework_res_package_export_deps) - @echo "target R.java/Manifest.java: $(PRIVATE_MODULE) ($@)" + @echo "target R.java/Manifest.java:"" 
$(PRIVATE_MODULE) ($@)" $(create-resource-java-files) $(hide) find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) -name R.java | xargs cat > $@ endif # LOCAL_USE_AAPT2 @@ -180,6 +180,7 @@ $(built_aar): PRIVATE_ANDROID_MANIFEST := $(full_android_manifest) $(built_aar): PRIVATE_CLASSES_JAR := $(full_classes_jar) $(built_aar): PRIVATE_RESOURCE_DIR := $(LOCAL_RESOURCE_DIR) $(built_aar): PRIVATE_R_TXT := $(LOCAL_INTERMEDIATE_SOURCE_DIR)/R.txt +$(built_aar): PRIVATE_CONSUMER_PROGUARD_FILE := $(LOCAL_CONSUMER_PROGUARD_FILE) $(built_aar) : $(full_classes_jar) $(full_android_manifest) @echo "target AAR: $(PRIVATE_MODULE) ($@)" $(hide) rm -rf $(dir $@)aar && mkdir -p $(dir $@)aar/res @@ -188,6 +189,9 @@ $(built_aar) : $(full_classes_jar) $(full_android_manifest) # Note: Use "cp -n" to honor the resource overlay rules, if multiple res dirs exist. $(hide) $(foreach res,$(PRIVATE_RESOURCE_DIR),cp -Rfn $(res)/* $(dir $@)aar/res;) $(hide) cp $(PRIVATE_R_TXT) $(dir $@)aar/R.txt + $(hide) if [ ! -z "$(PRIVATE_CONSUMER_PROGUARD_FILE)" ]; then \ + echo "Including '$(PRIVATE_CONSUMER_PROGUARD_FILE)'"; \ + cp $(PRIVATE_CONSUMER_PROGUARD_FILE) $(dir $@)aar/proguard.txt; fi $(hide) jar -cMf $@ \ -C $(dir $@)aar . 
diff --git a/core/static_library.mk b/core/static_library.mk index a8ae399b68a..61a63d065d4 100644 --- a/core/static_library.mk +++ b/core/static_library.mk @@ -6,6 +6,10 @@ ifndef my_module_multilib my_module_multilib := both endif +ifeq ($(LOCAL_SDCLANG), true) +include $(SDCLANG_FLAG_DEFS) +endif + LOCAL_2ND_ARCH_VAR_PREFIX := include $(BUILD_SYSTEM)/module_arch_supported.mk @@ -33,6 +37,12 @@ LOCAL_2ND_ARCH_VAR_PREFIX := endif # TARGET_2ND_ARCH +ifeq ($(LOCAL_SDCLANG), true) +ifeq ($(LOCAL_SDCLANG_LTO), true) +include $(SDCLANG_LTO_DEFS) +endif +endif + my_module_arch_supported := ########################################################### diff --git a/core/tasks/apicheck.mk b/core/tasks/apicheck.mk index 3975d2071e1..f45bf8c532b 100644 --- a/core/tasks/apicheck.mk +++ b/core/tasks/apicheck.mk @@ -76,9 +76,9 @@ $(eval $(call check-api, \ .PHONY: update-public-api update-public-api: $(INTERNAL_PLATFORM_API_FILE) | $(ACP) - @echo Copying current.txt + @echo "Copying current.txt" $(hide) $(ACP) $(INTERNAL_PLATFORM_API_FILE) frameworks/base/api/current.txt - @echo Copying removed.txt + @echo "Copying removed.txt" $(hide) $(ACP) $(INTERNAL_PLATFORM_REMOVED_API_FILE) frameworks/base/api/removed.txt update-api : update-public-api diff --git a/core/tasks/boot_jars_package_check.mk b/core/tasks/boot_jars_package_check.mk index 188c267400a..28f2b826a39 100644 --- a/core/tasks/boot_jars_package_check.mk +++ b/core/tasks/boot_jars_package_check.mk @@ -16,6 +16,7 @@ # Rules to check if classes in the boot jars are from the whitelisted packages. 
# +ifneq ($(SKIP_BOOT_JARS_CHECK),) ifneq ($(SKIP_BOOT_JARS_CHECK),true) ifneq ($(TARGET_BUILD_PDK),true) ifdef PRODUCT_BOOT_JARS @@ -44,3 +45,4 @@ droidcore : check-boot-jars endif # PRODUCT_BOOT_JARS endif # TARGET_BUILD_PDK not true endif # SKIP_BOOT_JARS_CHECK not true +endif # SKIP_BOOT_JARS_CHECK not defined diff --git a/core/tasks/check_boot_jars/check_boot_jars.py b/core/tasks/check_boot_jars/check_boot_jars.py index 5a0ec401635..cd0bd75302f 100755 --- a/core/tasks/check_boot_jars/check_boot_jars.py +++ b/core/tasks/check_boot_jars/check_boot_jars.py @@ -1,5 +1,7 @@ #!/usr/bin/env python +from __future__ import print_function + """ Check boot jars. @@ -55,15 +57,15 @@ def CheckJar(jar): package_name = package_name.replace('/', '.') # Skip class without a package name if package_name and not whitelist_re.match(package_name): - print >> sys.stderr, ('Error: %s: unknown package name of class file %s' - % (jar, f)) + print('Error: %s: unknown package name of class file %s' + % (jar, f), file=sys.stderr) return False return True def main(argv): if len(argv) < 2: - print __doc__ + print(__doc__) return 1 if not LoadWhitelist(argv[0]): diff --git a/core/tasks/collect_gpl_sources.mk b/core/tasks/collect_gpl_sources.mk index 30ba62b6091..63bf2b0468b 100644 --- a/core/tasks/collect_gpl_sources.mk +++ b/core/tasks/collect_gpl_sources.mk @@ -17,7 +17,7 @@ gpl_source_tgz := $(call intermediates-dir-for,PACKAGING,gpl_source,HOST,COMMON) # FORCE since we can't know whether any of the sources changed $(gpl_source_tgz): PRIVATE_PATHS := $(sort $(patsubst %/, %, $(dir $(ALL_GPL_MODULE_LICENSE_FILES)))) $(gpl_source_tgz) : $(ALL_GPL_MODULE_LICENSE_FILES) FORCE - @echo Package gpl sources: $@ + @echo "Package gpl sources:"" $@" @rm -rf $(dir $@) && mkdir -p $(dir $@) $(hide) tar cfz $@ --exclude ".git*" $(PRIVATE_PATHS) diff --git a/core/tasks/dt_image.mk b/core/tasks/dt_image.mk new file mode 100644 index 00000000000..1e7adb6f96f --- /dev/null +++ b/core/tasks/dt_image.mk @@ 
-0,0 +1,58 @@ +#---------------------------------------------------------------------- +# Generate device tree image (dt.img) +#---------------------------------------------------------------------- +ifeq ($(strip $(BOARD_CUSTOM_BOOTIMG_MK)),) +ifeq ($(strip $(BOARD_KERNEL_SEPARATED_DT)),true) +ifneq ($(strip $(BOARD_KERNEL_PREBUILT_DT)),true) + +ifeq ($(strip $(TARGET_CUSTOM_DTBTOOL)),) +DTBTOOL_NAME := dtbToolCM +else +DTBTOOL_NAME := $(TARGET_CUSTOM_DTBTOOL) +endif + +DTBTOOL := $(HOST_OUT_EXECUTABLES)/$(DTBTOOL_NAME)$(HOST_EXECUTABLE_SUFFIX) + +INSTALLED_DTIMAGE_TARGET := $(PRODUCT_OUT)/dt.img + +ifeq ($(strip $(TARGET_CUSTOM_DTBTOOL)),) +# dtbToolCM will search subdirectories +possible_dtb_dirs = $(KERNEL_OUT)/arch/$(KERNEL_ARCH)/boot/ +else +# Most specific paths must come first in possible_dtb_dirs +possible_dtb_dirs = $(KERNEL_OUT)/arch/$(KERNEL_ARCH)/boot/dts/ $(KERNEL_OUT)/arch/$(KERNEL_ARCH)/boot/ +endif + +define build-dtimage-target + $(call pretty,"Target dt image: $@") + $(hide) for dir in $(possible_dtb_dirs); do \ + if [ -d "$$dir" ]; then \ + dtb_dir="$$dir"; \ + break; \ + fi; \ + done; \ + $(DTBTOOL) $(BOARD_DTBTOOL_ARGS) -o $@ -s $(BOARD_KERNEL_PAGESIZE) -p $(KERNEL_OUT)/scripts/dtc/ "$$dtb_dir"; + $(hide) chmod a+r $@ +endef + +ifeq ($(strip $(BOARD_KERNEL_LZ4C_DT)),true) +LZ4_DT_IMAGE := $(PRODUCT_OUT)/dt-lz4.img +endif + +$(INSTALLED_DTIMAGE_TARGET): $(DTBTOOL) $(INSTALLED_KERNEL_TARGET) + $(build-dtimage-target) +ifeq ($(strip $(BOARD_KERNEL_LZ4C_DT)),true) + lz4 -9 < $@ > $(LZ4_DT_IMAGE) || lz4c -c1 -y $@ $(LZ4_DT_IMAGE) + $(hide) $(ACP) $(LZ4_DT_IMAGE) $@ +endif + @echo "Made DT image: $@" + +ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_DTIMAGE_TARGET) +ALL_MODULES.$(LOCAL_MODULE).INSTALLED += $(INSTALLED_DTIMAGE_TARGET) + +.PHONY: dtimage +dtimage: $(INSTALLED_DTIMAGE_TARGET) + +endif +endif +endif diff --git a/core/tasks/kernel.mk b/core/tasks/kernel.mk new file mode 100644 index 00000000000..25e769d7af0 --- /dev/null +++ 
b/core/tasks/kernel.mk @@ -0,0 +1,383 @@ +# Copyright (C) 2012 The CyanogenMod Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# Android makefile to build kernel as a part of Android Build +# +# Configuration +# ============= +# +# These config vars are usually set in BoardConfig.mk: +# +# TARGET_KERNEL_SOURCE = Kernel source dir, optional, defaults +# to kernel/$(TARGET_DEVICE_DIR) +# TARGET_KERNEL_CONFIG = Kernel defconfig +# TARGET_KERNEL_VARIANT_CONFIG = Variant defconfig, optional +# TARGET_KERNEL_SELINUX_CONFIG = SELinux defconfig, optional +# TARGET_KERNEL_ADDITIONAL_CONFIG = Additional defconfig, optional +# TARGET_KERNEL_ARCH = Kernel Arch +# TARGET_KERNEL_HEADER_ARCH = Optional Arch for kernel headers if +# different from TARGET_KERNEL_ARCH +# TARGET_USES_UNCOMPRESSED_KERNEL = 'true' if Kernel is uncompressed, +# optional, defaults to false +# TARGET_KERNEL_CROSS_COMPILE_PREFIX = Compiler prefix (e.g. 
arm-eabi-) +# defaults to arm-linux-androidkernel- for arm +# aarch64-linux-androidkernel- for arm64 +# x86_64-linux-androidkernel- for x86 +# +# BOARD_KERNEL_IMAGE_NAME = Built image name, optional, +# defaults to Image.gz on arm64 +# defaults to Image if TARGET_USES_UNCOMPRESSED_KERNEL +# defaults to zImage otherwise +# +# KERNEL_TOOLCHAIN_PREFIX = Overrides TARGET_KERNEL_CROSS_COMPILE_PREFIX, +# Set this var in shell to override +# toolchain specified in BoardConfig.mk +# KERNEL_TOOLCHAIN = Path to toolchain, if unset, assumes +# TARGET_KERNEL_CROSS_COMPILE_PREFIX +# is in PATH +# USE_CCACHE = Enable ccache (global Android flag) +# +# NEED_KERNEL_MODULE_ROOT = Optional, if true, install kernel +# modules in root instead of system + + +TARGET_AUTO_KDIR := $(shell echo $(TARGET_DEVICE_DIR) | sed -e 's/^device/kernel/g') + +## Externally influenced variables +# kernel location - optional, defaults to kernel// +TARGET_KERNEL_SOURCE ?= $(TARGET_AUTO_KDIR) +KERNEL_SRC := $(TARGET_KERNEL_SOURCE) +# kernel configuration - mandatory +KERNEL_DEFCONFIG := $(TARGET_KERNEL_CONFIG) +VARIANT_DEFCONFIG := $(TARGET_KERNEL_VARIANT_CONFIG) +SELINUX_DEFCONFIG := $(TARGET_KERNEL_SELINUX_CONFIG) + +## Internal variables +KERNEL_OUT := $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ +KERNEL_CONFIG := $(KERNEL_OUT)/.config +KERNEL_OUT_STAMP := $(KERNEL_OUT)/.mkdir_stamp + +TARGET_KERNEL_ARCH := $(strip $(TARGET_KERNEL_ARCH)) +ifeq ($(TARGET_KERNEL_ARCH),) +KERNEL_ARCH := $(TARGET_ARCH) +else +KERNEL_ARCH := $(TARGET_KERNEL_ARCH) +endif + +ifeq ($(KERNEL_ARCH),x86_64) +KERNEL_DEFCONFIG_ARCH := x86 +else +KERNEL_DEFCONFIG_ARCH := $(KERNEL_ARCH) +endif +KERNEL_DEFCONFIG_SRC := $(KERNEL_SRC)/arch/$(KERNEL_DEFCONFIG_ARCH)/configs/$(KERNEL_DEFCONFIG) + +TARGET_KERNEL_HEADER_ARCH := $(strip $(TARGET_KERNEL_HEADER_ARCH)) +ifeq ($(TARGET_KERNEL_HEADER_ARCH),) +KERNEL_HEADER_ARCH := $(KERNEL_ARCH) +else +KERNEL_HEADER_ARCH := $(TARGET_KERNEL_HEADER_ARCH) +endif + +KERNEL_HEADER_DEFCONFIG := $(strip 
$(KERNEL_HEADER_DEFCONFIG)) +ifeq ($(KERNEL_HEADER_DEFCONFIG),) +KERNEL_HEADER_DEFCONFIG := $(KERNEL_DEFCONFIG) +endif + + +ifneq ($(BOARD_KERNEL_IMAGE_NAME),) + TARGET_PREBUILT_INT_KERNEL_TYPE := $(BOARD_KERNEL_IMAGE_NAME) +else + ifeq ($(TARGET_USES_UNCOMPRESSED_KERNEL),true) + TARGET_PREBUILT_INT_KERNEL_TYPE := Image + else + ifeq ($(KERNEL_ARCH),arm64) + TARGET_PREBUILT_INT_KERNEL_TYPE := Image.gz + else + TARGET_PREBUILT_INT_KERNEL_TYPE := zImage + endif + endif + ifeq ($(TARGET_KERNEL_APPEND_DTB),true) + TARGET_PREBUILT_INT_KERNEL_TYPE := $(TARGET_PREBUILT_INT_KERNEL_TYPE)-dtb + endif +endif + +TARGET_PREBUILT_INT_KERNEL := $(KERNEL_OUT)/arch/$(KERNEL_ARCH)/boot/$(TARGET_PREBUILT_INT_KERNEL_TYPE) + +# Clear this first to prevent accidental poisoning from env +MAKE_FLAGS := + +ifeq ($(KERNEL_ARCH),arm) + # Avoid "Unknown symbol _GLOBAL_OFFSET_TABLE_" errors + MAKE_FLAGS += CFLAGS_MODULE="-fno-pic" +endif + +ifeq ($(KERNEL_ARCH),arm64) + # Avoid "unsupported RELA relocation: 311" errors (R_AARCH64_ADR_GOT_PAGE) + MAKE_FLAGS += CFLAGS_MODULE="-fno-pic" + ifeq ($(TARGET_ARCH),arm) + KERNEL_CONFIG_OVERRIDE := CONFIG_ANDROID_BINDER_IPC_32BIT=y + endif +endif + +ifneq ($(TARGET_KERNEL_ADDITIONAL_CONFIG),) +KERNEL_ADDITIONAL_CONFIG := $(TARGET_KERNEL_ADDITIONAL_CONFIG) +KERNEL_ADDITIONAL_CONFIG_SRC := $(KERNEL_SRC)/arch/$(KERNEL_ARCH)/configs/$(KERNEL_ADDITIONAL_CONFIG) + ifeq ("$(wildcard $(KERNEL_ADDITIONAL_CONFIG_SRC))","") + $(warning TARGET_KERNEL_ADDITIONAL_CONFIG '$(TARGET_KERNEL_ADDITIONAL_CONFIG)' doesn't exist) + KERNEL_ADDITIONAL_CONFIG_SRC := /dev/null + endif +else + KERNEL_ADDITIONAL_CONFIG_SRC := /dev/null +endif + +ifeq "$(wildcard $(KERNEL_SRC) )" "" + ifneq ($(TARGET_PREBUILT_KERNEL),) + HAS_PREBUILT_KERNEL := true + NEEDS_KERNEL_COPY := true + else + $(foreach cf,$(PRODUCT_COPY_FILES), \ + $(eval _src := $(call word-colon,1,$(cf))) \ + $(eval _dest := $(call word-colon,2,$(cf))) \ + $(if $(filter kernel,$(_dest)), \ + $(eval HAS_PREBUILT_KERNEL := true)))
+ endif + + ifneq ($(HAS_PREBUILT_KERNEL),) + $(warning ***************************************************************) + $(warning * Using prebuilt kernel binary instead of source *) + $(warning * THIS IS DEPRECATED, AND WILL BE DISCONTINUED *) + $(warning * Please configure your device to download the kernel *) + $(warning * source repository to $(KERNEL_SRC)) + $(warning * See http://wiki.cyanogenmod.org/w/Doc:_integrated_kernel_building) + $(warning * for more information *) + $(warning ***************************************************************) + FULL_KERNEL_BUILD := false + KERNEL_BIN := $(TARGET_PREBUILT_KERNEL) + else + $(warning ***************************************************************) + $(warning * *) + $(warning * No kernel source found, and no fallback prebuilt defined. *) + $(warning * Please make sure your device is properly configured to *) + $(warning * download the kernel repository to $(KERNEL_SRC)) + $(warning * and add the TARGET_KERNEL_CONFIG variable to BoardConfig.mk *) + $(warning * *) + $(warning * As an alternative, define the TARGET_PREBUILT_KERNEL *) + $(warning * variable with the path to the prebuilt binary kernel image *) + $(warning * in your BoardConfig.mk file *) + $(warning * *) + $(warning ***************************************************************) + $(error "NO KERNEL") + endif +else + NEEDS_KERNEL_COPY := true + ifeq ($(TARGET_KERNEL_CONFIG),) + $(warning **********************************************************) + $(warning * Kernel source found, but no configuration was defined *) + $(warning * Please add the TARGET_KERNEL_CONFIG variable to your *) + $(warning * BoardConfig.mk file *) + $(warning **********************************************************) + # $(error "NO KERNEL CONFIG") + else + #$(info Kernel source found, building it) + FULL_KERNEL_BUILD := true + KERNEL_BIN := $(TARGET_PREBUILT_INT_KERNEL) + endif +endif + +ifeq ($(FULL_KERNEL_BUILD),true) + +KERNEL_HEADERS_INSTALL := $(KERNEL_OUT)/usr 
+KERNEL_HEADERS_INSTALL_STAMP := $(KERNEL_OUT)/.headers_install_stamp + +ifeq ($(NEED_KERNEL_MODULE_ROOT),true) +KERNEL_MODULES_INSTALL := root +KERNEL_MODULES_OUT := $(TARGET_ROOT_OUT)/lib/modules +else +KERNEL_MODULES_INSTALL := system +KERNEL_MODULES_OUT := $(TARGET_OUT)/lib/modules +endif + +TARGET_KERNEL_CROSS_COMPILE_PREFIX := $(strip $(TARGET_KERNEL_CROSS_COMPILE_PREFIX)) +ifneq ($(TARGET_KERNEL_CROSS_COMPILE_PREFIX),) +KERNEL_TOOLCHAIN_PREFIX ?= $(TARGET_KERNEL_CROSS_COMPILE_PREFIX) +else ifeq ($(KERNEL_ARCH),arm64) +KERNEL_TOOLCHAIN_PREFIX ?= aarch64-linux-androidkernel- +else ifeq ($(KERNEL_ARCH),arm) +KERNEL_TOOLCHAIN_PREFIX ?= arm-linux-androidkernel- +else ifeq ($(KERNEL_ARCH),x86) +KERNEL_TOOLCHAIN_PREFIX ?= x86_64-linux-androidkernel- +endif + +ifeq ($(KERNEL_TOOLCHAIN),) +KERNEL_TOOLCHAIN_PATH := $(KERNEL_TOOLCHAIN_PREFIX) +else +ifneq ($(KERNEL_TOOLCHAIN_PREFIX),) +KERNEL_TOOLCHAIN_PATH := $(KERNEL_TOOLCHAIN)/$(KERNEL_TOOLCHAIN_PREFIX) +endif +endif + +ifneq ($(USE_CCACHE),) + # Detect if the system already has ccache installed to use instead of the prebuilt + ccache := $(shell which ccache) + + ifeq ($(ccache),) + ccache := $(ANDROID_BUILD_TOP)/prebuilts/misc/$(HOST_PREBUILT_TAG)/ccache/ccache + # Check that the executable is here. 
+ ccache := $(strip $(wildcard $(ccache))) + endif +endif + +KERNEL_CROSS_COMPILE := CROSS_COMPILE="$(ccache) $(KERNEL_TOOLCHAIN_PATH)" +ccache = + +define mv-modules + mdpath=`find $(KERNEL_MODULES_OUT) -type f -name modules.order`;\ + if [ "$$mdpath" != "" ];then\ + mpath=`dirname $$mdpath`;\ + ko=`find $$mpath/kernel -type f -name "*.ko"`;\ + for i in $$ko; do $(KERNEL_TOOLCHAIN_PATH)strip --strip-unneeded $$i;\ + mv $$i $(KERNEL_MODULES_OUT)/; done;\ + fi +endef + +define clean-module-folder + mdpath=`find $(KERNEL_MODULES_OUT) -type f -name modules.order`;\ + if [ "$$mdpath" != "" ];then\ + mpath=`dirname $$mdpath`; rm -rf $$mpath;\ + fi +endef + +ifeq ($(HOST_OS),darwin) + MAKE_FLAGS += C_INCLUDE_PATH=$(ANDROID_BUILD_TOP)/external/elfutils/libelf:/usr/local/opt/openssl/include + MAKE_FLAGS += LIBRARY_PATH=/usr/local/opt/openssl/lib +endif + +ifeq ($(TARGET_KERNEL_MODULES),) + TARGET_KERNEL_MODULES := no-external-modules +endif + +$(KERNEL_OUT_STAMP): + $(hide) mkdir -p $(KERNEL_OUT) + $(hide) rm -rf $(KERNEL_MODULES_OUT) + $(hide) mkdir -p $(KERNEL_MODULES_OUT) + $(hide) touch $@ + +KERNEL_ADDITIONAL_CONFIG_OUT := $(KERNEL_OUT)/.additional_config + +.PHONY: force_additional_config +$(KERNEL_ADDITIONAL_CONFIG_OUT): force_additional_config + $(hide) cmp -s $(KERNEL_ADDITIONAL_CONFIG_SRC) $@ || cp $(KERNEL_ADDITIONAL_CONFIG_SRC) $@; + +$(KERNEL_CONFIG): $(KERNEL_OUT_STAMP) $(KERNEL_DEFCONFIG_SRC) $(KERNEL_ADDITIONAL_CONFIG_OUT) + @echo "Building Kernel Config" + $(MAKE) $(MAKE_FLAGS) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_ARCH) $(KERNEL_CROSS_COMPILE) VARIANT_DEFCONFIG=$(VARIANT_DEFCONFIG) SELINUX_DEFCONFIG=$(SELINUX_DEFCONFIG) $(KERNEL_DEFCONFIG) + $(hide) if [ ! 
-z "$(KERNEL_CONFIG_OVERRIDE)" ]; then \ + echo "Overriding kernel config with '$(KERNEL_CONFIG_OVERRIDE)'"; \ + echo $(KERNEL_CONFIG_OVERRIDE) >> $(KERNEL_OUT)/.config; \ + $(MAKE) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_ARCH) $(KERNEL_CROSS_COMPILE) oldconfig; fi + # Create defconfig build artifact + $(hide) $(MAKE) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_ARCH) $(KERNEL_CROSS_COMPILE) savedefconfig + $(hide) if [ ! -z "$(KERNEL_ADDITIONAL_CONFIG)" ]; then \ + echo "Using additional config '$(KERNEL_ADDITIONAL_CONFIG)'"; \ + $(KERNEL_SRC)/scripts/kconfig/merge_config.sh -m -O $(KERNEL_OUT) $(KERNEL_OUT)/.config $(KERNEL_SRC)/arch/$(KERNEL_ARCH)/configs/$(KERNEL_ADDITIONAL_CONFIG); \ + $(MAKE) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_ARCH) $(KERNEL_CROSS_COMPILE) KCONFIG_ALLCONFIG=$(KERNEL_OUT)/.config alldefconfig; fi + +TARGET_KERNEL_BINARIES: $(KERNEL_OUT_STAMP) $(KERNEL_CONFIG) $(KERNEL_HEADERS_INSTALL_STAMP) + @echo "Building Kernel" + $(MAKE) $(MAKE_FLAGS) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_ARCH) $(KERNEL_CROSS_COMPILE) $(TARGET_PREBUILT_INT_KERNEL_TYPE) + $(hide) if grep -q 'CONFIG_OF=y' $(KERNEL_CONFIG) ; \ + then \ + echo "Building DTBs" ; \ + $(MAKE) $(MAKE_FLAGS) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_ARCH) $(KERNEL_CROSS_COMPILE) dtbs ; \ + else \ + echo "DTBs not enabled" ; \ + fi ; + $(hide) if grep -q 'CONFIG_MODULES=y' $(KERNEL_CONFIG) ; \ + then \ + echo "Building Kernel Modules" ; \ + $(MAKE) $(MAKE_FLAGS) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_ARCH) $(KERNEL_CROSS_COMPILE) modules && \ + $(MAKE) $(MAKE_FLAGS) -C $(KERNEL_SRC) O=$(KERNEL_OUT) INSTALL_MOD_PATH=../../$(KERNEL_MODULES_INSTALL) ARCH=$(KERNEL_ARCH) $(KERNEL_CROSS_COMPILE) modules_install && \ + $(mv-modules) && \ + $(clean-module-folder) ; \ + else \ + echo "Kernel Modules not enabled" ; \ + fi ; + + +$(TARGET_KERNEL_MODULES): TARGET_KERNEL_BINARIES + +$(TARGET_PREBUILT_INT_KERNEL): $(TARGET_KERNEL_MODULES) + 
+$(KERNEL_HEADERS_INSTALL_STAMP): $(KERNEL_OUT_STAMP) $(KERNEL_CONFIG) + @echo "Building Kernel Headers" + $(hide) if [ ! -z "$(KERNEL_HEADER_DEFCONFIG)" ]; then \ + rm -f ../$(KERNEL_CONFIG); \ + $(MAKE) $(MAKE_FLAGS) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_HEADER_ARCH) $(KERNEL_CROSS_COMPILE) VARIANT_DEFCONFIG=$(VARIANT_DEFCONFIG) SELINUX_DEFCONFIG=$(SELINUX_DEFCONFIG) $(KERNEL_HEADER_DEFCONFIG); \ + $(MAKE) $(MAKE_FLAGS) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_HEADER_ARCH) $(KERNEL_CROSS_COMPILE) headers_install; fi + $(hide) if [ "$(KERNEL_HEADER_DEFCONFIG)" != "$(KERNEL_DEFCONFIG)" ]; then \ + echo "Used a different defconfig for header generation"; \ + rm -f ../$(KERNEL_CONFIG); \ + $(MAKE) $(MAKE_FLAGS) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_ARCH) $(KERNEL_CROSS_COMPILE) VARIANT_DEFCONFIG=$(VARIANT_DEFCONFIG) SELINUX_DEFCONFIG=$(SELINUX_DEFCONFIG) $(KERNEL_DEFCONFIG); fi + $(hide) if [ ! -z "$(KERNEL_CONFIG_OVERRIDE)" ]; then \ + echo "Overriding kernel config with '$(KERNEL_CONFIG_OVERRIDE)'"; \ + echo $(KERNEL_CONFIG_OVERRIDE) >> $(KERNEL_OUT)/.config; \ + $(MAKE) $(MAKE_FLAGS) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_ARCH) $(KERNEL_CROSS_COMPILE) oldconfig; fi + $(hide) if [ ! 
-z "$(KERNEL_ADDITIONAL_CONFIG)" ]; then \ + echo "Using additional config '$(KERNEL_ADDITIONAL_CONFIG)'"; \ + $(KERNEL_SRC)/scripts/kconfig/merge_config.sh -m -O $(KERNEL_OUT) $(KERNEL_OUT)/.config $(KERNEL_SRC)/arch/$(KERNEL_ARCH)/configs/$(KERNEL_ADDITIONAL_CONFIG); \ + $(MAKE) $(MAKE_FLAGS) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_ARCH) $(KERNEL_CROSS_COMPILE) KCONFIG_ALLCONFIG=$(KERNEL_OUT)/.config alldefconfig; fi + $(hide) touch $@ + +# provide this rule because there are dependencies on this throughout the repo +$(KERNEL_HEADERS_INSTALL): $(KERNEL_HEADERS_INSTALL_STAMP) + +kerneltags: $(KERNEL_OUT_STAMP) $(KERNEL_CONFIG) + $(MAKE) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_ARCH) $(KERNEL_CROSS_COMPILE) tags + +kernelconfig: KERNELCONFIG_MODE := menuconfig +kernelxconfig: KERNELCONFIG_MODE := xconfig +kernelxconfig kernelconfig: $(KERNEL_OUT_STAMP) + $(MAKE) $(MAKE_FLAGS) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_ARCH) $(KERNEL_CROSS_COMPILE) $(KERNEL_DEFCONFIG) + env KCONFIG_NOTIMESTAMP=true \ + $(MAKE) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_ARCH) $(KERNEL_CROSS_COMPILE) $(KERNELCONFIG_MODE) + env KCONFIG_NOTIMESTAMP=true \ + $(MAKE) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_ARCH) $(KERNEL_CROSS_COMPILE) savedefconfig + cp $(KERNEL_OUT)/defconfig $(KERNEL_DEFCONFIG_SRC) + +kernelsavedefconfig: $(KERNEL_OUT_STAMP) + $(MAKE) $(MAKE_FLAGS) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_ARCH) $(KERNEL_CROSS_COMPILE) $(KERNEL_DEFCONFIG) + env KCONFIG_NOTIMESTAMP=true \ + $(MAKE) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_ARCH) $(KERNEL_CROSS_COMPILE) savedefconfig + cp $(KERNEL_OUT)/defconfig $(KERNEL_DEFCONFIG_SRC) + +alldefconfig: $(KERNEL_OUT_STAMP) + env KCONFIG_NOTIMESTAMP=true \ + $(MAKE) -C $(KERNEL_SRC) O=$(KERNEL_OUT) ARCH=$(KERNEL_ARCH) $(KERNEL_CROSS_COMPILE) alldefconfig + +endif # FULL_KERNEL_BUILD + +## Install it + +ifeq ($(NEEDS_KERNEL_COPY),true) +file := $(INSTALLED_KERNEL_TARGET) +ALL_PREBUILT += $(file) 
+$(file) : $(KERNEL_BIN) | $(ACP) + $(transform-prebuilt-to-target) + +ALL_PREBUILT += $(INSTALLED_KERNEL_TARGET) +endif + +.PHONY: kernel +kernel: $(INSTALLED_KERNEL_TARGET) diff --git a/core/tasks/oem_image.mk b/core/tasks/oem_image.mk index 32d56a76e82..8a066700f04 100644 --- a/core/tasks/oem_image.mk +++ b/core/tasks/oem_image.mk @@ -15,7 +15,16 @@ # # We build oem.img only if it's asked for. +skip_oem_image := true ifneq ($(filter $(MAKECMDGOALS),oem_image),) + skip_oem_image := false +endif + +ifneq ($(BOARD_OEMIMAGE_FILE_SYSTEM_TYPE),) + skip_oem_image := false +endif + +ifneq ($(skip_oem_image),true) ifndef BOARD_OEMIMAGE_PARTITION_SIZE $(error BOARD_OEMIMAGE_PARTITION_SIZE is not set.) endif @@ -43,4 +52,4 @@ $(INSTALLED_OEMIMAGE_TARGET) : $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_OEMIMAGE_F oem_image : $(INSTALLED_OEMIMAGE_TARGET) $(call dist-for-goals, oem_image, $(INSTALLED_OEMIMAGE_TARGET)) -endif # oem_image in $(MAKECMDGOALS) +endif diff --git a/core/tasks/sdk-addon.mk b/core/tasks/sdk-addon.mk index 362b2293514..2ecfbbf3d59 100644 --- a/core/tasks/sdk-addon.mk +++ b/core/tasks/sdk-addon.mk @@ -105,7 +105,7 @@ $(full_target): PRIVATE_DOCS_DIRS := $(addprefix $(OUT_DOCS)/, $(doc_modules)) $(full_target): PRIVATE_STAGING_DIR := $(call append-path,$(staging),$(addon_dir_leaf)) $(full_target): $(sdk_addon_deps) | $(ACP) - @echo Packaging SDK Addon: $@ + @echo "Packaging SDK Addon:"" $@" $(hide) mkdir -p $(PRIVATE_STAGING_DIR)/docs $(hide) for d in $(PRIVATE_DOCS_DIRS); do \ $(ACP) -r $$d $(PRIVATE_STAGING_DIR)/docs ;\ diff --git a/core/version_defaults.mk b/core/version_defaults.mk index 2c8a21f14a7..b09ab6cdc2b 100644 --- a/core/version_defaults.mk +++ b/core/version_defaults.mk @@ -43,7 +43,16 @@ ifeq "" "$(PLATFORM_VERSION)" # which is the version that we reveal to the end user. # Update this value when the platform version changes (rather # than overriding it somewhere else). Can be an arbitrary string. 
- PLATFORM_VERSION := 7.1.1 + + # When you add a new PLATFORM_VERSION which will result in a new + # PLATFORM_SDK_VERSION please ensure you add a corresponding isAtLeast* + # method in the following java file: + # frameworks/support/compat/gingerbread/android/support/v4/os/BuildCompat.java + + # When you change PLATFORM_VERSION for a given PLATFORM_SDK_VERSION + # please add that PLATFORM_VERSION to the following text file: + # cts/tests/tests/os/assets/platform_versions.txt + PLATFORM_VERSION := 7.1.2 endif ifeq "" "$(PLATFORM_SDK_VERSION)" @@ -55,6 +64,14 @@ ifeq "" "$(PLATFORM_SDK_VERSION)" # intermediate builds). During development, this number remains at the # SDK version the branch is based on and PLATFORM_VERSION_CODENAME holds # the code-name of the new development work. + + # When you change PLATFORM_SDK_VERSION please ensure you also update the + # corresponding methods for isAtLeast* in the following java file: + # frameworks/support/compat/gingerbread/android/support/v4/os/BuildCompat.java + + # When you increment the PLATFORM_SDK_VERSION please ensure you also + # clear out the following text file of all older PLATFORM_VERSION's: + # cts/tests/tests/os/assets/platform_versions.txt PLATFORM_SDK_VERSION := 25 endif @@ -114,7 +131,7 @@ ifeq "" "$(PLATFORM_SECURITY_PATCH)" # It must be of the form "YYYY-MM-DD" on production devices. # It must match one of the Android Security Patch Level strings of the Public Security Bulletins. # If there is no $PLATFORM_SECURITY_PATCH set, keep it empty. 
- PLATFORM_SECURITY_PATCH := 2016-12-05 + PLATFORM_SECURITY_PATCH := 2017-12-05 endif ifeq "" "$(PLATFORM_BASE_OS)" @@ -140,7 +157,7 @@ ifeq "" "$(BUILD_DATETIME)" BUILD_DATETIME := $(shell date +%s) endif -ifneq (,$(findstring Darwin,$(shell uname -sm))) +ifneq (,$(findstring Darwin,$(UNAME))) DATE := date -r $(BUILD_DATETIME) else DATE := date -d @$(BUILD_DATETIME) diff --git a/envsetup.sh b/envsetup.sh index 35df2d5f3e1..0bb2a293a1f 100644 --- a/envsetup.sh +++ b/envsetup.sh @@ -4,12 +4,18 @@ Invoke ". build/envsetup.sh" from your shell to add the following functions to y - lunch: lunch - - tapas: tapas [ ...] [arm|x86|mips|armv5|arm64|x86_64|mips64] [eng|userdebug|user] - croot: Changes directory to the top of the tree. +- cout: Changes directory to out. - m: Makes from the top of the tree. - mm: Builds all of the modules in the current directory, but not their dependencies. - mmm: Builds all of the modules in the supplied directories, but not their dependencies. To limit the modules being built use the syntax: mmm dir/:target1,target2. - mma: Builds all of the modules in the current directory, and their dependencies. - mmma: Builds all of the modules in the supplied directories, and their dependencies. +- mmap: Builds all of the modules in the current directory, and its dependencies, then pushes the package to the device. +- mmp: Builds all of the modules in the current directory and pushes them to the device. +- mmmp: Builds all of the modules in the supplied directories and pushes them to the device. +- mms: Short circuit builder. Quickly re-build the kernel, rootfs, boot and system images + without deep dependencies. Requires the full build to have run before. - provision: Flash device with all required partitions. Options will be passed on to fastboot. - cgrep: Greps on all local C/C++ files. - ggrep: Greps on all local Gradle files. @@ -20,6 +26,20 @@ Invoke ". 
build/envsetup.sh" from your shell to add the following functions to y - sepgrep: Greps on all local sepolicy files. - sgrep: Greps on all local source files. - godir: Go to the directory containing a file. +- cmremote: Add git remote for CM Gerrit Review +- cmgerrit: A Git wrapper that fetches/pushes patch from/to CM Gerrit Review +- aospremote: Add git remote for matching AOSP repository +- cafremote: Add git remote for matching CodeAurora repository. +- cmrebase: Rebase a Gerrit change and push it again +- mka: Builds using SCHED_BATCH on all processors +- mkap: Builds the module(s) using mka and pushes them to the device. +- cmka: Cleans and builds using mka. +- repolastsync: Prints date and time of last repo sync. +- reposync: Parallel repo sync using ionice and SCHED_BATCH +- repopick: Utility to fetch changes from Gerrit. +- installboot: Installs a boot.img to the connected device. +- installrecovery: Installs a recovery.img to the connected device. +- repodiff: Diff 2 different branches or tags within the same repo Environment options: - SANITIZE_HOST: Set to 'true' to use ASAN for all host modules. Note that @@ -29,12 +49,9 @@ Environment options: Look at the source to view more functions. The complete list is: EOF T=$(gettop) - local A - A="" for i in `cat $T/build/envsetup.sh | sed -n "/^[[:blank:]]*function /s/function \([a-z_]*\).*/\1/p" | sort | uniq`; do - A="$A $i" - done - echo $A + echo "$i" + done | column } # Get all the build variables needed by this script in a single call to the build system. @@ -45,7 +62,7 @@ function build_build_var_cache() cached_vars=`cat $T/build/envsetup.sh | tr '()' ' ' | awk '{for(i=1;i<=NF;i++) if($i~/get_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '` cached_abs_vars=`cat $T/build/envsetup.sh | tr '()' ' ' | awk '{for(i=1;i<=NF;i++) if($i~/get_abs_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '` # Call the build system to dump the "=" pairs as a shell script. 
- build_dicts_script=`\cd $T; CALLED_FROM_SETUP=true BUILD_SYSTEM=build/core \ + build_dicts_script=`\cd $T; export CALLED_FROM_SETUP=true; export BUILD_SYSTEM=build/core; \ command make --no-print-directory -f build/core/config.mk \ dump-many-vars \ DUMP_MANY_VARS="$cached_vars" \ @@ -74,11 +91,11 @@ function build_build_var_cache() function destroy_build_var_cache() { unset BUILD_VAR_CACHE_READY - for v in $cached_vars; do + for v in $(echo $cached_vars | tr " " "\n"); do unset var_cache_$v done unset cached_vars - for v in $cached_abs_vars; do + for v in $(echo $cached_abs_vars | tr " " "\n"); do unset abs_var_cache_$v done unset cached_abs_vars @@ -98,7 +115,7 @@ function get_abs_build_var() echo "Couldn't locate the top of the tree. Try setting TOP." >&2 return fi - (\cd $T; CALLED_FROM_SETUP=true BUILD_SYSTEM=build/core \ + (\cd $T; export CALLED_FROM_SETUP=true; export BUILD_SYSTEM=build/core; \ command make --no-print-directory -f build/core/config.mk dumpvar-abs-$1) } @@ -116,7 +133,7 @@ function get_build_var() echo "Couldn't locate the top of the tree. Try setting TOP." >&2 return fi - (\cd $T; CALLED_FROM_SETUP=true BUILD_SYSTEM=build/core \ + (\cd $T; export CALLED_FROM_SETUP=true; export BUILD_SYSTEM=build/core; \ command make --no-print-directory -f build/core/config.mk dumpvar-$1) } @@ -128,6 +145,15 @@ function check_product() echo "Couldn't locate the top of the tree. Try setting TOP." 
>&2 return fi + + if (echo -n $1 | grep -q -e "^aicp_") ; then + AICP_BUILD=$(echo -n $1 | sed -e 's/^aicp_//g') + export BUILD_NUMBER=$((date +%s%N ; echo $AICP_BUILD; hostname) | openssl sha1 | sed -e 's/.*=//g; s/ //g' | cut -c1-10) + else + AICP_BUILD= + fi + export AICP_BUILD + TARGET_PRODUCT=$1 \ TARGET_BUILD_VARIANT= \ TARGET_BUILD_TYPE= \ @@ -282,7 +308,6 @@ function set_stuff_for_environment() setpaths set_sequence_number - export ANDROID_BUILD_TOP=$(gettop) # With this environment variable new GCC can apply colors to warnings/errors export GCC_COLORS='error=01;31:warning=01;35:note=01;36:caret=01;32:locus=01:quote=01' export ASAN_OPTIONS=detect_leaks=0 @@ -300,35 +325,42 @@ function settitle() local product=$TARGET_PRODUCT local variant=$TARGET_BUILD_VARIANT local apps=$TARGET_BUILD_APPS + if [ -z "$PROMPT_COMMAND" ]; then + # No prompts + PROMPT_COMMAND="echo -ne \"\033]0;${USER}@${HOSTNAME}: ${PWD}\007\"" + elif [ -z "$(echo $PROMPT_COMMAND | grep '033]0;')" ]; then + # Prompts exist, but no hardstatus + PROMPT_COMMAND="echo -ne \"\033]0;${USER}@${HOSTNAME}: ${PWD}\007\";${PROMPT_COMMAND}" + fi + if [ ! -z "$ANDROID_PROMPT_PREFIX" ]; then + PROMPT_COMMAND="$(echo $PROMPT_COMMAND | sed -e 's/$ANDROID_PROMPT_PREFIX //g')" + fi + if [ -z "$apps" ]; then - export PROMPT_COMMAND="echo -ne \"\033]0;[${arch}-${product}-${variant}] ${USER}@${HOSTNAME}: ${PWD}\007\"" + ANDROID_PROMPT_PREFIX="[${arch}-${product}-${variant}]" else - export PROMPT_COMMAND="echo -ne \"\033]0;[$arch $apps $variant] ${USER}@${HOSTNAME}: ${PWD}\007\"" + ANDROID_PROMPT_PREFIX="[$arch $apps $variant]" fi + export ANDROID_PROMPT_PREFIX + + # Inject build data into hardstatus + export PROMPT_COMMAND="$(echo $PROMPT_COMMAND | sed -e 's/\\033]0;\(.*\)\\007/\\033]0;$ANDROID_PROMPT_PREFIX \1\\007/g')" fi } -function addcompletions() +function check_bash_version() { - local T dir f - # Keep us from trying to run in something that isn't bash. 
if [ -z "${BASH_VERSION}" ]; then - return + return 1 fi # Keep us from trying to run in bash that's too old. - if [ ${BASH_VERSINFO[0]} -lt 3 ]; then - return + if [ "${BASH_VERSINFO[0]}" -lt 4 ] ; then + return 2 fi - dir="sdk/bash_completion" - if [ -d ${dir} ]; then - for f in `/bin/ls ${dir}/[a-z]*.bash 2> /dev/null`; do - echo "including $f" - . $f - done - fi + return 0 } function choosetype() @@ -526,23 +558,96 @@ function print_lunch_menu() local uname=$(uname) echo echo "You're building on" $uname + if [ "$(uname)" = "Darwin" ] ; then + echo " (ohai, koush!)" + fi echo - echo "Lunch menu... pick a combo:" + if [ "z${AICP_DEVICES_ONLY}" != "z" ]; then + echo "Breakfast menu... pick a combo:" + else + echo "Lunch menu... pick a combo:" + fi local i=1 local choice for choice in ${LUNCH_MENU_CHOICES[@]} do - echo " $i. $choice" + echo " $i. $choice " i=$(($i+1)) - done + done | column + + if [ "z${AICP_DEVICES_ONLY}" != "z" ]; then + echo "... and don't forget the bacon!" + fi echo } +function brunch() +{ + breakfast $* + if [ $? -eq 0 ]; then + mka bacon + else + echo "No such item in brunch menu. Try 'breakfast'" + return 1 + fi + return $? +} + +function brunchopen() +{ + breakfast $* + if [ $? -eq 0 ]; then + mka bacon && xdg-open $ANDROID_PRODUCT_OUT + else + echo "No such item in brunch menu. Try 'breakfast'" + return 1 + fi + return $? +} + +function breakfast() +{ + target=$1 + local variant=$2 + AICP_DEVICES_ONLY="true" + unset LUNCH_MENU_CHOICES + add_lunch_combo full-eng + for f in `/bin/ls vendor/aicp/vendorsetup.sh 2> /dev/null` + do + if [[ ${ENVSETUP_DEBUG} != false ]]; then + echo "including $f" + fi + . $f + done + unset f + + if [ $# -eq 0 ]; then + # No arguments, so let's have the full menu + lunch + else + echo "z$target" | grep -q "-" + if [ $? 
-eq 0 ]; then + # A buildtype was specified, assume a full device name + lunch $target + else + # This is probably just the AICP model name + if [ -z "$variant" ]; then + variant="userdebug" + fi + lunch aicp_$target-$variant + fi + fi + return $? +} + +alias bib=breakfast + function lunch() { local answer + LUNCH_MENU_CHOICES=($(for l in ${LUNCH_MENU_CHOICES[@]}; do echo "$l"; done | sort)) if [ "$1" ] ; then answer=$1 @@ -588,9 +693,25 @@ function lunch() fi local product=$(echo -n $selection | sed -e "s/-.*$//") + check_product $product + if [ $? -ne 0 ] + then + # if we can't find a product, try to grab it off the AICP github + T=$(gettop) + pushd $T > /dev/null + build/tools/roomservice.py $product + cd - > /dev/null + check_product $product + else + T=$(gettop) + pushd $T > /dev/null + build/tools/roomservice.py $product true + cd - > /dev/null + fi TARGET_PRODUCT=$product \ TARGET_BUILD_VARIANT=$variant \ build_build_var_cache + if [ $? -ne 0 ] then echo @@ -611,6 +732,8 @@ function lunch() echo + fixup_common_out_dir + set_stuff_for_environment printconfig destroy_build_var_cache @@ -627,7 +750,7 @@ function _lunch() COMPREPLY=( $(compgen -W "${LUNCH_MENU_CHOICES[*]}" -- ${cur}) ) return 0 } -complete -F _lunch lunch +complete -F _lunch lunch 2>/dev/null # Configures the build to build unbundled apps. # Run tapas with one or more app names (from LOCAL_PACKAGE_NAME) @@ -682,6 +805,57 @@ function tapas() destroy_build_var_cache } +function eat() +{ + if [ "$OUT" ] ; then + MODVERSION=$(get_build_var AICP_VERSION) + ZIPFILE=aicp-$MODVERSION.zip + ZIPPATH=$OUT/$ZIPFILE + if [ ! -f $ZIPPATH ] ; then + echo "Nothing to eat" + return 1 + fi + adb start-server # Prevent unexpected starting server message from adb get-state in the next line + if [ $(adb get-state) != device -a $(adb shell test -e /sbin/recovery 2> /dev/null; echo $?) != 0 ] ; then + echo "No device is online. Waiting for one..." 
+ echo "Please connect USB and/or enable USB debugging" + until [ $(adb get-state) = device -o $(adb shell test -e /sbin/recovery 2> /dev/null; echo $?) = 0 ];do + sleep 1 + done + echo "Device Found.." + fi + if (adb shell getprop ro.aicp.device | grep -q "$AICP_BUILD"); + then + # if adbd isn't root we can't write to /cache/recovery/ + adb root + sleep 1 + adb wait-for-device + cat << EOF > /tmp/command +--sideload_auto_reboot +EOF + if adb push /tmp/command /cache/recovery/ ; then + echo "Rebooting into recovery for sideload installation" + adb reboot recovery + adb wait-for-sideload + adb sideload $ZIPPATH + fi + rm /tmp/command + else + echo "Nothing to eat" + return 1 + fi + return $? + else + echo "The connected device does not appear to be $AICP_BUILD, run away!" + fi +} + +function omnom +{ + brunch $* + eat +} + function gettop { local TOPFILE=build/core/envsetup.mk @@ -765,7 +939,6 @@ function mm() local M=$(findmakefile) local MODULES= local GET_INSTALL_PATH= - local ARGS= # Remove the path to top as the makefilepath needs to be relative local M=`echo $M|sed 's:'$T'/::'` if [ ! 
"$T" ]; then @@ -782,12 +955,12 @@ function mm() done if [ -n "$GET_INSTALL_PATH" ]; then MODULES= - ARGS=GET-INSTALL-PATH + # set all args to 'GET-INSTALL-PATH' + set -- GET-INSTALL-PATH else MODULES=all_modules - ARGS=$@ fi - ONE_SHOT_MAKEFILE=$M $DRV make -C $T -f build/core/main.mk $MODULES $ARGS + ONE_SHOT_MAKEFILE=$M $DRV make -C $T -f build/core/main.mk $MODULES "$@" fi fi } @@ -802,8 +975,15 @@ function mmm() local ARGS= local DIR TO_CHOP local GET_INSTALL_PATH= - local DASH_ARGS=$(echo "$@" | awk -v RS=" " -v ORS=" " '/^-.*$/') - local DIRS=$(echo "$@" | awk -v RS=" " -v ORS=" " '/^[^-].*$/') + + if [ "$(__detect_shell)" = "zsh" ]; then + set -lA DASH_ARGS $(echo "$@" | awk -v RS=" " -v ORS=" " '/^-.*$/') + set -lA DIRS $(echo "$@" | awk -v RS=" " -v ORS=" " '/^[^-].*$/') + else + local DASH_ARGS=$(echo "$@" | awk -v RS=" " -v ORS=" " '/^-.*$/') + local DIRS=$(echo "$@" | awk -v RS=" " -v ORS=" " '/^[^-].*$/') + fi + for DIR in $DIRS ; do MODULES=`echo $DIR | sed -n -e 's/.*:\(.*$\)/\1/p' | sed 's/,/ /'` if [ "$MODULES" = "" ]; then @@ -869,8 +1049,13 @@ function mmma() local T=$(gettop) local DRV=$(getdriver $T) if [ "$T" ]; then - local DASH_ARGS=$(echo "$@" | awk -v RS=" " -v ORS=" " '/^-.*$/') - local DIRS=$(echo "$@" | awk -v RS=" " -v ORS=" " '/^[^-].*$/') + if [ "$(__detect_shell)" = "zsh" ]; then + set -lA DASH_ARGS $(echo "$@" | awk -v RS=" " -v ORS=" " '/^-.*$/') + set -lA DIRS $(echo "$@" | awk -v RS=" " -v ORS=" " '/^[^-].*$/') + else + local DASH_ARGS=$(echo "$@" | awk -v RS=" " -v ORS=" " '/^-.*$/') + local DIRS=$(echo "$@" | awk -v RS=" " -v ORS=" " '/^[^-].*$/') + fi local MY_PWD=`PWD= /bin/pwd` if [ "$MY_PWD" = "$T" ]; then MY_PWD= @@ -915,6 +1100,15 @@ function croot() fi } +function cout() +{ + if [ "$OUT" ]; then + cd $OUT + else + echo "Couldn't locate out directory. Try setting OUT." 
+ fi +} + function cproj() { TOPFILE=build/core/envsetup.mk @@ -1128,6 +1322,106 @@ function is64bit() fi } +function dddclient() +{ + local OUT_ROOT=$(get_abs_build_var PRODUCT_OUT) + local OUT_SYMBOLS=$(get_abs_build_var TARGET_OUT_UNSTRIPPED) + local OUT_SO_SYMBOLS=$(get_abs_build_var TARGET_OUT_SHARED_LIBRARIES_UNSTRIPPED) + local OUT_VENDOR_SO_SYMBOLS=$(get_abs_build_var TARGET_OUT_VENDOR_SHARED_LIBRARIES_UNSTRIPPED) + local OUT_EXE_SYMBOLS=$(get_symbols_directory) + local PREBUILTS=$(get_abs_build_var ANDROID_PREBUILTS) + local ARCH=$(get_build_var TARGET_ARCH) + local GDB + case "$ARCH" in + arm) GDB=arm-linux-androideabi-gdb;; + arm64) GDB=arm-linux-androideabi-gdb; GDB64=aarch64-linux-android-gdb;; + mips|mips64) GDB=mips64el-linux-android-gdb;; + x86) GDB=x86_64-linux-android-gdb;; + x86_64) GDB=x86_64-linux-android-gdb;; + *) echo "Unknown arch $ARCH"; return 1;; + esac + + if [ "$OUT_ROOT" -a "$PREBUILTS" ]; then + local EXE="$1" + if [ "$EXE" ] ; then + EXE=$1 + if [[ $EXE =~ ^[^/].* ]] ; then + EXE="system/bin/"$EXE + fi + else + EXE="app_process" + fi + + local PORT="$2" + if [ "$PORT" ] ; then + PORT=$2 + else + PORT=":5039" + fi + + local PID="$3" + if [ "$PID" ] ; then + if [[ ! "$PID" =~ ^[0-9]+$ ]] ; then + PID=`pid $3` + if [[ ! "$PID" =~ ^[0-9]+$ ]] ; then + # that likely didn't work because of returning multiple processes + # try again, filtering by root processes (don't contain colon) + PID=`adb shell ps | \grep $3 | \grep -v ":" | awk '{print $2}'` + if [[ ! 
"$PID" =~ ^[0-9]+$ ]] + then + echo "Couldn't resolve '$3' to single PID" + return 1 + else + echo "" + echo "WARNING: multiple processes matching '$3' observed, using root process" + echo "" + fi + fi + fi + adb forward "tcp$PORT" "tcp$PORT" + local USE64BIT="$(is64bit $PID)" + adb shell gdbserver$USE64BIT $PORT --attach $PID & + sleep 2 + else + echo "" + echo "If you haven't done so already, do this first on the device:" + echo " gdbserver $PORT /system/bin/$EXE" + echo " or" + echo " gdbserver $PORT --attach " + echo "" + fi + + OUT_SO_SYMBOLS=$OUT_SO_SYMBOLS$USE64BIT + OUT_VENDOR_SO_SYMBOLS=$OUT_VENDOR_SO_SYMBOLS$USE64BIT + + echo >|"$OUT_ROOT/gdbclient.cmds" "set solib-absolute-prefix $OUT_SYMBOLS" + echo >>"$OUT_ROOT/gdbclient.cmds" "set solib-search-path $OUT_SO_SYMBOLS:$OUT_SO_SYMBOLS/hw:$OUT_SO_SYMBOLS/ssl/engines:$OUT_SO_SYMBOLS/drm:$OUT_SO_SYMBOLS/egl:$OUT_SO_SYMBOLS/soundfx:$OUT_VENDOR_SO_SYMBOLS:$OUT_VENDOR_SO_SYMBOLS/hw:$OUT_VENDOR_SO_SYMBOLS/egl" + echo >>"$OUT_ROOT/gdbclient.cmds" "source $ANDROID_BUILD_TOP/development/scripts/gdb/dalvik.gdb" + echo >>"$OUT_ROOT/gdbclient.cmds" "target remote $PORT" + # Enable special debugging for ART processes. + if [[ $EXE =~ (^|/)(app_process|dalvikvm)(|32|64)$ ]]; then + echo >> "$OUT_ROOT/gdbclient.cmds" "art-on" + fi + echo >>"$OUT_ROOT/gdbclient.cmds" "" + + local WHICH_GDB= + # 64-bit exe found + if [ "$USE64BIT" != "" ] ; then + WHICH_GDB=$ANDROID_TOOLCHAIN/$GDB64 + # 32-bit exe / 32-bit platform + elif [ "$(get_build_var TARGET_2ND_ARCH)" = "" ]; then + WHICH_GDB=$ANDROID_TOOLCHAIN/$GDB + # 32-bit exe / 64-bit platform + else + WHICH_GDB=$ANDROID_TOOLCHAIN_2ND_ARCH/$GDB + fi + + ddd --debugger $WHICH_GDB -x "$OUT_ROOT/gdbclient.cmds" "$OUT_EXE_SYMBOLS/$EXE" + else + echo "Unable to determine build system output dir." + fi +} + case `uname -s` in Darwin) function sgrep() @@ -1484,6 +1778,734 @@ function godir () { \cd $T/$pathname } +function cmremote() +{ + if ! 
git rev-parse --git-dir &> /dev/null + then + echo ".git directory not found. Please run this from the root directory of the Android repository you wish to set up." + return 1 + fi + git remote rm cmremote 2> /dev/null + GERRIT_REMOTE=$(git config --get remote.github.projectname) + CMUSER=$(git config --get review.review.cyanogenmod.org.username) + if [ -z "$CMUSER" ] + then + git remote add cmremote ssh://review.cyanogenmod.org:29418/$GERRIT_REMOTE + else + git remote add cmremote ssh://$CMUSER@review.cyanogenmod.org:29418/$GERRIT_REMOTE + fi + echo "Remote 'cmremote' created" +} + +function aospremote() +{ + if ! git rev-parse --git-dir &> /dev/null + then + echo ".git directory not found. Please run this from the root directory of the Android repository you wish to set up." + return 1 + fi + git remote rm aosp 2> /dev/null + PROJECT=$(pwd -P | sed -e "s#$ANDROID_BUILD_TOP\/##; s#-caf.*##; s#\/default##") + if (echo $PROJECT | grep -qv "^device") + then + PFX="platform/" + fi + git remote add aosp https://android.googlesource.com/$PFX$PROJECT + echo "Remote 'aosp' created" +} + +function cafremote() +{ + if ! git rev-parse --git-dir &> /dev/null + then + echo ".git directory not found. Please run this from the root directory of the Android repository you wish to set up." + return 1 + fi + git remote rm caf 2> /dev/null + PROJECT=$(pwd -P | sed -e "s#$ANDROID_BUILD_TOP\/##; s#-caf.*##; s#\/default##") + if (echo $PROJECT | grep -qv "^device") + then + PFX="platform/" + fi + git remote add caf https://source.codeaurora.org/quic/la/$PFX$PROJECT + echo "Remote 'caf' created" +} + +function installboot() +{ + if [ ! -e "$OUT/recovery/root/etc/recovery.fstab" ]; + then + echo "No recovery.fstab found. Build recovery first." + return 1 + fi + if [ ! -e "$OUT/boot.img" ]; + then + echo "No boot.img found. Run make bootimage first." 
+ return 1 + fi + PARTITION=`grep "^\/boot" $OUT/recovery/root/etc/recovery.fstab | awk {'print $3'}` + if [ -z "$PARTITION" ]; + then + # Try for RECOVERY_FSTAB_VERSION = 2 + PARTITION=`grep "[[:space:]]\/boot[[:space:]]" $OUT/recovery/root/etc/recovery.fstab | awk {'print $1'}` + PARTITION_TYPE=`grep "[[:space:]]\/boot[[:space:]]" $OUT/recovery/root/etc/recovery.fstab | awk {'print $3'}` + if [ -z "$PARTITION" ]; + then + echo "Unable to determine boot partition." + return 1 + fi + fi + adb start-server + adb wait-for-online + adb root + sleep 1 + adb wait-for-online shell mount /system 2>&1 > /dev/null + adb wait-for-online remount + if (adb shell getprop ro.aicp.device | grep -q "$AICP_BUILD"); + then + adb push $OUT/boot.img /cache/ + for i in $OUT/system/lib/modules/*; + do + adb push $i /system/lib/modules/ + done + adb shell dd if=/cache/boot.img of=$PARTITION + adb shell chmod 644 /system/lib/modules/* + echo "Installation complete." + else + echo "The connected device does not appear to be $AICP_BUILD, run away!" + fi +} + +function installrecovery() +{ + if [ ! -e "$OUT/recovery/root/etc/recovery.fstab" ]; + then + echo "No recovery.fstab found. Build recovery first." + return 1 + fi + if [ ! -e "$OUT/recovery.img" ]; + then + echo "No recovery.img found. Run make recoveryimage first." + return 1 + fi + PARTITION=`grep "^\/recovery" $OUT/recovery/root/etc/recovery.fstab | awk {'print $3'}` + if [ -z "$PARTITION" ]; + then + # Try for RECOVERY_FSTAB_VERSION = 2 + PARTITION=`grep "[[:space:]]\/recovery[[:space:]]" $OUT/recovery/root/etc/recovery.fstab | awk {'print $1'}` + PARTITION_TYPE=`grep "[[:space:]]\/recovery[[:space:]]" $OUT/recovery/root/etc/recovery.fstab | awk {'print $3'}` + if [ -z "$PARTITION" ]; + then + echo "Unable to determine recovery partition." 
+ return 1 + fi + fi + adb start-server + adb wait-for-online + adb root + sleep 1 + adb wait-for-online shell mount /system 2>&1 >> /dev/null + adb wait-for-online remount + if (adb shell getprop ro.aicp.device | grep -q "$AICP_BUILD"); + then + adb push $OUT/recovery.img /cache/ + adb shell dd if=/cache/recovery.img of=$PARTITION + echo "Installation complete." + else + echo "The connected device does not appear to be $AICP_BUILD, run away!" + fi +} + +function makerecipe() { + if [ -z "$1" ] + then + echo "No branch name provided." + return 1 + fi + cd android + sed -i s/'default revision=.*'/'default revision="refs\/heads\/'$1'"'/ default.xml + git commit -a -m "$1" + cd .. + + repo forall -c ' + + if [ "$REPO_REMOTE" = "github" ] + then + pwd + cmremote + git push cmremote HEAD:refs/heads/'$1' + fi + ' +} + +function cmgerrit() { + + if [ "$(__detect_shell)" = "zsh" ]; then + # zsh does not define FUNCNAME, derive from funcstack + local FUNCNAME=$funcstack[1] + fi + + if [ $# -eq 0 ]; then + $FUNCNAME help + return 1 + fi + local user=`git config --get review.review.cyanogenmod.org.username` + local review=`git config --get remote.github.review` + local project=`git config --get remote.github.projectname` + local command=$1 + shift + case $command in + help) + if [ $# -eq 0 ]; then + cat <&2 "Gerrit username not found." + return 1 + fi + local local_branch remote_branch + case $1 in + *:*) + local_branch=${1%:*} + remote_branch=${1##*:} + ;; + *) + local_branch=HEAD + remote_branch=$1 + ;; + esac + shift + git push $@ ssh://$user@$review:29418/$project \ + $local_branch:refs/for/$remote_branch || return 1 + ;; + changes|for) + if [ "$FUNCNAME" = "cmgerrit" ]; then + echo >&2 "'$FUNCNAME $command' is deprecated." + fi + ;; + __cmg_err_no_arg) + if [ $# -lt 2 ]; then + echo >&2 "'$FUNCNAME $command' missing argument." + elif [ $2 -eq 0 ]; then + if [ -n "$3" ]; then + $FUNCNAME help $1 + else + echo >&2 "'$FUNCNAME $1' missing argument." 
+ fi + else + return 1 + fi + ;; + __cmg_err_not_repo) + if [ -z "$review" -o -z "$project" ]; then + echo >&2 "Not currently in any reviewable repository." + else + return 1 + fi + ;; + __cmg_err_not_supported) + $FUNCNAME __cmg_err_no_arg $command $# && return + case $1 in + #TODO: filter more git commands that don't use refname + init|add|rm|mv|status|clone|remote|bisect|config|stash) + echo >&2 "'$FUNCNAME $1' is not supported." + ;; + *) return 1 ;; + esac + ;; + #TODO: other special cases? + *) + $FUNCNAME __cmg_err_not_supported $command && return 1 + $FUNCNAME __cmg_err_no_arg $command $# help && return 1 + $FUNCNAME __cmg_err_not_repo && return 1 + local args="$@" + local change pre_args refs_arg post_args + case "$args" in + *--\ *) + pre_args=${args%%-- *} + post_args="-- ${args#*-- }" + ;; + *) pre_args="$args" ;; + esac + args=($pre_args) + pre_args= + if [ ${#args[@]} -gt 0 ]; then + change=${args[${#args[@]}-1]} + fi + if [ ${#args[@]} -gt 1 ]; then + pre_args=${args[0]} + for ((i=1; i<${#args[@]}-1; i++)); do + pre_args="$pre_args ${args[$i]}" + done + fi + while ((1)); do + case $change in + ""|--) + $FUNCNAME help $command + return 1 + ;; + *@*) + if [ -z "$refs_arg" ]; then + refs_arg="@${change#*@}" + change=${change%%@*} + fi + ;; + *~*) + if [ -z "$refs_arg" ]; then + refs_arg="~${change#*~}" + change=${change%%~*} + fi + ;; + *^*) + if [ -z "$refs_arg" ]; then + refs_arg="^${change#*^}" + change=${change%%^*} + fi + ;; + *:*) + if [ -z "$refs_arg" ]; then + refs_arg=":${change#*:}" + change=${change%%:*} + fi + ;; + *) break ;; + esac + done + $FUNCNAME fetch $change \ + && git $command $pre_args FETCH_HEAD$refs_arg $post_args \ + || return 1 + ;; + esac +} + +function cmrebase() { + local repo=$1 + local refs=$2 + local pwd="$(pwd)" + local dir="$(gettop)/$repo" + + if [ -z $repo ] || [ -z $refs ]; then + echo "CyanogenMod Gerrit Rebase Usage: " + echo " cmrebase " + echo " The patch IDs appear on the Gerrit commands that are offered." 
+ echo " They consist on a series of numbers and slashes, after the text" + echo " refs/changes. For example, the ID in the following command is 26/8126/2" + echo "" + echo " git[...]ges_apps_Camera refs/changes/26/8126/2 && git cherry-pick FETCH_HEAD" + echo "" + return + fi + + if [ ! -d $dir ]; then + echo "Directory $dir doesn't exist in tree." + return + fi + cd $dir + repo=$(cat .git/config | grep git://github.com | awk '{ print $NF }' | sed s#git://github.com/##g) + echo "Starting branch..." + repo start tmprebase . + echo "Bringing it up to date..." + repo sync . + echo "Fetching change..." + git fetch "http://review.cyanogenmod.org/p/$repo" "refs/changes/$refs" && git cherry-pick FETCH_HEAD + if [ "$?" != "0" ]; then + echo "Error cherry-picking. Not uploading!" + return + fi + echo "Uploading..." + repo upload . + echo "Cleaning up..." + repo abandon tmprebase . + cd $pwd +} + +function mka() { + local T=$(gettop) + if [ "$T" ]; then + case `uname -s` in + Darwin) + make -C $T -j `sysctl hw.ncpu|cut -d" " -f2` "$@" + ;; + *) + mk_timer schedtool -B -n 10 -e ionice -n 7 make -C $T -j$(grep "^processor" /proc/cpuinfo | wc -l) "$@" + ;; + esac + + else + echo "Couldn't locate the top of the tree. Try setting TOP." + fi +} + +function cmka() { + if [ ! -z "$1" ]; then + for i in "$@"; do + case $i in + bacon|otapackage|systemimage) + mka installclean + mka $i + ;; + *) + mka clean-$i + mka $i + ;; + esac + done + else + mka clean + mka + fi +} + +function mms() { + local T=$(gettop) + if [ -z "$T" ] + then + echo "Couldn't locate the top of the tree. Try setting TOP." 
+ return 1 + fi + + case `uname -s` in + Darwin) + local NUM_CPUS=$(sysctl hw.ncpu|cut -d" " -f2) + ONE_SHOT_MAKEFILE="__none__" \ + make -C $T -j $NUM_CPUS "$@" + ;; + *) + local NUM_CPUS=$(grep "^processor" /proc/cpuinfo | wc -l) + ONE_SHOT_MAKEFILE="__none__" \ + mk_timer schedtool -B -n 1 -e ionice -n 1 \ + make -C $T -j $NUM_CPUS "$@" + ;; + esac +} + + +function repolastsync() { + RLSPATH="$ANDROID_BUILD_TOP/.repo/.repo_fetchtimes.json" + RLSLOCAL=$(date -d "$(stat -c %z $RLSPATH)" +"%e %b %Y, %T %Z") + RLSUTC=$(date -d "$(stat -c %z $RLSPATH)" -u +"%e %b %Y, %T %Z") + echo "Last repo sync: $RLSLOCAL / $RLSUTC" +} + +function reposync() { + case `uname -s` in + Darwin) + repo sync -j 4 "$@" + ;; + *) + schedtool -B -n 1 -e ionice -n 1 `which repo` sync -j 4 "$@" + ;; + esac +} + +function repodiff() { + if [ -z "$*" ]; then + echo "Usage: repodiff [[ref-to] [--numstat]]" + return + fi + diffopts=$* repo forall -c \ + 'echo "$REPO_PATH ($REPO_REMOTE)"; git diff ${diffopts} 2>/dev/null ;' +} + +# Return success if adb is up and not in recovery +function _adb_connected { + { + if [[ "$(adb get-state)" == device && + "$(adb shell test -e /sbin/recovery; echo $?)" != 0 ]] + then + return 0 + fi + } 2>/dev/null + + return 1 +}; + +# Credit for color strip sed: http://goo.gl/BoIcm +function dopush() +{ + local func=$1 + shift + + adb start-server # Prevent unexpected starting server message from adb get-state in the next line + if ! _adb_connected; then + echo "No device is online. Waiting for one..." + echo "Please connect USB and/or enable USB debugging" + until _adb_connected; do + sleep 1 + done + echo "Device Found." 
+ fi + + if (adb shell getprop ro.aicp.device | grep -q "$AICP_BUILD") || [ "$FORCE_PUSH" = "true" ]; + then + # retrieve IP and PORT info if we're using a TCP connection + TCPIPPORT=$(adb devices \ + | egrep '^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9]):[0-9]+[^0-9]+' \ + | head -1 | awk '{print $1}') + adb root &> /dev/null + sleep 0.3 + if [ -n "$TCPIPPORT" ] + then + # adb root just killed our connection + # so reconnect... + adb connect "$TCPIPPORT" + fi + adb wait-for-device &> /dev/null + sleep 0.3 + adb remount &> /dev/null + + mkdir -p $OUT + ($func $*|tee $OUT/.log;return ${PIPESTATUS[0]}) + ret=$?; + if [ $ret -ne 0 ]; then + rm -f $OUT/.log;return $ret + fi + + is_gnu_sed=`sed --version | head -1 | grep -c GNU` + + # Install: + if [ $is_gnu_sed -gt 0 ]; then + LOC="$(cat $OUT/.log | sed -r -e 's/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g' -e 's/^\[ {0,2}[0-9]{1,3}% [0-9]{1,6}\/[0-9]{1,6}\] +//' \ + | grep '^Install: ' | cut -d ':' -f 2)" + else + LOC="$(cat $OUT/.log | sed -E "s/"$'\E'"\[([0-9]{1,3}((;[0-9]{1,3})*)?)?[m|K]//g" -E "s/^\[ {0,2}[0-9]{1,3}% [0-9]{1,6}\/[0-9]{1,6}\] +//" \ + | grep '^Install: ' | cut -d ':' -f 2)" + fi + + # Copy: + if [ $is_gnu_sed -gt 0 ]; then + LOC="$LOC $(cat $OUT/.log | sed -r -e 's/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g' -e 's/^\[ {0,2}[0-9]{1,3}% [0-9]{1,6}\/[0-9]{1,6}\] +//' \ + | grep '^Copy: ' | cut -d ':' -f 2)" + else + LOC="$LOC $(cat $OUT/.log | sed -E "s/"$'\E'"\[([0-9]{1,3}((;[0-9]{1,3})*)?)?[m|K]//g" -E 's/^\[ {0,2}[0-9]{1,3}% [0-9]{1,6}\/[0-9]{1,6}\] +//' \ + | grep '^Copy: ' | cut -d ':' -f 2)" + fi + + # If any files are going to /data, push an octal file permissions reader to device + if [ -n "$(echo $LOC | egrep '(^|\s)/data')" ]; then + CHKPERM="/data/local/tmp/chkfileperm.sh" +( +cat <<'EOF' +#!/system/xbin/sh +FILE=$@ +if [ -e $FILE ]; then + ls -l $FILE | awk 
'{k=0;for(i=0;i<=8;i++)k+=((substr($1,i+2,1)~/[rwx]/)*2^(8-i));if(k)printf("%0o ",k);print}' | cut -d ' ' -f1 +fi +EOF +) > $OUT/.chkfileperm.sh + echo "Pushing file permissions checker to device" + adb push $OUT/.chkfileperm.sh $CHKPERM + adb shell chmod 755 $CHKPERM + rm -f $OUT/.chkfileperm.sh + fi + + stop_n_start=false + for FILE in $(echo $LOC | tr " " "\n"); do + # Make sure file is in $OUT/system or $OUT/data + case $FILE in + $OUT/system/*|$OUT/data/*) + # Get target file name (i.e. /system/bin/adb) + TARGET=$(echo $FILE | sed "s#$OUT##") + ;; + *) continue ;; + esac + + case $TARGET in + /data/*) + # fs_config only sets permissions and se labels for files pushed to /system + if [ -n "$CHKPERM" ]; then + OLDPERM=$(adb shell $CHKPERM $TARGET) + OLDPERM=$(echo $OLDPERM | tr -d '\r' | tr -d '\n') + OLDOWN=$(adb shell ls -al $TARGET | awk '{print $2}') + OLDGRP=$(adb shell ls -al $TARGET | awk '{print $3}') + fi + echo "Pushing: $TARGET" + adb push $FILE $TARGET + if [ -n "$OLDPERM" ]; then + echo "Setting file permissions: $OLDPERM, $OLDOWN":"$OLDGRP" + adb shell chown "$OLDOWN":"$OLDGRP" $TARGET + adb shell chmod "$OLDPERM" $TARGET + else + echo "$TARGET did not exist previously, you should set file permissions manually" + fi + adb shell restorecon "$TARGET" + ;; + /system/priv-app/SystemUI/SystemUI.apk|/system/framework/*) + # Only need to stop services once + if ! $stop_n_start; then + adb shell stop + stop_n_start=true + fi + echo "Pushing: $TARGET" + adb push $FILE $TARGET + ;; + *) + echo "Pushing: $TARGET" + adb push $FILE $TARGET + ;; + esac + done + if [ -n "$CHKPERM" ]; then + adb shell rm $CHKPERM + fi + if $stop_n_start; then + adb shell start + fi + rm -f $OUT/.log + return 0 + else + echo "The connected device does not appear to be $AICP_BUILD, run away!" 
+ fi +} + +alias mmp='dopush mm' +alias mmmp='dopush mmm' +alias mmap='dopush mma' +alias mmmap='dopush mmma' +alias mkap='dopush mka' +alias cmkap='dopush cmka' + +function repopick() { + T=$(gettop) + $T/build/tools/repopick.py $@ +} + +function fixup_common_out_dir() { + common_out_dir=$(get_build_var OUT_DIR)/target/common + target_device=$(get_build_var TARGET_DEVICE) + if [ ! -z $AICP_FIXUP_COMMON_OUT ]; then + if [ -d ${common_out_dir} ] && [ ! -L ${common_out_dir} ]; then + mv ${common_out_dir} ${common_out_dir}-${target_device} + ln -s ${common_out_dir}-${target_device} ${common_out_dir} + else + [ -L ${common_out_dir} ] && rm ${common_out_dir} + mkdir -p ${common_out_dir}-${target_device} + ln -s ${common_out_dir}-${target_device} ${common_out_dir} + fi + else + [ -L ${common_out_dir} ] && rm ${common_out_dir} + mkdir -p ${common_out_dir} + fi +} + +# Enable SD-LLVM if available +if [ -d $(gettop)/prebuilts/snapdragon-llvm/toolchains ]; then + case `uname -s` in + Darwin) + # Darwin is not supported yet + ;; + *) + export SDCLANG=true + export SDCLANG_PATH=$(gettop)/prebuilts/snapdragon-llvm/toolchains/llvm-Snapdragon_LLVM_for_Android_3.8/prebuilt/linux-x86_64/bin + export SDCLANG_LTO_DEFS=$(gettop)/device/qcom/common/sdllvm-lto-defs.mk + ;; + esac +fi + +# Android specific JACK args +if [ -n "$JACK_SERVER_VM_ARGUMENTS" ] && [ -z "$ANDROID_JACK_VM_ARGS" ]; then + export ANDROID_JACK_VM_ARGS=$JACK_SERVER_VM_ARGUMENTS +fi + # Force JAVA_HOME to point to java 1.7/1.8 if it isn't already set. function set_java_home() { # Clear the existing JAVA_HOME value if we set it ourselves, so that @@ -1542,10 +2564,10 @@ function get_make_command() echo command make } -function make() +function mk_timer() { local start_time=$(date +"%s") - $(get_make_command) "$@" + $@ local ret=$? 
local end_time=$(date +"%s") local tdiff=$(($end_time-$start_time)) @@ -1609,14 +2631,30 @@ function provision() "$ANDROID_PRODUCT_OUT/provision-device" "$@" } -if [ "x$SHELL" != "x/bin/bash" ]; then +function make() +{ + mk_timer $(get_make_command) "$@" +} + +function __detect_shell() { case `ps -o command -p $$` in *bash*) + echo bash + ;; + *zsh*) + echo zsh ;; *) - echo "WARNING: Only bash is supported, use of other shell would lead to erroneous results" + echo unknown + return 1 ;; esac + return +} + + +if ! __detect_shell > /dev/null; then + echo "WARNING: Only bash and zsh are supported, use of other shell may lead to erroneous results" fi # Execute the contents of any vendorsetup.sh files we can find. @@ -1624,9 +2662,27 @@ for f in `test -d device && find -L device -maxdepth 4 -name 'vendorsetup.sh' 2> `test -d vendor && find -L vendor -maxdepth 4 -name 'vendorsetup.sh' 2> /dev/null | sort` \ `test -d product && find -L product -maxdepth 4 -name 'vendorsetup.sh' 2> /dev/null | sort` do - echo "including $f" + if [[ ${ENVSETUP_DEBUG} != false ]]; then + echo "including $f" + fi . $f done unset f -addcompletions +# Add completions +check_bash_version && { + dirs="sdk/bash_completion vendor/aicp/bash_completion" + for dir in $dirs; do + if [ -d ${dir} ]; then + for f in `/bin/ls ${dir}/[a-z]*.bash 2> /dev/null`; do + if [[ ${ENVSETUP_DEBUG} != false ]]; then + echo "including $f" + fi + + . 
$f + done + fi + done +} + +export ANDROID_BUILD_TOP=$(gettop) diff --git a/target/product/base.mk b/target/product/base.mk index 4d70664677b..a12f939a629 100644 --- a/target/product/base.mk +++ b/target/product/base.mk @@ -108,6 +108,7 @@ PRODUCT_PACKAGES += \ mtpd \ ndc \ netd \ + om \ ping \ ping6 \ platform.xml \ @@ -125,6 +126,7 @@ PRODUCT_PACKAGES += \ svc \ tc \ telecom \ + tm \ vdc \ vold \ wm diff --git a/target/product/core.mk b/target/product/core.mk index 0a4e0fdfed4..dfeacc5a0c4 100644 --- a/target/product/core.mk +++ b/target/product/core.mk @@ -40,7 +40,6 @@ PRODUCT_PACKAGES += \ InputDevices \ KeyChain \ Keyguard \ - LatinIME \ Launcher2 \ ManagedProvisioning \ MtpDocumentsProvider \ @@ -59,4 +58,11 @@ PRODUCT_PACKAGES += \ VpnDialogs \ MmsService +# The set of packages whose code can be loaded by the system server. +PRODUCT_SYSTEM_SERVER_APPS += \ + FusedLocation \ + InputDevices \ + KeyChain \ + Telecom + $(call inherit-product, $(SRC_TARGET_DIR)/product/core_base.mk) diff --git a/target/product/core_base.mk b/target/product/core_base.mk index 03d33e1b65d..86fb36cd767 100644 --- a/target/product/core_base.mk +++ b/target/product/core_base.mk @@ -16,14 +16,9 @@ # Note that components added here will be also shared in PDK. Components # that should not be in PDK should be added in lower level like core.mk. -PRODUCT_PROPERTY_OVERRIDES := \ - ro.config.notification_sound=OnTheHunt.ogg \ - ro.config.alarm_alert=Alarm_Classic.ogg - PRODUCT_PACKAGES += \ ContactsProvider \ DefaultContainerService \ - Home \ TelephonyProvider \ UserDictionaryProvider \ atrace \ diff --git a/target/product/core_minimal.mk b/target/product/core_minimal.mk index 009ca529cc8..82a8d493184 100644 --- a/target/product/core_minimal.mk +++ b/target/product/core_minimal.mk @@ -102,17 +102,25 @@ PRODUCT_BOOT_JARS := \ org.apache.http.legacy.boot # The order of PRODUCT_SYSTEM_SERVER_JARS matters. 
+ifneq ($(TARGET_DISABLE_CMSDK), true) PRODUCT_SYSTEM_SERVER_JARS := \ + org.cyanogenmod.platform \ + org.cyanogenmod.hardware +endif +PRODUCT_SYSTEM_SERVER_JARS += \ services \ ethernet-service \ wifi-service -# Adoptable external storage supports both ext4 and f2fs +# The set of packages whose code can be loaded by the system server. +PRODUCT_SYSTEM_SERVER_APPS += \ + SettingsProvider \ + WallpaperBackup + +# Adoptable external storage f2fs support PRODUCT_PACKAGES += \ - e2fsck \ - make_ext4fs \ fsck.f2fs \ - make_f2fs \ + mkfs.f2fs \ PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \ ro.zygote=zygote32 diff --git a/target/product/core_tiny.mk b/target/product/core_tiny.mk index ec2fa41d11c..5b03057784b 100644 --- a/target/product/core_tiny.mk +++ b/target/product/core_tiny.mk @@ -104,6 +104,13 @@ PRODUCT_SYSTEM_SERVER_JARS := \ services \ wifi-service +# The set of packages whose code can be loaded by the system server. +PRODUCT_SYSTEM_SERVER_APPS += \ + FusedLocation \ + InputDevices \ + SettingsProvider \ + WallpaperBackup + PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \ ro.zygote=zygote32 PRODUCT_COPY_FILES += \ diff --git a/target/product/emulator.mk b/target/product/emulator.mk index b08a28a2a9e..70cd5977aec 100644 --- a/target/product/emulator.mk +++ b/target/product/emulator.mk @@ -51,7 +51,8 @@ PRODUCT_PACKAGES += \ power.goldfish \ fingerprint.ranchu \ fingerprintd \ - sensors.ranchu + sensors.ranchu \ + e2fsck PRODUCT_COPY_FILES += \ device/generic/goldfish/fstab.goldfish:root/fstab.goldfish \ @@ -61,6 +62,7 @@ PRODUCT_COPY_FILES += \ device/generic/goldfish/init.ranchu.rc:root/init.ranchu.rc \ device/generic/goldfish/fstab.ranchu:root/fstab.ranchu \ device/generic/goldfish/ueventd.ranchu.rc:root/ueventd.ranchu.rc \ + device/generic/goldfish/input/goldfish_rotary.idc:system/usr/idc/goldfish_rotary.idc \ frameworks/native/data/etc/android.hardware.usb.accessory.xml:system/etc/permissions/android.hardware.usb.accessory.xml PRODUCT_PACKAGE_OVERLAYS := 
device/generic/goldfish/overlay diff --git a/target/product/full_base.mk b/target/product/full_base.mk index 65bdf0f1b4e..b98f0be5758 100644 --- a/target/product/full_base.mk +++ b/target/product/full_base.mk @@ -21,27 +21,8 @@ PRODUCT_PACKAGES := \ libfwdlockengine \ - OpenWnn \ - libWnnEngDic \ - libWnnJpnDic \ - libwnndict \ WAPPushManager -PRODUCT_PACKAGES += \ - Galaxy4 \ - HoloSpiralWallpaper \ - LiveWallpapers \ - LiveWallpapersPicker \ - MagicSmokeWallpapers \ - NoiseField \ - PhaseBeam \ - PhotoTable - -# Additional settings used in all AOSP builds -PRODUCT_PROPERTY_OVERRIDES := \ - ro.config.ringtone=Ring_Synth_04.ogg \ - ro.config.notification_sound=pixiedust.ogg - # Put en_US first in the list, so make it default. PRODUCT_LOCALES := en_US @@ -51,8 +32,12 @@ $(call inherit-product-if-exists, frameworks/base/data/sounds/AllAudio.mk) # Get the TTS language packs $(call inherit-product-if-exists, external/svox/pico/lang/all_pico_languages.mk) +ifeq ($(TARGET_LOCALES),) # Get a list of languages. $(call inherit-product, $(SRC_TARGET_DIR)/product/locales_full.mk) +else +PRODUCT_LOCALES := $(TARGET_LOCALES) +endif # Get everything else from the parent package $(call inherit-product, $(SRC_TARGET_DIR)/product/generic_no_telephony.mk) diff --git a/target/product/full_base_telephony.mk b/target/product/full_base_telephony.mk index 9a2c63a8d0a..7988e9b777d 100644 --- a/target/product/full_base_telephony.mk +++ b/target/product/full_base_telephony.mk @@ -20,12 +20,7 @@ # entirely appropriate to inherit from for on-device configurations. 
PRODUCT_PROPERTY_OVERRIDES := \ - keyguard.no_require_sim=true \ - ro.com.android.dataroaming=true - -PRODUCT_COPY_FILES := \ - device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \ - frameworks/native/data/etc/handheld_core_hardware.xml:system/etc/permissions/handheld_core_hardware.xml + keyguard.no_require_sim=true $(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base.mk) $(call inherit-product, $(SRC_TARGET_DIR)/product/telephony.mk) diff --git a/target/product/generic_no_telephony.mk b/target/product/generic_no_telephony.mk index 5c4835877a5..6c23fbb7d9d 100644 --- a/target/product/generic_no_telephony.mk +++ b/target/product/generic_no_telephony.mk @@ -37,12 +37,7 @@ PRODUCT_PACKAGES += \ screenrecord PRODUCT_PACKAGES += \ - librs_jni \ - libvideoeditor_jni \ - libvideoeditor_core \ - libvideoeditor_osal \ - libvideoeditor_videofilters \ - libvideoeditorplayer \ + librs_jni PRODUCT_PACKAGES += \ audio.primary.default \ diff --git a/target/product/languages_full.mk b/target/product/languages_full.mk index 98d8c3ce90a..ddc1c7f21f4 100644 --- a/target/product/languages_full.mk +++ b/target/product/languages_full.mk @@ -22,3 +22,6 @@ # These are all the locales that have translations and are displayable # by TextView in this branch. 
PRODUCT_LOCALES := en_US en_AU en_IN fr_FR it_IT es_ES et_EE de_DE nl_NL cs_CZ pl_PL ja_JP zh_TW zh_CN zh_HK ru_RU ko_KR nb_NO es_US da_DK el_GR tr_TR pt_PT pt_BR sv_SE bg_BG ca_ES en_GB fi_FI hi_IN hr_HR hu_HU in_ID iw_IL lt_LT lv_LV ro_RO sk_SK sl_SI sr_RS uk_UA vi_VN tl_PH ar_EG fa_IR th_TH sw_TZ ms_MY af_ZA zu_ZA am_ET en_XA ar_XB fr_CA km_KH lo_LA ne_NP si_LK mn_MN hy_AM az_AZ ka_GE my_MM mr_IN ml_IN is_IS mk_MK ky_KG eu_ES gl_ES bn_BD ta_IN kn_IN te_IN uz_UZ ur_PK kk_KZ sq_AL gu_IN pa_IN be_BY bs_BA + +# CyanogenMod +PRODUCT_LOCALES += ast_ES lb_LU ku_IQ diff --git a/target/product/security/aicp-devkey.x509.pem b/target/product/security/aicp-devkey.x509.pem new file mode 100644 index 00000000000..b7a6ae404b0 --- /dev/null +++ b/target/product/security/aicp-devkey.x509.pem @@ -0,0 +1,23 @@ +-----BEGIN CERTIFICATE----- +MIID1TCCAr2gAwIBAgIJANO67t8hIti6MA0GCSqGSIb3DQEBBQUAMIGAMQswCQYD +VQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3RvbjEQMA4GA1UEBwwHU2VhdHRsZTEX +MBUGA1UECgwOQ3lhbm9nZW4sIEluYy4xGzAZBgNVBAsMElJlbGVhc2UgTWFuYWdl +bWVudDEUMBIGA1UEAwwLRGV2ZWxvcG1lbnQwHhcNMTQwNDI4MjAyODM3WhcNNDEw +OTEzMjAyODM3WjCBgDELMAkGA1UEBhMCVVMxEzARBgNVBAgMCldhc2hpbmd0b24x +EDAOBgNVBAcMB1NlYXR0bGUxFzAVBgNVBAoMDkN5YW5vZ2VuLCBJbmMuMRswGQYD +VQQLDBJSZWxlYXNlIE1hbmFnZW1lbnQxFDASBgNVBAMMC0RldmVsb3BtZW50MIIB +IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz/V9RvYnr18fraPWNeQEZNeg +Kc0A3QskImQyGY22EGBZ63KUxa6zAfAug0OYSjofVJRaTtdvBXjO/C71XZRh4wun +xhOUAJt8zIJ0lRx8GMC0GHzePEnEVvoiu3zSAPHCNf5lmdhyhccMOtC18J+evPf4 +EVBb3cis+F1m6ZoZKPgSFBR5A9CV5Tai8iiZluGGg15Wt12Rp2vmbmQxiOJZxBs4 +Ps40XR5gjO1q4R3HiGnFyql9qeecwaTUWXAd76lhNiLUr7K8IRs+96i+t5vSKajB +M8O99BtYyBtf8ItMnHSZJxtsMw+TFXNLmMtaQarpsjp0LLGuHb/vsrjgBPvzsQID +AQABo1AwTjAdBgNVHQ4EFgQUTpNgXBqV7j+33bi8B80YLQq6EL8wHwYDVR0jBBgw +FoAUTpNgXBqV7j+33bi8B80YLQq6EL8wDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0B +AQUFAAOCAQEAVlVBNksK+1C3J8uQ9kVemYMozfbboV9c4PLbtVzNSO8vwZ3X5E4T +2zfQPcFsfSMIa51a1tETBcDA6k+72xHZ+xEQJQNrX+o1F1RIIrXp0OKAz/k5cXyk 
+OS0+nd0EXP/A1EW0m8N/X6E9wpRkIhfqtEsqeCf8GH7O9Ua2qHZ9zkTBpbAVH0oe +ZWorHBdo3GdMJ5vcjFqnDdRs8F0BnZmjS+NrgXRLhLb6ZARS/bkUQyr5TX82dgG6 +vzvKsdKyX34gsKAsjxwLWo7XXgehFfjY+SGjjilJtardr+y/KlHNEw9s9aLe+Xny +Qoa9j9Ut6/KwRaC6lSEQ7HZk6SdzFsdugA== +-----END CERTIFICATE----- diff --git a/target/product/security/aicp.x509.pem b/target/product/security/aicp.x509.pem new file mode 100644 index 00000000000..5ff19db7213 --- /dev/null +++ b/target/product/security/aicp.x509.pem @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDZDCCAkygAwIBAgIEUfbexjANBgkqhkiG9w0BAQUFADB0MQswCQYDVQQGEwJV +UzELMAkGA1UECBMCV0ExEDAOBgNVBAcTB1NlYXR0bGUxFjAUBgNVBAoTDUN5YW5v +Z2VuIEluYy4xFjAUBgNVBAsTDUN5YW5vZ2VuIEluYy4xFjAUBgNVBAMTDUN5YW5v +Z2VuIEluYy4wHhcNMTMwNzI5MjEyOTQyWhcNNDAxMjE0MjEyOTQyWjB0MQswCQYD +VQQGEwJVUzELMAkGA1UECBMCV0ExEDAOBgNVBAcTB1NlYXR0bGUxFjAUBgNVBAoT +DUN5YW5vZ2VuIEluYy4xFjAUBgNVBAsTDUN5YW5vZ2VuIEluYy4xFjAUBgNVBAMT +DUN5YW5vZ2VuIEluYy4wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCP ++K2NPqdZ6UmexXQ8tsc2TkLXYhiuEsifO66qlpwsTTw1522HcbKPVoPRr/JBXqOv +E3K0HuZ/IsYYGsP/wJWWvpaWs+5xC+YkLkittK2uUzTqndpLFiDRAeICKpvDJI57 +Z0DkzVYXBPn+yw+x8ttjT/vWcJ3PEVYuew8MYPUOgKpdZlQtUBeoEBDSL8JPGViq +e6jWOlSAWekhlgb+wb9RoXhu/v2HYzp89GG0sIrAgj7vZCior5XuFmm8eWhqUhTp +TUBv/nNI/ORYt3G8IQyI2pJN1GNPAAv1uA5i4y/deX1x4GCWyN9feiD9fOj2oc3z +Hwf5Frs9BjOb9XMXecbNAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBAGudhFe9VnES +fWynTYO4kWNlMC++yB6qD3pHW6HtBiaANj9wxrLCTFzf+boHNPeZ8HDkW01zAaQK +fd9/fnGmHf4q/QvxrvGbnb3Fqhw+2hknbbMUoAa+Qp+2ouf9oJaNRquZ+rHEHX8g +Rx8wGyvjaWYfQrwyZRgXj/Jrc/NXxQCmSJeexHVNXgQD6aOLHJYrJ+s+U/hwVNiM +5L+psOh89itwt8DGGSLW16HjQKmPPbWbqxgnfRbOlxWrLDq3agcrskYpDP2aGGBA +5STq/bvh9yZkrNYvMGzrXDhcJ44QRS8e1Jw/ZtfFvJD192e7KKVdy7CJWmOckCNK +gl0KCQ3MBx4= +-----END CERTIFICATE----- diff --git a/tools/adbs b/tools/adbs index a8f06c04ef3..9bd51604566 100755 --- a/tools/adbs +++ b/tools/adbs @@ -14,6 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import print_function + import os import os.path import re diff --git a/tools/buildinfo.sh b/tools/buildinfo.sh index dcb66bf2bb0..ee843557246 100755 --- a/tools/buildinfo.sh +++ b/tools/buildinfo.sh @@ -26,10 +26,8 @@ fi if [ -n "$AB_OTA_UPDATER" ] ; then echo "ro.build.ab_update=$AB_OTA_UPDATER" fi -echo "ro.product.model=$PRODUCT_MODEL" echo "ro.product.brand=$PRODUCT_BRAND" echo "ro.product.name=$PRODUCT_NAME" -echo "ro.product.device=$TARGET_DEVICE" echo "ro.product.board=$TARGET_BOOTLOADER_BOARD_NAME" # These values are deprecated, use "ro.product.cpu.abilist" @@ -45,21 +43,31 @@ echo "ro.product.cpu.abilist32=$TARGET_CPU_ABI_LIST_32_BIT" echo "ro.product.cpu.abilist64=$TARGET_CPU_ABI_LIST_64_BIT" echo "ro.product.manufacturer=$PRODUCT_MANUFACTURER" -if [ -n "$PRODUCT_DEFAULT_LOCALE" ] ; then +if [ -n "$PRODUCT_DEFAULT_LOCALE" ] && [ -z "$TARGET_SKIP_DEFAULT_LOCALE" ] ; then echo "ro.product.locale=$PRODUCT_DEFAULT_LOCALE" fi echo "ro.wifi.channels=$PRODUCT_DEFAULT_WIFI_CHANNELS" echo "ro.board.platform=$TARGET_BOARD_PLATFORM" -echo "# ro.build.product is obsolete; use ro.product.device" -echo "ro.build.product=$TARGET_DEVICE" - -echo "# Do not try to parse description, fingerprint, or thumbprint" -echo "ro.build.description=$PRIVATE_BUILD_DESC" -echo "ro.build.fingerprint=$BUILD_FINGERPRINT" -if [ -n "$BUILD_THUMBPRINT" ] ; then - echo "ro.build.thumbprint=$BUILD_THUMBPRINT" +if [ "$TARGET_UNIFIED_DEVICE" == "" ] ; then + echo "# ro.build.product is obsolete; use ro.product.device" + echo "ro.build.product=$TARGET_DEVICE" + if [ -z "$TARGET_SKIP_PRODUCT_DEVICE" ] ; then + echo "ro.product.model=$PRODUCT_MODEL" + fi + echo "ro.product.device=$TARGET_DEVICE" + echo "# Do not try to parse description, fingerprint, or thumbprint" + echo "ro.build.description=$PRIVATE_BUILD_DESC" + echo "ro.build.fingerprint=$BUILD_FINGERPRINT" + if [ -n "$BUILD_THUMBPRINT" ] ; then + echo "ro.build.thumbprint=$BUILD_THUMBPRINT" + fi +fi +if [ -n 
"$DEVICE_MAINTAINERS" ] ; then + echo "ro.aicp.maintainer=$DEVICE_MAINTAINERS" fi echo "ro.build.characteristics=$TARGET_AAPT_CHARACTERISTICS" +echo "ro.aicp.device=$AICP_DEVICE" + echo "# end build properties" diff --git a/tools/check_radio_versions.py b/tools/check_radio_versions.py index ebe621f4669..2617424d42f 100755 --- a/tools/check_radio_versions.py +++ b/tools/check_radio_versions.py @@ -14,8 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import print_function + import sys -import os try: from hashlib import sha1 @@ -52,8 +53,9 @@ try: f = open(fn + ".sha1") except IOError: - if not bad: print - print "*** Error opening \"%s.sha1\"; can't verify %s" % (fn, key) + if not bad: + print() + print("*** Error opening \"%s.sha1\"; can't verify %s" % (fn, key)) bad = True continue for line in f: @@ -63,17 +65,19 @@ versions[h] = v if digest not in versions: - if not bad: print - print "*** SHA-1 hash of \"%s\" doesn't appear in \"%s.sha1\"" % (fn, fn) + if not bad: + print() + print("*** SHA-1 hash of \"%s\" doesn't appear in \"%s.sha1\"" % (fn, fn)) bad = True continue if versions[digest] not in values: - if not bad: print - print "*** \"%s\" is version %s; not any %s allowed by \"%s\"." % ( - fn, versions[digest], key, sys.argv[1]) + if not bad: + print() + print("*** \"%s\" is version %s; not any %s allowed by \"%s\"." % ( + fn, versions[digest], key, sys.argv[1])) bad = True if bad: - print + print() sys.exit(1) diff --git a/tools/compare_fileslist.py b/tools/compare_fileslist.py index 1f507d8647b..64ad3aee79e 100755 --- a/tools/compare_fileslist.py +++ b/tools/compare_fileslist.py @@ -15,7 +15,16 @@ # limitations under the License. 
# -import cgi, os, string, sys +from __future__ import print_function + +import cgi, os, sys + + +def iteritems(obj): + if hasattr(obj, 'iteritems'): + return obj.iteritems() + return obj.items() + def IsDifferent(row): val = None @@ -33,27 +42,27 @@ def main(argv): data = {} index = 0 for input in inputs: - f = file(input, "r") + f = open(input) lines = f.readlines() f.close() - lines = map(string.split, lines) - lines = map(lambda (x,y): (y,int(x)), lines) + lines = [l.strip() for l in lines] + lines = [(x_y[1],int(x_y[0])) for x_y in lines] for fn,sz in lines: - if not data.has_key(fn): + if fn not in data: data[fn] = {} data[fn][index] = sz index = index + 1 rows = [] - for fn,sizes in data.iteritems(): + for fn,sizes in iteritems(data): row = [fn] for i in range(0,index): - if sizes.has_key(i): + if i in sizes: row.append(sizes[i]) else: row.append(None) rows.append(row) rows = sorted(rows, key=lambda x: x[0]) - print """ + print(""" - """ - print "" - print "" + """) + print("
") + print("") for input in inputs: combo = input.split(os.path.sep)[1] - print " " % cgi.escape(combo) - print "" + print(" " % cgi.escape(combo)) + print("") for row in rows: - print "" + print("") for sz in row[1:]: if not sz: - print " " + print(" ") elif IsDifferent(row[1:]): - print " " % sz + print(" " % sz) else: - print " " % sz - print " " % cgi.escape(row[0]) - print "" - print "
%s
%s
  %d%d%d%s
" - print "" + print(" %d" % sz) + print(" %s" % cgi.escape(row[0])) + print("") + print("") + print("") if __name__ == '__main__': main(sys.argv) diff --git a/tools/device/AndroidBoard.mk.template b/tools/device/AndroidBoard.mk.template new file mode 100644 index 00000000000..55a36d52357 --- /dev/null +++ b/tools/device/AndroidBoard.mk.template @@ -0,0 +1,8 @@ +LOCAL_PATH := $(call my-dir) + +include $(CLEAR_VARS) + +ALL_PREBUILT += $(INSTALLED_KERNEL_TARGET) + +# include the non-open-source counterpart to this file +-include vendor/__MANUFACTURER__/__DEVICE__/AndroidBoardVendor.mk diff --git a/tools/device/AndroidProducts.mk.template b/tools/device/AndroidProducts.mk.template new file mode 100644 index 00000000000..f31c5bf7999 --- /dev/null +++ b/tools/device/AndroidProducts.mk.template @@ -0,0 +1,2 @@ +PRODUCT_MAKEFILES := \ + $(LOCAL_DIR)/device___DEVICE__.mk diff --git a/tools/device/BoardConfig.mk.template b/tools/device/BoardConfig.mk.template new file mode 100644 index 00000000000..617673f9807 --- /dev/null +++ b/tools/device/BoardConfig.mk.template @@ -0,0 +1,31 @@ +USE_CAMERA_STUB := true + +# inherit from the proprietary version +-include vendor/__MANUFACTURER__/__DEVICE__/BoardConfigVendor.mk + +TARGET_ARCH := arm +TARGET_NO_BOOTLOADER := true +TARGET_BOARD_PLATFORM := unknown +TARGET_CPU_ABI := armeabi-v7a +TARGET_CPU_ABI2 := armeabi +TARGET_ARCH_VARIANT := armv7-a-neon +TARGET_CPU_VARIANT := cortex-a7 +TARGET_CPU_SMP := true +ARCH_ARM_HAVE_TLS_REGISTER := true + +TARGET_BOOTLOADER_BOARD_NAME := __DEVICE__ + +BOARD_KERNEL_CMDLINE := __CMDLINE__ +BOARD_KERNEL_BASE := 0x__BASE__ +BOARD_KERNEL_PAGESIZE := __PAGE_SIZE__ + +# fix this up by examining /proc/mtd on a running device +BOARD_BOOTIMAGE_PARTITION_SIZE := 0x00380000 +BOARD_RECOVERYIMAGE_PARTITION_SIZE := 0x00480000 +BOARD_SYSTEMIMAGE_PARTITION_SIZE := 0x08c60000 +BOARD_USERDATAIMAGE_PARTITION_SIZE := 0x105c0000 +BOARD_FLASH_BLOCK_SIZE := 131072 + +TARGET_PREBUILT_KERNEL := 
device/__MANUFACTURER__/__DEVICE__/kernel + +BOARD_HAS_NO_SELECT_BUTTON := true diff --git a/tools/device/aicp.mk.template b/tools/device/aicp.mk.template new file mode 100644 index 00000000000..5f76abaa27b --- /dev/null +++ b/tools/device/aicp.mk.template @@ -0,0 +1,15 @@ +# Release name +PRODUCT_RELEASE_NAME := __DEVICE__ + +# Inherit some common AICP stuff. +$(call inherit-product, vendor/aicp/configs/common.mk) + +# Inherit device configuration +$(call inherit-product, device/__MANUFACTURER__/__DEVICE__/device___DEVICE__.mk) + +## Device identifier. This must come after all inclusions +PRODUCT_DEVICE := __DEVICE__ +PRODUCT_NAME := aicp___DEVICE__ +PRODUCT_BRAND := __MANUFACTURER__ +PRODUCT_MODEL := __DEVICE__ +PRODUCT_MANUFACTURER := __MANUFACTURER__ diff --git a/tools/device/device.mk.template b/tools/device/device.mk.template new file mode 100644 index 00000000000..91ffdc951cc --- /dev/null +++ b/tools/device/device.mk.template @@ -0,0 +1,24 @@ +$(call inherit-product, $(SRC_TARGET_DIR)/product/languages_full.mk) + +# The gps config appropriate for this device +$(call inherit-product, device/common/gps/gps_us_supl.mk) + +$(call inherit-product-if-exists, vendor/__MANUFACTURER__/__DEVICE__/__DEVICE__-vendor.mk) + +DEVICE_PACKAGE_OVERLAYS += device/__MANUFACTURER__/__DEVICE__/overlay + + +ifeq ($(TARGET_PREBUILT_KERNEL),) + LOCAL_KERNEL := device/__MANUFACTURER__/__DEVICE__/kernel +else + LOCAL_KERNEL := $(TARGET_PREBUILT_KERNEL) +endif + +PRODUCT_COPY_FILES += \ + $(LOCAL_KERNEL):kernel + +$(call inherit-product, build/target/product/full.mk) + +PRODUCT_BUILD_PROP_OVERRIDES += BUILD_UTC_DATE=0 +PRODUCT_NAME := full___DEVICE__ +PRODUCT_DEVICE := __DEVICE__ diff --git a/tools/device/mkvendor.sh b/tools/device/mkvendor.sh new file mode 100755 index 00000000000..18671c0dc87 --- /dev/null +++ b/tools/device/mkvendor.sh @@ -0,0 +1,113 @@ +#!/bin/bash + +function usage +{ + echo Usage: + echo " $(basename $0) manufacturer device [boot.img]" + echo " The boot.img 
argument is the extracted recovery or boot image." + echo " The boot.img argument should not be provided for devices" + echo " that have non standard boot images (ie, Samsung)." + echo + echo Example: + echo " $(basename $0) motorola sholes ~/Downloads/recovery-sholes.img" + exit 0 +} + +MANUFACTURER=$1 +DEVICE=$2 +BOOTIMAGE=$3 + +UNPACKBOOTIMG=$(which unpackbootimg) + +echo Arguments: $@ + +if [ -z "$MANUFACTURER" ] +then + usage +fi + +if [ -z "$DEVICE" ] +then + usage +fi + +ANDROID_TOP=$(dirname $0)/../../../ +pushd $ANDROID_TOP > /dev/null +ANDROID_TOP=$(pwd) +popd > /dev/null + +TEMPLATE_DIR=$(dirname $0) +pushd $TEMPLATE_DIR > /dev/null +TEMPLATE_DIR=$(pwd) +popd > /dev/null + +DEVICE_DIR=$ANDROID_TOP/device/$MANUFACTURER/$DEVICE + +if [ ! -z "$BOOTIMAGE" ] +then + if [ -z "$UNPACKBOOTIMG" ] + then + echo unpackbootimg not found. Is your android build environment set up and have the host tools been built? + exit 0 + fi + + BOOTIMAGEFILE=$(basename $BOOTIMAGE) + + echo Output will be in $DEVICE_DIR + mkdir -p $DEVICE_DIR + + TMPDIR=/tmp/$(whoami)/bootimg + rm -rf $TMPDIR + mkdir -p $TMPDIR + cp $BOOTIMAGE $TMPDIR + pushd $TMPDIR > /dev/null + unpackbootimg -i $BOOTIMAGEFILE > /dev/null + mkdir ramdisk + pushd ramdisk > /dev/null + gunzip -c ../$BOOTIMAGEFILE-ramdisk.gz | cpio -i + popd > /dev/null + BASE=$(cat $TMPDIR/$BOOTIMAGEFILE-base) + CMDLINE=$(cat $TMPDIR/$BOOTIMAGEFILE-cmdline) + PAGESIZE=$(cat $TMPDIR/$BOOTIMAGEFILE-pagesize) + export SEDCMD="s#__CMDLINE__#$CMDLINE#g" + echo $SEDCMD > $TMPDIR/sedcommand + cp $TMPDIR/$BOOTIMAGEFILE-zImage $DEVICE_DIR/kernel + popd > /dev/null +else + mkdir -p $DEVICE_DIR + touch $DEVICE_DIR/kernel + BASE=10000000 + CMDLINE=no_console_suspend + PAGESIZE=00000800 + export SEDCMD="s#__CMDLINE__#$CMDLINE#g" + echo $SEDCMD > $TMPDIR/sedcommand +fi + +for file in $(find $TEMPLATE_DIR -name '*.template') +do + OUTPUT_FILE=$DEVICE_DIR/$(basename $(echo $file | sed s/\\.template//g)) + cat $file | sed s/__DEVICE__/$DEVICE/g | 
sed s/__MANUFACTURER__/$MANUFACTURER/g | sed -f $TMPDIR/sedcommand | sed s/__BASE__/$BASE/g | sed s/__PAGE_SIZE__/$PAGESIZE/g > $OUTPUT_FILE +done + +if [ ! -z "$TMPDIR" ] +then + RECOVERY_FSTAB=$TMPDIR/ramdisk/etc/recovery.fstab + if [ -f "$RECOVERY_FSTAB" ] + then + cp $RECOVERY_FSTAB $DEVICE_DIR/recovery.fstab + fi +fi + + +mv $DEVICE_DIR/device.mk $DEVICE_DIR/device_$DEVICE.mk + +echo Creating initial git repository. +pushd $DEVICE_DIR +git init +git add . +git commit -a -m "mkvendor.sh: Initial commit of $DEVICE" +popd + +echo Done! +echo Use the following command to set up your build environment: +echo ' 'lunch cm_$DEVICE-eng diff --git a/tools/device/recovery.fstab.template b/tools/device/recovery.fstab.template new file mode 100644 index 00000000000..41fb92e8bd3 --- /dev/null +++ b/tools/device/recovery.fstab.template @@ -0,0 +1,10 @@ +# mount point fstype device [device2] + +/boot mtd boot +/cache yaffs2 cache +/data yaffs2 userdata +/misc mtd misc +/recovery mtd recovery +/sdcard vfat /dev/block/mmcblk0p1 /dev/block/mmcblk0 +/system yaffs2 system +/sd-ext ext4 /dev/block/mmcblk0p2 diff --git a/tools/device/system.prop.template b/tools/device/system.prop.template new file mode 100644 index 00000000000..4113929395e --- /dev/null +++ b/tools/device/system.prop.template @@ -0,0 +1,3 @@ +# +# system.prop for __DEVICE__ +# diff --git a/tools/diff_package_overlays.py b/tools/diff_package_overlays.py index 0e2c773d1a9..687e1d02422 100755 --- a/tools/diff_package_overlays.py +++ b/tools/diff_package_overlays.py @@ -34,11 +34,13 @@ ... """ +from __future__ import print_function + import sys def main(argv): if len(argv) != 4: - print >> sys.stderr, __doc__ + print(__doc__, file=sys.stderr) sys.exit(1) f = open(argv[1]) @@ -85,7 +87,7 @@ def load_overlay_config(filename): # Print out the package names that have overlay change. 
for r in result: - print r + print(r) if __name__ == "__main__": main(sys.argv) diff --git a/tools/event_log_tags.py b/tools/event_log_tags.py index 645839ec29f..93244a414aa 100644 --- a/tools/event_log_tags.py +++ b/tools/event_log_tags.py @@ -14,6 +14,8 @@ """A module for reading and parsing event-log-tags files.""" +from __future__ import print_function + import re import sys @@ -55,7 +57,7 @@ def __init__(self, filename, file_object=None): if file_object is None: try: file_object = open(filename, "rb") - except (IOError, OSError), e: + except (IOError, OSError) as e: self.AddError(str(e)) return @@ -100,7 +102,7 @@ def __init__(self, filename, file_object=None): self.tags.append(Tag(tag, tagname, description, self.filename, self.linenum)) - except (IOError, OSError), e: + except (IOError, OSError) as e: self.AddError(str(e)) @@ -130,6 +132,6 @@ def WriteOutput(output_file, data): out = open(output_file, "wb") out.write(data) out.close() - except (IOError, OSError), e: - print >> sys.stderr, "failed to write %s: %s" % (output_file, e) + except (IOError, OSError) as e: + print("failed to write %s: %s" % (output_file, e), file=sys.stderr) sys.exit(1) diff --git a/tools/fileslist.py b/tools/fileslist.py index a11efaa8fed..b9e73503e20 100755 --- a/tools/fileslist.py +++ b/tools/fileslist.py @@ -15,12 +15,24 @@ # limitations under the License. 
# -import operator, os, sys +import json, hashlib, operator, os, sys def get_file_size(path): st = os.lstat(path) return st.st_size; +def get_file_digest(path): + if os.path.isfile(path) == False: + return "----------------------------------------------------------------" + digest = hashlib.sha256() + with open(path, 'rb') as f: + while True: + buf = f.read(1024*1024) + if not buf: + break + digest.update(buf) + return digest.hexdigest(); + def main(argv): output = [] roots = argv[1:] @@ -30,16 +42,17 @@ def main(argv): relative = dir[base:] for f in files: try: - row = ( - get_file_size(os.path.sep.join((dir, f))), - os.path.sep.join((relative, f)), - ) + path = os.path.sep.join((dir, f)) + row = { + "Size": get_file_size(path), + "Name": os.path.sep.join((relative, f)), + "SHA256": get_file_digest(path), + } output.append(row) except os.error: pass - output.sort(key=operator.itemgetter(0), reverse=True) - for row in output: - print "%12d %s" % row + output.sort(key=operator.itemgetter("Size", "Name"), reverse=True) + print json.dumps(output, indent=2, separators=(',',': ')) if __name__ == '__main__': main(sys.argv) diff --git a/tools/fileslist_util.py b/tools/fileslist_util.py new file mode 100755 index 00000000000..ff40d51a021 --- /dev/null +++ b/tools/fileslist_util.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python +# +# Copyright (C) 2016 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import getopt, json, sys + +def PrintFileNames(path): + with open(path) as jf: + data = json.load(jf) + for line in data: + print(line["Name"]) + +def PrintCanonicalList(path): + with open(path) as jf: + data = json.load(jf) + for line in data: + print("{0:12d} {1}".format(line["Size"], line["Name"])) + +def PrintUsage(name): + print(""" +Usage: %s -[nc] json_files_list + -n produces list of files only + -c produces classic installed-files.txt +""" % (name)) + +def main(argv): + try: + opts, args = getopt.getopt(argv[1:], "nc", "") + except getopt.GetoptError as err: + print(err) + PrintUsage(argv[0]) + sys.exit(2) + + if len(opts) == 0: + print("No conversion option specified") + PrintUsage(argv[0]) + sys.exit(2) + + if len(args) == 0: + print("No input file specified") + PrintUsage(argv[0]) + sys.exit(2) + + for o, a in opts: + if o == ("-n"): + PrintFileNames(args[0]) + sys.exit() + elif o == ("-c"): + PrintCanonicalList(args[0]) + sys.exit() + else: + assert False, "Unsupported option" + +if __name__ == '__main__': + main(sys.argv) diff --git a/tools/filter-product-graph.py b/tools/filter-product-graph.py index b3a5b42c39f..d6100d8a623 100755 --- a/tools/filter-product-graph.py +++ b/tools/filter-product-graph.py @@ -1,6 +1,8 @@ #!/usr/bin/env python # vim: ts=2 sw=2 nocindent +from __future__ import print_function + import re import sys @@ -55,13 +57,13 @@ def info(m): deps = [dep for dep in deps if dep[1] in included] infos = [info for info in infos if info[0] in included] - print "digraph {" - print "graph [ ratio=.5 ];" + print("digraph {") + print("graph [ ratio=.5 ];") for dep in deps: - print '"%s" -> "%s"' % dep + print('"%s" -> "%s"' % dep) for info in infos: - print '"%s"%s' % info - print "}" + print('"%s"%s' % info) + print("}") if __name__ == "__main__": diff --git a/tools/findleaves.py b/tools/findleaves.py index 3a9e5084e2b..d97ed744d29 100755 --- a/tools/findleaves.py +++ b/tools/findleaves.py @@ -20,12 +20,14 @@ # the search in a given 
subdirectory when the file is found. # +from __future__ import print_function + import os import sys def perform_find(mindepth, prune, dirlist, filename): result = [] - pruneleaves = set(map(lambda x: os.path.split(x)[1], prune)) + pruneleaves = set([os.path.split(x)[1] for x in prune]) for rootdir in dirlist: rootdepth = rootdir.count("/") for root, dirs, files in os.walk(rootdir, followlinks=True): @@ -92,7 +94,7 @@ def main(argv): results = list(set(perform_find(mindepth, prune, dirlist, filename))) results.sort() for r in results: - print r + print(r) if __name__ == "__main__": main(sys.argv) diff --git a/tools/generate-notice-files.py b/tools/generate-notice-files.py index 5b13bf53dea..a4853e63b3d 100755 --- a/tools/generate-notice-files.py +++ b/tools/generate-notice-files.py @@ -20,6 +20,9 @@ -h to display this usage message and exit. """ + +from __future__ import print_function + from collections import defaultdict import getopt import hashlib @@ -40,22 +43,22 @@ try: opts, args = getopt.getopt(sys.argv[1:], "h") -except getopt.GetoptError, err: - print str(err) - print __doc__ +except getopt.GetoptError as err: + print(str(err)) + print(__doc__) sys.exit(2) for o, a in opts: if o == "-h": - print __doc__ + print(__doc__) sys.exit(2) else: - print >> sys.stderr, "unhandled option %s" % (o,) + print("unhandled option %s" % o) if len(args) != 4: - print """need exactly four arguments, the two output files, the file title - and the directory containing notices, not %d""" % (len(args),) - print __doc__ + print("""need exactly four arguments, the two output files, the file title + and the directory containing notices, not %d""" % len(args)) + print(__doc__) sys.exit(1) def hexify(s): @@ -107,13 +110,13 @@ def combine_notice_files_html(file_hash, input_dir, output_filename): # Open the output file, and output the header pieces output_file = open(output_filename, "wb") - print >> output_file, "" - print >> output_file, HTML_OUTPUT_CSS - print >> output_file, '' + 
print("", file=output_file) + print(HTML_OUTPUT_CSS, file=output_file) + print('', file=output_file) # Output our table of contents - print >> output_file, '
' - print >> output_file, "
    " + print('
    ', file=output_file) + print("
      ", file=output_file) # Flatten the list of lists into a single list of filenames sorted_filenames = sorted(itertools.chain.from_iterable(file_hash)) @@ -121,31 +124,31 @@ def combine_notice_files_html(file_hash, input_dir, output_filename): # Print out a nice table of contents for filename in sorted_filenames: stripped_filename = SRC_DIR_STRIP_RE.sub(r"\1", filename) - print >> output_file, '
    • %s
    • ' % (id_table.get(filename), stripped_filename) + print('
    • %s
    • ' % (id_table.get(filename), stripped_filename), file=output_file) - print >> output_file, "
    " - print >> output_file, "
    " + print("
", file=output_file) + print("
", file=output_file) # Output the individual notice file lists - print >>output_file, '' + print('
', file=output_file) for value in file_hash: - print >> output_file, '" - print >> output_file - print >> output_file - print >> output_file + print("%s
" % (SRC_DIR_STRIP_RE.sub(r"\1", filename)), file=output_file) + print("", file=output_file) + print(file=output_file) + print('
', file=output_file)
+        print(html_escape(open(value[0]).read()), file=output_file)
+        print("
", file=output_file) + print("", file=output_file) + print(file=output_file) + print(file=output_file) + print(file=output_file) # Finish off the file output - print >> output_file, "
' % id_table.get(value[0]) - print >> output_file, '
Notices for file(s):
' - print >> output_file, '
' + print('
' % id_table.get(value[0]), file=output_file) + print('
Notices for file(s):
', file=output_file) + print('
', file=output_file) for filename in value: - print >> output_file, "%s
" % (SRC_DIR_STRIP_RE.sub(r"\1", filename)) - print >> output_file, "
" - print >> output_file - print >> output_file, '
'
-        print >> output_file, html_escape(open(value[0]).read())
-        print >> output_file, "
" - print >> output_file, "
" - print >> output_file, "" + print( "", file=output_file) + print("", file=output_file) output_file.close() def combine_notice_files_text(file_hash, input_dir, output_filename, file_title): @@ -153,14 +156,14 @@ def combine_notice_files_text(file_hash, input_dir, output_filename, file_title) SRC_DIR_STRIP_RE = re.compile(input_dir + "(/.*).txt") output_file = open(output_filename, "wb") - print >> output_file, file_title + print(file_title, file=output_file) for value in file_hash: - print >> output_file, "============================================================" - print >> output_file, "Notices for file(s):" + print("============================================================", file=output_file) + print("Notices for file(s):", file=output_file) for filename in value: - print >> output_file, SRC_DIR_STRIP_RE.sub(r"\1", filename) - print >> output_file, "------------------------------------------------------------" - print >> output_file, open(value[0]).read() + print(SRC_DIR_STRIP_RE.sub(r"\1", filename), file=output_file) + print("------------------------------------------------------------", file=output_file) + print(open(value[0]).read(), file=output_file) output_file.close() def main(args): @@ -180,9 +183,9 @@ def main(args): filesets = [sorted(files_with_same_hash[md5]) for md5 in sorted(files_with_same_hash.keys())] - print "Combining NOTICE files into HTML" + print("Combining NOTICE files into HTML") combine_notice_files_html(filesets, input_dir, html_output_file) - print "Combining NOTICE files into text" + print("Combining NOTICE files into text") combine_notice_files_text(filesets, input_dir, txt_output_file, file_title) if __name__ == "__main__": diff --git a/tools/getb64key.py b/tools/getb64key.py new file mode 100755 index 00000000000..976a1576972 --- /dev/null +++ b/tools/getb64key.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python + +from __future__ import print_function + +import base64 +import sys + +pkFile = open(sys.argv[1], 'rb').readlines() 
+base64Key = b"" +inCert = False +for line in pkFile: + if line.startswith(b"-"): + inCert = not inCert + continue + + base64Key += line.strip() + +print(base64.b16encode(base64.b64decode(base64Key)).lower().decode()) diff --git a/tools/ijar/Android.bp b/tools/ijar/Android.bp new file mode 100644 index 00000000000..a244a2ddbbf --- /dev/null +++ b/tools/ijar/Android.bp @@ -0,0 +1,18 @@ +// Copyright 2015 The Android Open Source Project +// +// The rest of files in this directory comes from +// https://github.com/bazelbuild/bazel/tree/master/third_party/ijar + +cc_binary_host { + srcs: [ + "classfile.cc", + "ijar.cc", + "zip.cc", + ], + cflags: [ + "-Wall", + "-Werror", + ], + host_ldlibs: ["-lz"], + name: "ijar", +} diff --git a/tools/java-event-log-tags.py b/tools/java-event-log-tags.py index f364751c4fb..24bad3c3095 100755 --- a/tools/java-event-log-tags.py +++ b/tools/java-event-log-tags.py @@ -23,37 +23,43 @@ -h to display this usage message and exit. """ -import cStringIO +from __future__ import print_function + import getopt import os import os.path import re import sys +try: + from cStringIO import StringIO +except ImportError: + from io import StringIO + import event_log_tags output_file = None try: opts, args = getopt.getopt(sys.argv[1:], "ho:") -except getopt.GetoptError, err: - print str(err) - print __doc__ +except getopt.GetoptError as err: + print(str(err)) + print(__doc__) sys.exit(2) for o, a in opts: if o == "-h": - print __doc__ + print(__doc__) sys.exit(2) elif o == "-o": output_file = a else: - print >> sys.stderr, "unhandled option %s" % (o,) + print("unhandled option %s" % o, file=sys.stderr) sys.exit(1) if len(args) != 2: - print "need exactly two input files, not %d" % (len(args),) - print __doc__ + print("need exactly two input files, not %d" % len(args)) + print(__doc__) sys.exit(1) fn = args[0] @@ -85,10 +91,10 @@ if tagfile.errors: for fn, ln, msg in tagfile.errors: - print >> sys.stderr, "%s:%d: error: %s" % (fn, ln, msg) + print("%s:%d: error: 
%s" % (fn, ln, msg), file=sys.stderr) sys.exit(1) -buffer = cStringIO.StringIO() +buffer = StringIO() buffer.write("/* This file is auto-generated. DO NOT MODIFY.\n" " * Source file: %s\n" " */\n\n" % (fn,)) @@ -143,7 +149,7 @@ def javaName(name): buffer.write("\n }\n") -buffer.write("}\n"); +buffer.write("}\n") output_dir = os.path.dirname(output_file) if not os.path.exists(output_dir): diff --git a/tools/java-layers.py b/tools/java-layers.py index b3aec2b1d5f..3b9d802e5bb 100755 --- a/tools/java-layers.py +++ b/tools/java-layers.py @@ -1,9 +1,18 @@ #!/usr/bin/env python +from __future__ import print_function + import os import re import sys + +def itervalues(obj): + if hasattr(obj, 'itervalues'): + return obj.itervalues() + return obj.values() + + def fail_with_usage(): sys.stderr.write("usage: java-layers.py DEPENDENCY_FILE SOURCE_DIRECTORIES...\n") sys.stderr.write("\n") @@ -69,27 +78,27 @@ def recurse(obj, dep, visited): if upper in deps: recurse(obj, deps[upper], visited) self.deps = deps - self.parts = [(dep.lower.split('.'),dep) for dep in deps.itervalues()] + self.parts = [(dep.lower.split('.'),dep) for dep in itervalues(deps)] # transitive closure of dependencies - for dep in deps.itervalues(): + for dep in itervalues(deps): recurse(dep, dep, []) # disallow everything from the low level components - for dep in deps.itervalues(): + for dep in itervalues(deps): if dep.lowlevel: - for d in deps.itervalues(): + for d in itervalues(deps): if dep != d and not d.legacy: dep.transitive.add(d.lower) # disallow the 'top' components everywhere but in their own package - for dep in deps.itervalues(): + for dep in itervalues(deps): if dep.top and not dep.legacy: - for d in deps.itervalues(): + for d in itervalues(deps): if dep != d and not d.legacy: d.transitive.add(dep.lower) - for dep in deps.itervalues(): + for dep in itervalues(deps): dep.transitive = set([x+"." 
for x in dep.transitive]) if False: - for dep in deps.itervalues(): - print "-->", dep.lower, "-->", dep.transitive + for dep in itervalues(deps): + print("-->", dep.lower, "-->", dep.transitive) # Lookup the dep object for the given package. If pkg is a subpackage # of one with a rule, that one will be returned. If no matches are found, @@ -117,7 +126,7 @@ def compare_parts(parts, pkg): def parse_dependency_file(filename): global err - f = file(filename) + f = open(filename) lines = f.readlines() f.close() def lineno(s, i): @@ -171,7 +180,7 @@ def find_java_files(srcs): result = [] for d in srcs: if d[0] == '@': - f = file(d[1:]) + f = open(d[1:]) result.extend([fn for fn in [s.strip() for s in f.readlines()] if len(fn) != 0]) f.close() @@ -188,7 +197,7 @@ def find_java_files(srcs): def examine_java_file(deps, filename): global err # Yes, this is a crappy java parser. Write a better one if you want to. - f = file(filename) + f = open(filename) text = f.read() f.close() text = COMMENTS.sub("", text) @@ -218,8 +227,8 @@ def examine_java_file(deps, filename): imports.append(m.group(1)) # Do the checking if False: - print filename - print "'%s' --> %s" % (pkg, imports) + print(filename) + print("'%s' --> %s" % (pkg, imports)) dep = deps.lookup(pkg) if not dep: sys.stderr.write(("%s: Error: Package does not appear in dependency file: " diff --git a/tools/makeparallel/Makefile b/tools/makeparallel/Makefile index 4e12b10f2a8..82a4abfac2e 100644 --- a/tools/makeparallel/Makefile +++ b/tools/makeparallel/Makefile @@ -65,8 +65,9 @@ MAKEPARALLEL_NINJA_TEST := MAKEFLAGS= MAKELEVEL= MAKEPARALLEL="$(MAKEPARALLEL) - makeparallel_test: $(MAKEPARALLEL) @EXPECTED="-j1234" $(MAKEPARALLEL_TEST) -j1234 @EXPECTED="-j123" $(MAKEPARALLEL_TEST) -j123 - @EXPECTED="-j1" $(MAKEPARALLEL_TEST) -j1 - @EXPECTED="-j1" $(MAKEPARALLEL_TEST) + @EXPECTED="" $(MAKEPARALLEL_TEST) -j1 + @EXPECTED="-j$$(($$(nproc) + 2))" $(MAKEPARALLEL_TEST) -j + @EXPECTED="" $(MAKEPARALLEL_TEST) @EXPECTED="-j1234" 
$(MAKEPARALLEL_NINJA_TEST) -j1234 @EXPECTED="-j123" $(MAKEPARALLEL_NINJA_TEST) -j123 @@ -87,8 +88,6 @@ makeparallel_test: $(MAKEPARALLEL) @EXPECTED="-j1234 -k0" $(MAKEPARALLEL_NINJA_TEST) -j1234 -k @EXPECTED="-j1234 -k0" $(MAKEPARALLEL_NINJA_TEST) -kt -j1234 - @EXPECTED="-j1" $(MAKEPARALLEL_TEST) A=-j1234 - @EXPECTED="-j1" $(MAKEPARALLEL_TEST) A\ -j1234=-j1234 - @EXPECTED="-j1234" $(MAKEPARALLEL_TEST) A\ -j1234=-j1234 -j1234 + @EXPECTED="" $(MAKEPARALLEL_TEST) A=-j1234 @EXPECTED="-j1234 args" ARGS="args" $(MAKEPARALLEL_TEST) -j1234 diff --git a/tools/makeparallel/makeparallel.cpp b/tools/makeparallel/makeparallel.cpp index 3c39846ecd5..b910b058511 100644 --- a/tools/makeparallel/makeparallel.cpp +++ b/tools/makeparallel/makeparallel.cpp @@ -316,27 +316,67 @@ int main(int argc, char* argv[]) { } } - std::string jarg = "-j" + std::to_string(tokens + 1); + std::string jarg; + if (parallel) { + if (tokens == 0) { + if (ninja) { + // ninja is parallel by default + jarg = ""; + } else { + // make -j with no argument, guess a reasonable parallelism like ninja does + jarg = "-j" + std::to_string(sysconf(_SC_NPROCESSORS_ONLN) + 2); + } + } else { + jarg = "-j" + std::to_string(tokens + 1); + } + } + if (ninja) { if (!parallel) { // ninja is parallel by default, pass -j1 to disable parallelism if make wasn't parallel args.push_back(strdup("-j1")); - } else if (tokens > 0) { - args.push_back(strdup(jarg.c_str())); + } else { + if (jarg != "") { + args.push_back(strdup(jarg.c_str())); + } } if (keep_going) { args.push_back(strdup("-k0")); } } else { - args.push_back(strdup(jarg.c_str())); + if (jarg != "") { + args.push_back(strdup(jarg.c_str())); + } } args.insert(args.end(), &argv[2], &argv[argc]); args.push_back(nullptr); - pid_t pid = fork(); + static pid_t pid; + + // Set up signal handlers to forward SIGHUP, SIGINT, SIGQUIT, SIGTERM, and + // SIGALRM to child + struct sigaction action = {}; + action.sa_flags = SA_SIGINFO | SA_RESTART, + action.sa_sigaction = [](int 
signal, siginfo_t*, void*) { + if (pid > 0) { + kill(pid, signal); + } + }; + + int ret = 0; + if (!ret) ret = sigaction(SIGHUP, &action, NULL); + if (!ret) ret = sigaction(SIGINT, &action, NULL); + if (!ret) ret = sigaction(SIGQUIT, &action, NULL); + if (!ret) ret = sigaction(SIGTERM, &action, NULL); + if (!ret) ret = sigaction(SIGALRM, &action, NULL); + if (ret < 0) { + error(errno, errno, "sigaction failed"); + } + + pid = fork(); if (pid < 0) { error(errno, errno, "fork failed"); } else if (pid == 0) { @@ -351,9 +391,10 @@ int main(int argc, char* argv[]) { } // parent + siginfo_t status = {}; int exit_status = 0; - int ret = waitid(P_PID, pid, &status, WEXITED); + ret = waitid(P_PID, pid, &status, WEXITED); if (ret < 0) { error(errno, errno, "waitpid failed"); } else if (status.si_code == CLD_EXITED) { diff --git a/tools/merge-event-log-tags.py b/tools/merge-event-log-tags.py index 64bad3f8133..77900484119 100755 --- a/tools/merge-event-log-tags.py +++ b/tools/merge-event-log-tags.py @@ -24,7 +24,8 @@ -h to display this usage message and exit. 
""" -import cStringIO +from __future__ import print_function + import getopt try: import hashlib @@ -33,8 +34,20 @@ import struct import sys +try: + from cStringIO import StringIO +except ImportError: + from io import StringIO + import event_log_tags + +def iteritems(obj): + if hasattr(obj, 'iteritems'): + return obj.iteritems() + return obj.items() + + errors = [] warnings = [] @@ -48,21 +61,21 @@ try: opts, args = getopt.getopt(sys.argv[1:], "ho:m:") -except getopt.GetoptError, err: - print str(err) - print __doc__ +except getopt.GetoptError as err: + print(str(err)) + print(__doc__) sys.exit(2) for o, a in opts: if o == "-h": - print __doc__ + print(__doc__) sys.exit(2) elif o == "-o": output_file = a elif o == "-m": pre_merged_file = a else: - print >> sys.stderr, "unhandled option %s" % (o,) + print("unhandled option %s" % (o,), file=sys.stderr) sys.exit(1) # Restrictions on tags: @@ -133,12 +146,12 @@ if errors: for fn, ln, msg in errors: - print >> sys.stderr, "%s:%d: error: %s" % (fn, ln, msg) + print("%s:%d: error: %s" % (fn, ln, msg), file=sys.stderr) sys.exit(1) if warnings: for fn, ln, msg in warnings: - print >> sys.stderr, "%s:%d: warning: %s" % (fn, ln, msg) + print("%s:%d: warning: %s" % (fn, ln, msg), file=sys.stderr) # Python's hash function (a) isn't great and (b) varies between # versions of python. Using md5 is overkill here but is the same from @@ -154,14 +167,14 @@ def hashname(str): # If we were provided pre-merged tags (w/ the -m option), then don't # ever try to allocate one, just fail if we don't have a number -for name, t in sorted(by_tagname.iteritems()): +for name, t in sorted(iteritems(by_tagname)): if t.tagnum is None: if pre_merged_tags: try: t.tagnum = pre_merged_tags[t.tagname] except KeyError: - print >> sys.stderr, ("Error: Tag number not defined for tag `%s'." - +" Have you done a full build?") % t.tagname + print("Error: Tag number not defined for tag `%s'." + " Have you done a full build?" 
% t.tagname, file=sys.stderr) sys.exit(1) else: while True: @@ -174,8 +187,8 @@ def hashname(str): # by_tagnum should be complete now; we've assigned numbers to all tags. -buffer = cStringIO.StringIO() -for n, t in sorted(by_tagnum.iteritems()): +buffer = StringIO() +for n, t in sorted(iteritems(by_tagnum)): if t.description: buffer.write("%d %s %s\n" % (t.tagnum, t.tagname, t.description)) else: diff --git a/tools/normalize_path.py b/tools/normalize_path.py index 6c4d5486dc7..4c7b1294612 100755 --- a/tools/normalize_path.py +++ b/tools/normalize_path.py @@ -16,14 +16,15 @@ """ Normalize and output paths from arguments, or stdin if no arguments provided. """ +from __future__ import print_function import os.path import sys if len(sys.argv) > 1: for p in sys.argv[1:]: - print os.path.normpath(p) + print(os.path.normpath(p)) sys.exit(0) for line in sys.stdin: - print os.path.normpath(line.strip()) + print(os.path.normpath(line.strip())) diff --git a/tools/parsedeps.py b/tools/parsedeps.py index 32d8ad7d67e..d36442b57e0 100755 --- a/tools/parsedeps.py +++ b/tools/parsedeps.py @@ -1,10 +1,16 @@ #!/usr/bin/env python # vim: ts=2 sw=2 +from __future__ import print_function + import optparse -import re import sys +try: + raw_input +except NameError: + raw_input = input + class Dependency: def __init__(self, tgt): @@ -43,13 +49,15 @@ def setPos(self, tgt, pos): t.pos = pos def get(self, tgt): - if self.lines.has_key(tgt): + if tgt in self.lines: return self.lines[tgt] else: return None def __iter__(self): - return self.lines.iteritems() + if hasattr(self.lines, 'iteritems'): + return self.lines.iteritems() + return iter(self.lines.items()) def trace(self, tgt, prereq): self.__visit = self.__visit + 1 @@ -73,9 +81,9 @@ def __trace(self, d, prereq): return result def help(): - print "Commands:" - print " dep TARGET Print the prerequisites for TARGET" - print " trace TARGET PREREQ Print the paths from TARGET to PREREQ" + print("Commands:") + print(" dep TARGET Print the 
prerequisites for TARGET") + print(" trace TARGET PREREQ Print the paths from TARGET to PREREQ") def main(argv): @@ -87,7 +95,7 @@ def main(argv): deps = Dependencies() filename = args[0] - print "Reading %s" % filename + print("Reading %s" % filename) if True: f = open(filename) @@ -106,7 +114,7 @@ def main(argv): deps.add(tgt, prereq) f.close() - print "Read %d dependencies. %d targets." % (deps.count, len(deps.lines)) + print("Read %d dependencies. %d targets." % (deps.count, len(deps.lines))) while True: line = raw_input("target> ") if not line.strip(): @@ -118,12 +126,12 @@ def main(argv): d = deps.get(tgt) if d: for prereq in d.prereqs: - print prereq.tgt + print(prereq.tgt) elif len(split) == 3 and cmd == "trace": tgt = split[1] prereq = split[2] if False: - print "from %s to %s" % (tgt, prereq) + print("from %s to %s" % (tgt, prereq)) trace = deps.trace(tgt, prereq) if trace: width = 0 @@ -134,10 +142,10 @@ def main(argv): for g in trace: for t in g: if t.pos: - print t.tgt, " " * (width-len(t.tgt)), " #", t.pos + print(t.tgt, " " * (width-len(t.tgt)), " #", t.pos) else: - print t.tgt - print + print(t.tgt) + print() else: help() @@ -145,7 +153,6 @@ def main(argv): try: main(sys.argv) except KeyboardInterrupt: - print + print() except EOFError: - print - + print() diff --git a/tools/post_process_props.py b/tools/post_process_props.py index 9dcaadfd0d3..aa67755f804 100755 --- a/tools/post_process_props.py +++ b/tools/post_process_props.py @@ -16,6 +16,13 @@ import sys + +def iteritems(obj): + if hasattr(obj, 'iteritems'): + return obj.iteritems() + return obj.items() + + # Usage: post_process_props.py file.prop [blacklist_key, ...] # Blacklisted keys are removed from the property file, if present @@ -27,15 +34,22 @@ # Put the modifications that you need to make into the /system/build.prop into this # function. The prop object has get(name) and put(name,value) methods. 
-def mangle_build_prop(prop): +def mangle_build_prop(prop, overrides): + if len(overrides) == 0: + return + overridelist = overrides.replace(" ",",").split(",") + for proppair in overridelist: + values = proppair.split("=") + prop.put(values[0], values[1]) + pass # Put the modifications that you need to make into the /default.prop into this # function. The prop object has get(name) and put(name,value) methods. def mangle_default_prop(prop): - # If ro.debuggable is 1, then enable adb on USB by default - # (this is for userdebug builds) - if prop.get("ro.debuggable") == "1": + # If ro.adb.secure is not 1, then enable adb on USB by default + # (this is for eng builds) + if prop.get("ro.adb.secure") != "1": val = prop.get("persist.sys.usb.config") if "adb" not in val: if val == "": @@ -47,7 +61,7 @@ def mangle_default_prop(prop): # default to "adb". That might not the right policy there, but it's better # to be explicit. if not prop.get("persist.sys.usb.config"): - prop.put("persist.sys.usb.config", "none"); + prop.put("persist.sys.usb.config", "none") def validate(prop): """Validate the properties. @@ -57,7 +71,7 @@ def validate(prop): """ check_pass = True buildprops = prop.to_dict() - for key, value in buildprops.iteritems(): + for key, value in iteritems(buildprops): # Check build properties' length. 
if len(key) > PROP_NAME_MAX: check_pass = False @@ -111,6 +125,10 @@ def write(self, f): def main(argv): filename = argv[1] + if (len(argv) > 2): + extraargs = argv[2] + else: + extraargs = "" f = open(filename) lines = f.readlines() f.close() @@ -118,7 +136,7 @@ def main(argv): properties = PropFile(lines) if filename.endswith("/build.prop"): - mangle_build_prop(properties) + mangle_build_prop(properties, extraargs) elif filename.endswith("/default.prop"): mangle_default_prop(properties) else: @@ -129,7 +147,7 @@ def main(argv): sys.exit(1) # Drop any blacklisted keys - for key in argv[2:]: + for key in argv[3:]: properties.delete(key) f = open(filename, 'w+') diff --git a/tools/product_debug.py b/tools/product_debug.py index ff2657c6d8b..1433a9ac289 100755 --- a/tools/product_debug.py +++ b/tools/product_debug.py @@ -14,13 +14,22 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os +from __future__ import print_function + +from operator import itemgetter import re import sys + +def iteritems(obj): + if hasattr(obj, 'iteritems'): + return obj.iteritems() + return obj.items() + + def break_lines(key, val): # these don't get split - if key in ("PRODUCT_MODEL"): + if key in ("PRODUCT_MODEL",): return (key,val) return (key, "\n".join(val.split())) @@ -42,8 +51,7 @@ def parse_variables(lines): def render_variables(variables): variables = dict(variables) del variables["FILE"] - variables = list(variables.iteritems()) - variables.sort(lambda a, b: cmp(a[0], b[0])) + variables = sorted(variables.items(), key=itemgetter(0)) return ("" + "\n".join([ "" % { "key": key, "val": val } for key,val in variables]) @@ -69,7 +77,7 @@ def render_original(variables, text): return text def read_file(fn): - f = file(fn) + f = open(fn) text = f.read() f.close() return text @@ -104,7 +112,7 @@ def main(argv): "variables": render_variables(variables), "original": render_original(variables, original), }) - print """ + print(""" 
@@ -153,7 +161,7 @@ def main(argv): %(variables)s -""" % values +""" % values) if __name__ == "__main__": main(sys.argv) diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py index 6acc69dbded..0140c1fa085 100755 --- a/tools/releasetools/add_img_to_target_files.py +++ b/tools/releasetools/add_img_to_target_files.py @@ -19,13 +19,39 @@ not have an IMAGES/ top-level subdirectory), produce the images and add them to the zipfile. -Usage: add_img_to_target_files target_files +Usage: add_img_to_target_files [flag] target_files + + -a (--add_missing) + Build and add missing images to "IMAGES/". If this option is + not specified, this script will simply exit when "IMAGES/" + directory exists in the target file. + + -r (--rebuild_recovery) + Rebuild the recovery patch and write it to the system image. Only + meaningful when system image needs to be rebuilt. + + --replace_verity_private_key + Replace the private key used for verity signing. (same as the option + in sign_target_files_apks) + + --replace_verity_public_key + Replace the certificate (public key) used for verity verification. (same + as the option in sign_target_files_apks) + + --is_signing + Skip building & adding the images for "userdata" and "cache" if we + are signing the target files. + + --verity_signer_path + Specify the signer path to build verity metadata. """ +from __future__ import print_function + import sys if sys.hexversion < 0x02070000: - print >> sys.stderr, "Python 2.7 or newer is required." 
+ print("Python 2.7 or newer is required.", file=sys.stderr) sys.exit(1) import datetime @@ -45,6 +71,7 @@ OPTIONS.rebuild_recovery = False OPTIONS.replace_verity_public_key = False OPTIONS.replace_verity_private_key = False +OPTIONS.is_signing = False OPTIONS.verity_signer_path = None def GetCareMap(which, imgname): @@ -66,7 +93,7 @@ def AddSystem(output_zip, prefix="IMAGES/", recovery_img=None, boot_img=None): prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "system.img") if os.path.exists(prebuilt_path): - print "system.img already exists in %s, no need to rebuild..." % (prefix,) + print("system.img already exists in %s, no need to rebuild..." % prefix) return prebuilt_path def output_sink(fn, data): @@ -75,7 +102,7 @@ def output_sink(fn, data): ofile.close() if OPTIONS.rebuild_recovery: - print "Building new recovery patch" + print("Building new recovery patch") common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink, recovery_img, boot_img, info_dict=OPTIONS.info_dict) @@ -99,7 +126,7 @@ def AddSystemOther(output_zip, prefix="IMAGES/"): prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "system_other.img") if os.path.exists(prebuilt_path): - print "system_other.img already exists in %s, no need to rebuild..." % (prefix,) + print("system_other.img already exists in %s, no need to rebuild..." % prefix) return imgname = BuildSystemOther(OPTIONS.input_tmp, OPTIONS.info_dict) @@ -117,7 +144,7 @@ def AddVendor(output_zip, prefix="IMAGES/"): prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "vendor.img") if os.path.exists(prebuilt_path): - print "vendor.img already exists in %s, no need to rebuild..." % (prefix,) + print("vendor.img already exists in %s, no need to rebuild..." 
% prefix) return prebuilt_path block_list = common.MakeTempFile(prefix="vendor-blocklist-", suffix=".map") @@ -133,9 +160,32 @@ def BuildVendor(input_dir, info_dict, block_list=None): file containing it.""" return CreateImage(input_dir, info_dict, "vendor", block_list=block_list) +def AddOem(output_zip, prefix="IMAGES/"): + """Turn the contents of OEM into a oem image and store in it + output_zip.""" + + prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "oem.img") + if os.path.exists(prebuilt_path): + print("oem.img already exists in %s, no need to rebuild..." % (prefix,)) + return + + block_list = common.MakeTempFile(prefix="oem-blocklist-", suffix=".map") + imgname = BuildOem(OPTIONS.input_tmp, OPTIONS.info_dict, + block_list=block_list) + with open(imgname, "rb") as f: + common.ZipWriteStr(output_zip, prefix + "oem.img", f.read()) + with open(block_list, "rb") as f: + common.ZipWriteStr(output_zip, prefix + "oem.map", f.read()) + + +def BuildOem(input_dir, info_dict, block_list=None): + """Build the (sparse) oem image and return the name of a temp + file containing it.""" + return CreateImage(input_dir, info_dict, "oem", block_list=block_list) + def CreateImage(input_dir, info_dict, what, block_list=None): - print "creating " + what + ".img..." + print("creating " + what + ".img...") img = common.MakeTempFile(prefix=what + "-", suffix=".img") @@ -199,7 +249,7 @@ def AddUserdata(output_zip, prefix="IMAGES/"): prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "userdata.img") if os.path.exists(prebuilt_path): - print "userdata.img already exists in %s, no need to rebuild..." % (prefix,) + print("userdata.img already exists in %s, no need to rebuild..." % prefix) return image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict, "data") @@ -209,7 +259,7 @@ def AddUserdata(output_zip, prefix="IMAGES/"): not image_props.get("partition_size")): return - print "creating userdata.img..." 
+ print("creating userdata.img...") # Use a fixed timestamp (01/01/2009) when packaging the image. # Bug: 24377993 @@ -246,12 +296,59 @@ def AddUserdata(output_zip, prefix="IMAGES/"): shutil.rmtree(temp_dir) +def AddUserdataExtra(output_zip, prefix="IMAGES/"): + """Create extra userdata image and store it in output_zip.""" + + image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict, + "data_extra") + + # The build system has to explicitly request extra userdata. + if "fs_type" not in image_props: + return + + extra_name = image_props.get("partition_name", "extra") + + prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "userdata_%s.img" % extra_name) + if os.path.exists(prebuilt_path): + print("userdata_%s.img already exists in %s, no need to rebuild..." % (extra_name, prefix,)) + return + + # We only allow yaffs to have a 0/missing partition_size. + # Extfs, f2fs must have a size. Skip userdata_extra.img if no size. + if (not image_props.get("fs_type", "").startswith("yaffs") and + not image_props.get("partition_size")): + return + + print("creating userdata_%s.img..." % extra_name) + + # The name of the directory it is making an image out of matters to + # mkyaffs2image. So we create a temp dir, and within it we create an + # empty dir named "data", and build the image from that. 
+ temp_dir = tempfile.mkdtemp() + user_dir = os.path.join(temp_dir, "data") + os.mkdir(user_dir) + img = tempfile.NamedTemporaryFile() + + fstab = OPTIONS.info_dict["fstab"] + if fstab: + image_props["fs_type" ] = fstab["/data"].fs_type + succ = build_image.BuildImage(user_dir, image_props, img.name) + assert succ, "build userdata_%s.img image failed" % extra_name + + # Disable size check since this fetches original data partition size + #common.CheckSize(img.name, "userdata_extra.img", OPTIONS.info_dict) + output_zip.write(img.name, prefix + "userdata_%s.img" % extra_name) + img.close() + os.rmdir(user_dir) + os.rmdir(temp_dir) + + def AddCache(output_zip, prefix="IMAGES/"): """Create an empty cache image and store it in output_zip.""" prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "cache.img") if os.path.exists(prebuilt_path): - print "cache.img already exists in %s, no need to rebuild..." % (prefix,) + print("cache.img already exists in %s, no need to rebuild..." % prefix) return image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict, "cache") @@ -259,7 +356,7 @@ def AddCache(output_zip, prefix="IMAGES/"): if "fs_type" not in image_props: return - print "creating cache.img..." + print("creating cache.img...") # Use a fixed timestamp (01/01/2009) when packaging the image. # Bug: 24377993 @@ -294,7 +391,7 @@ def AddImagesToTargetFiles(filename): if not OPTIONS.add_missing: for n in input_zip.namelist(): if n.startswith("IMAGES/"): - print "target_files appears to already contain images." 
+ print("target_files appears to already contain images.") sys.exit(1) try: @@ -303,8 +400,15 @@ def AddImagesToTargetFiles(filename): except KeyError: has_vendor = False + try: + input_zip.getinfo("OEM/") + has_oem = True + except KeyError: + has_oem = False + has_system_other = "SYSTEM_OTHER/" in input_zip.namelist() + OPTIONS.info_dict = common.LoadInfoDict(input_zip, OPTIONS.input_tmp) common.ZipClose(input_zip) @@ -314,13 +418,13 @@ def AddImagesToTargetFiles(filename): has_recovery = (OPTIONS.info_dict.get("no_recovery") != "true") def banner(s): - print "\n\n++++ " + s + " ++++\n\n" + print("\n\n++++ " + s + " ++++\n\n") banner("boot") prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", "boot.img") boot_image = None if os.path.exists(prebuilt_path): - print "boot.img already exists in IMAGES/, no need to rebuild..." + print("boot.img already exists in IMAGES/, no need to rebuild...") if OPTIONS.rebuild_recovery: boot_image = common.GetBootableImage( "IMAGES/boot.img", "boot.img", OPTIONS.input_tmp, "BOOT") @@ -335,7 +439,7 @@ def banner(s): banner("recovery") prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", "recovery.img") if os.path.exists(prebuilt_path): - print "recovery.img already exists in IMAGES/, no need to rebuild..." + print("recovery.img already exists in IMAGES/, no need to rebuild...") if OPTIONS.rebuild_recovery: recovery_image = common.GetBootableImage( "IMAGES/recovery.img", "recovery.img", OPTIONS.input_tmp, @@ -346,6 +450,14 @@ def banner(s): if recovery_image: recovery_image.AddToZip(output_zip) + banner("recovery (two-step image)") + # The special recovery.img for two-step package use. 
+ recovery_two_step_image = common.GetBootableImage( + "IMAGES/recovery-two-step.img", "recovery-two-step.img", + OPTIONS.input_tmp, "RECOVERY", two_step_image=True) + if recovery_two_step_image: + recovery_two_step_image.AddToZip(output_zip) + banner("system") system_imgname = AddSystem(output_zip, recovery_img=recovery_image, boot_img=boot_image) @@ -356,10 +468,17 @@ def banner(s): if has_system_other: banner("system_other") AddSystemOther(output_zip) - banner("userdata") - AddUserdata(output_zip) - banner("cache") - AddCache(output_zip) + if not OPTIONS.is_signing: + banner("userdata") + AddUserdata(output_zip) + banner("extrauserdata") + AddUserdataExtra(output_zip) + banner("cache") + AddCache(output_zip) + if has_oem: + banner("oem") + AddOem(output_zip) + # For devices using A/B update, copy over images from RADIO/ to IMAGES/ and # make sure we have all the needed images ready under IMAGES/. @@ -406,6 +525,8 @@ def option_handler(o, a): OPTIONS.replace_verity_private_key = (True, a) elif o == "--replace_verity_public_key": OPTIONS.replace_verity_public_key = (True, a) + elif o == "--is_signing": + OPTIONS.is_signing = True elif o == "--verity_signer_path": OPTIONS.verity_signer_path = a else: @@ -417,6 +538,7 @@ def option_handler(o, a): extra_long_opts=["add_missing", "rebuild_recovery", "replace_verity_public_key=", "replace_verity_private_key=", + "is_signing", "verity_signer_path="], extra_option_handler=option_handler) @@ -426,16 +548,16 @@ def option_handler(o, a): sys.exit(1) AddImagesToTargetFiles(args[0]) - print "done." 
+ print("done.") if __name__ == '__main__': try: common.CloseInheritedPipes() main(sys.argv[1:]) except common.ExternalError as e: - print - print " ERROR: %s" % (e,) - print + print() + print(" ERROR: %s" % e) + print() sys.exit(1) finally: common.Cleanup() diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py index 31dabc724e6..d26e0ebabed 100644 --- a/tools/releasetools/blockimgdiff.py +++ b/tools/releasetools/blockimgdiff.py @@ -438,7 +438,7 @@ def WriteSplitTransfers(out, style, target_blocks): stashes[sh] -= 1 if stashes[sh] == 0: free_size += sr.size() - free_string.append("free %s\n" % (sh)) + free_string.append("free %s\n" % sh) stashes.pop(sh) heapq.heappush(free_stash_ids, sid) @@ -697,17 +697,16 @@ def ComputePatches(self, prefix): for xf in self.transfers: if xf.style == "zero": tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize - print("%10d %10d (%6.2f%%) %7s %s %s" % ( - tgt_size, tgt_size, 100.0, xf.style, xf.tgt_name, - str(xf.tgt_ranges))) + print("%10d %10d (%6.2f%%) %7s %s" % ( + tgt_size, tgt_size, 100.0, xf.style, xf.tgt_name)) elif xf.style == "new": for piece in self.tgt.ReadRangeSet(xf.tgt_ranges): new_f.write(piece) tgt_size = xf.tgt_ranges.size() * self.tgt.blocksize - print("%10d %10d (%6.2f%%) %7s %s %s" % ( + print("%10d %10d (%6.2f%%) %7s %s" % ( tgt_size, tgt_size, 100.0, xf.style, - xf.tgt_name, str(xf.tgt_ranges))) + xf.tgt_name)) elif xf.style == "diff": src = self.src.ReadRangeSet(xf.src_ranges) @@ -1000,8 +999,11 @@ def FindVertexSequence(self): heap.append(xf.heap_item) heapq.heapify(heap) - sinks = set(u for u in G if not u.outgoing) - sources = set(u for u in G if not u.incoming) + # Use OrderedDict() instead of set() to preserve the insertion order. Need + # to use 'sinks[key] = None' to add key into the set. sinks will look like + # { key1: None, key2: None, ... }. 
+ sinks = OrderedDict.fromkeys(u for u in G if not u.outgoing) + sources = OrderedDict.fromkeys(u for u in G if not u.incoming) def adjust_score(iu, delta): iu.score += delta @@ -1012,26 +1014,28 @@ def adjust_score(iu, delta): while G: # Put all sinks at the end of the sequence. while sinks: - new_sinks = set() + new_sinks = OrderedDict() for u in sinks: if u not in G: continue s2.appendleft(u) del G[u] for iu in u.incoming: adjust_score(iu, -iu.outgoing.pop(u)) - if not iu.outgoing: new_sinks.add(iu) + if not iu.outgoing: + new_sinks[iu] = None sinks = new_sinks # Put all the sources at the beginning of the sequence. while sources: - new_sources = set() + new_sources = OrderedDict() for u in sources: if u not in G: continue s1.append(u) del G[u] for iu in u.outgoing: adjust_score(iu, +iu.incoming.pop(u)) - if not iu.incoming: new_sources.add(iu) + if not iu.incoming: + new_sources[iu] = None sources = new_sources if not G: break @@ -1050,11 +1054,13 @@ def adjust_score(iu, delta): del G[u] for iu in u.outgoing: adjust_score(iu, +iu.incoming.pop(u)) - if not iu.incoming: sources.add(iu) + if not iu.incoming: + sources[iu] = None for iu in u.incoming: adjust_score(iu, -iu.outgoing.pop(u)) - if not iu.outgoing: sinks.add(iu) + if not iu.outgoing: + sinks[iu] = None # Now record the sequence in the 'order' field of each transfer, # and by rearranging self.transfers to be in the chosen sequence. @@ -1073,8 +1079,7 @@ def GenerateDigraph(self): # Each item of source_ranges will be: # - None, if that block is not used as a source, - # - a transfer, if one transfer uses it as a source, or - # - a set of transfers. + # - an ordered set of transfers. 
source_ranges = [] for b in self.transfers: for s, e in b.src_ranges: @@ -1082,23 +1087,19 @@ def GenerateDigraph(self): source_ranges.extend([None] * (e-len(source_ranges))) for i in range(s, e): if source_ranges[i] is None: - source_ranges[i] = b + source_ranges[i] = OrderedDict.fromkeys([b]) else: - if not isinstance(source_ranges[i], set): - source_ranges[i] = set([source_ranges[i]]) - source_ranges[i].add(b) + source_ranges[i][b] = None for a in self.transfers: - intersections = set() + intersections = OrderedDict() for s, e in a.tgt_ranges: for i in range(s, e): if i >= len(source_ranges): break - b = source_ranges[i] - if b is not None: - if isinstance(b, set): - intersections.update(b) - else: - intersections.add(b) + # Add all the Transfers in source_ranges[i] to the (ordered) set. + if source_ranges[i] is not None: + for j in source_ranges[i]: + intersections[j] = None for b in intersections: if a is b: continue diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py index a9217eed8bf..6c1fab89a5b 100755 --- a/tools/releasetools/build_image.py +++ b/tools/releasetools/build_image.py @@ -68,7 +68,7 @@ def GetVerityTreeSize(partition_size): return True, int(output) def GetVerityMetadataSize(partition_size): - cmd = "system/extras/verity/build_verity_metadata.py -s %d" + cmd = "system/extras/verity/build_verity_metadata.py size %d" cmd %= partition_size status, output = commands.getstatusoutput(cmd) @@ -109,7 +109,8 @@ def AdjustPartitionSizeForVerity(partition_size, fec_supported): Args: partition_size: the size of the partition to be verified. Returns: - The size of the partition adjusted for verity metadata. + A tuple of the size of the partition adjusted for verity metadata, and + the size of verity metadata. 
""" key = "%d %d" % (partition_size, fec_supported) if key in AdjustPartitionSizeForVerity.results: @@ -121,27 +122,31 @@ def AdjustPartitionSizeForVerity(partition_size, fec_supported): # verity tree and fec sizes depend on the partition size, which # means this estimate is always going to be unnecessarily small - lo = partition_size - GetVeritySize(hi, fec_supported) + verity_size = GetVeritySize(hi, fec_supported) + lo = partition_size - verity_size result = lo # do a binary search for the optimal size while lo < hi: i = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE - size = i + GetVeritySize(i, fec_supported) - if size <= partition_size: + v = GetVeritySize(i, fec_supported) + if i + v <= partition_size: if result < i: result = i + verity_size = v lo = i + BLOCK_SIZE else: hi = i - AdjustPartitionSizeForVerity.results[key] = result - return result + AdjustPartitionSizeForVerity.results[key] = (result, verity_size) + return (result, verity_size) AdjustPartitionSizeForVerity.results = {} -def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path): - cmd = "fec -e %s %s %s" % (sparse_image_path, verity_path, verity_fec_path) +def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path, + padding_size): + cmd = "fec -e -p %d %s %s %s" % (padding_size, sparse_image_path, + verity_path, verity_fec_path) print cmd status, output = commands.getstatusoutput(cmd) if status: @@ -163,16 +168,36 @@ def BuildVerityTree(sparse_image_path, verity_image_path, prop_dict): return True def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt, - block_device, signer_path, key): + block_device, signer_path, key, signer_args): + verity_key = os.getenv("PRODUCT_VERITY_KEY", None) + verity_key_password = None + + if verity_key and os.path.exists(verity_key+".pk8"): + verity_key_passwords = {} + verity_key_passwords.update(common.PasswordManager().GetPasswords(verity_key.split())) + verity_key_password = verity_key_passwords[verity_key] + cmd_template = ( 
- "system/extras/verity/build_verity_metadata.py %s %s %s %s %s %s %s") + "system/extras/verity/build_verity_metadata.py build " + + "%s %s %s %s %s %s %s") cmd = cmd_template % (image_size, verity_metadata_path, root_hash, salt, block_device, signer_path, key) + if signer_args: + cmd += " --signer_args=\"%s\"" % (' '.join(signer_args),) print cmd - status, output = commands.getstatusoutput(cmd) - if status: - print "Could not build verity metadata! Error: %s" % output + runcmd = [str(a) for a in ["system/extras/verity/build_verity_metadata.py", "build", image_size, verity_metadata_path, root_hash, salt, block_device, signer_path, key]]; + if verity_key_password is not None: + sp = subprocess.Popen(runcmd, stdin=subprocess.PIPE) + sp.communicate(verity_key_password) + else: + sp = subprocess.Popen(runcmd) + + sp.wait() + + if sp.returncode != 0: + print("Could not build verity metadata!") return False + return True def Append2Simg(sparse_image_path, unsparse_image_path, error_message): @@ -204,7 +229,7 @@ def Append(target, file_to_append, error_message): def BuildVerifiedImage(data_image_path, verity_image_path, verity_metadata_path, verity_fec_path, - fec_supported): + padding_size, fec_supported): if not Append(verity_image_path, verity_metadata_path, "Could not append verity metadata!"): return False @@ -212,7 +237,7 @@ def BuildVerifiedImage(data_image_path, verity_image_path, if fec_supported: # build FEC for the entire partition, including metadata if not BuildVerityFEC(data_image_path, verity_image_path, - verity_fec_path): + verity_fec_path, padding_size): return False if not Append(verity_image_path, verity_fec_path, "Could not append FEC!"): @@ -250,14 +275,14 @@ def MakeVerityEnabledImage(out_file, fec_supported, prop_dict): True on success, False otherwise. 
""" # get properties - image_size = prop_dict["partition_size"] + image_size = int(prop_dict["partition_size"]) block_dev = prop_dict["verity_block_device"] signer_key = prop_dict["verity_key"] + ".pk8" if OPTIONS.verity_signer_path is not None: - signer_path = OPTIONS.verity_signer_path + ' ' - signer_path += ' '.join(OPTIONS.verity_signer_args) + signer_path = OPTIONS.verity_signer_path else: signer_path = prop_dict["verity_signer_cmd"] + signer_args = OPTIONS.verity_signer_args # make a tempdir tempdir_name = tempfile.mkdtemp(suffix="_verity_images") @@ -276,15 +301,22 @@ def MakeVerityEnabledImage(out_file, fec_supported, prop_dict): root_hash = prop_dict["verity_root_hash"] salt = prop_dict["verity_salt"] if not BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt, - block_dev, signer_path, signer_key): + block_dev, signer_path, signer_key, signer_args): shutil.rmtree(tempdir_name, ignore_errors=True) return False # build the full verified image + target_size = int(prop_dict["original_partition_size"]) + verity_size = int(prop_dict["verity_size"]) + + padding_size = target_size - image_size - verity_size + assert padding_size >= 0 + if not BuildVerifiedImage(out_file, verity_image_path, verity_metadata_path, verity_fec_path, + padding_size, fec_supported): shutil.rmtree(tempdir_name, ignore_errors=True) return False @@ -316,6 +348,7 @@ def BuildImage(in_dir, prop_dict, out_file, target_out=None): Returns: True iff the image is built successfully. """ + print("BuildImage: in_dir = %s, out_file = %s" % (in_dir, out_file)) # system_root_image=true: build a system.img that combines the contents of # /system and the ramdisk, and can be mounted at the root of the file system. origin_in = in_dir @@ -355,20 +388,26 @@ def BuildImage(in_dir, prop_dict, out_file, target_out=None): # verified. 
if verity_supported and is_verity_partition: partition_size = int(prop_dict.get("partition_size")) - adjusted_size = AdjustPartitionSizeForVerity(partition_size, - verity_fec_supported) + (adjusted_size, verity_size) = AdjustPartitionSizeForVerity(partition_size, + verity_fec_supported) if not adjusted_size: + print "Error: adjusting partition size for verity failed, partition_size = %d" % partition_size return False prop_dict["partition_size"] = str(adjusted_size) prop_dict["original_partition_size"] = str(partition_size) + prop_dict["verity_size"] = str(verity_size) if fs_type.startswith("ext"): build_command = ["mkuserimg.sh"] if "extfs_sparse_flag" in prop_dict: build_command.append(prop_dict["extfs_sparse_flag"]) - run_fsck = True - build_command.extend([in_dir, out_file, fs_type, - prop_dict["mount_point"]]) + #run_fsck = True + if "is_userdataextra" in prop_dict: + build_command.extend([in_dir, out_file, fs_type, + "data"]) + else: + build_command.extend([in_dir, out_file, fs_type, + prop_dict["mount_point"]]) build_command.append(prop_dict["partition_size"]) if "journal_size" in prop_dict: build_command.extend(["-j", prop_dict["journal_size"]]) @@ -383,9 +422,12 @@ def BuildImage(in_dir, prop_dict, out_file, target_out=None): if "base_fs_file" in prop_dict: base_fs_file = ConvertBlockMapToBaseFs(prop_dict["base_fs_file"]) if base_fs_file is None: + print "Error: no base fs file found" return False build_command.extend(["-d", base_fs_file]) build_command.extend(["-L", prop_dict["mount_point"]]) + if "extfs_inode_count" in prop_dict: + build_command.extend(["-i", prop_dict["extfs_inode_count"]]) if "selinux_fc" in prop_dict: build_command.append(prop_dict["selinux_fc"]) elif fs_type.startswith("squash"): @@ -438,9 +480,12 @@ def BuildImage(in_dir, prop_dict, out_file, target_out=None): try: if reserved_blocks and fs_type.startswith("ext4"): + print "fs type is ext4" (ext4fs_output, exit_code) = RunCommand(build_command) else: + print "fs type is not ext4" (_, 
exit_code) = RunCommand(build_command) + print("Running %s command, exit code = %d" % (build_command, exit_code)) finally: if in_dir != origin_in: # Clean up temporary directories and files. @@ -450,6 +495,7 @@ def BuildImage(in_dir, prop_dict, out_file, target_out=None): if base_fs_file is not None: os.remove(base_fs_file) if exit_code != 0: + print "Error: %s command unsuccessful" % build_command return False # Bug: 21522719, 22023465 @@ -490,17 +536,19 @@ def BuildImage(in_dir, prop_dict, out_file, target_out=None): # create the verified image if this is to be verified if verity_supported and is_verity_partition: if not MakeVerityEnabledImage(out_file, verity_fec_supported, prop_dict): + print "Error: making verity enabled image failed" return False if run_fsck and prop_dict.get("skip_fsck") != "true": success, unsparse_image = UnsparseImage(out_file, replace=False) if not success: + print "Error: unparsing of image failed" return False # Run e2fsck on the inflated image file e2fsck_command = ["e2fsck", "-f", "-n", unsparse_image] (_, exit_code) = RunCommand(e2fsck_command) - + print("Running %s command, exit code = %d" % (e2fsck_command, exit_code)) os.remove(unsparse_image) return exit_code == 0 @@ -556,6 +604,7 @@ def copy_prop(src_p, dest_p): copy_prop("system_squashfs_block_size", "squashfs_block_size") copy_prop("system_squashfs_disable_4k_align", "squashfs_disable_4k_align") copy_prop("system_base_fs_file", "base_fs_file") + copy_prop("system_extfs_inode_count", "extfs_inode_count") elif mount_point == "system_other": # We inherit the selinux policies of /system since we contain some of its files. 
d["mount_point"] = "system" @@ -569,11 +618,17 @@ def copy_prop(src_p, dest_p): copy_prop("system_squashfs_compressor_opt", "squashfs_compressor_opt") copy_prop("system_squashfs_block_size", "squashfs_block_size") copy_prop("system_base_fs_file", "base_fs_file") + copy_prop("system_extfs_inode_count", "extfs_inode_count") elif mount_point == "data": # Copy the generic fs type first, override with specific one if available. copy_prop("fs_type", "fs_type") copy_prop("userdata_fs_type", "fs_type") copy_prop("userdata_size", "partition_size") + elif mount_point == "data_extra": + copy_prop("fs_type", "fs_type") + copy_prop("userdataextra_size", "partition_size") + copy_prop("userdataextra_name", "partition_name") + d["is_userdataextra"] = True elif mount_point == "cache": copy_prop("cache_fs_type", "fs_type") copy_prop("cache_size", "partition_size") @@ -588,11 +643,13 @@ def copy_prop(src_p, dest_p): copy_prop("vendor_squashfs_block_size", "squashfs_block_size") copy_prop("vendor_squashfs_disable_4k_align", "squashfs_disable_4k_align") copy_prop("vendor_base_fs_file", "base_fs_file") + copy_prop("vendor_extfs_inode_count", "extfs_inode_count") elif mount_point == "oem": copy_prop("fs_type", "fs_type") copy_prop("oem_size", "partition_size") copy_prop("oem_journal_size", "journal_size") copy_prop("has_ext4_reserved_blocks", "has_ext4_reserved_blocks") + copy_prop("oem_extfs_inode_count", "extfs_inode_count") return d diff --git a/tools/releasetools/check_target_files_signatures.py b/tools/releasetools/check_target_files_signatures.py index 3048488d30c..a6746c6a318 100755 --- a/tools/releasetools/check_target_files_signatures.py +++ b/tools/releasetools/check_target_files_signatures.py @@ -39,10 +39,12 @@ """ +from __future__ import print_function + import sys if sys.hexversion < 0x02070000: - print >> sys.stderr, "Python 2.7 or newer is required." 
+ print("Python 2.7 or newer is required.", file=sys.stderr) sys.exit(1) import os @@ -53,6 +55,13 @@ import common + +def iteritems(obj): + if hasattr(obj, 'iteritems'): + return obj.iteritems() + return obj.items() + + # Work around a bug in python's zipfile module that prevents opening # of zipfiles if any entry has an extra field of between 1 and 3 bytes # (which is common with zipaligned APKs). This overrides the @@ -81,9 +90,9 @@ def Pop(): def Banner(msg): - print "-" * 70 - print " ", msg - print "-" * 70 + print("-" * 70) + print(" ", msg) + print("-" * 70) def GetCertSubject(cert): @@ -260,7 +269,7 @@ def CheckSharedUids(self): """Look for any instances where packages signed with different certs request the same sharedUserId.""" apks_by_uid = {} - for apk in self.apks.itervalues(): + for apk in self.apks.values(): if apk.shared_uid: apks_by_uid.setdefault(apk.shared_uid, []).append(apk) @@ -275,15 +284,15 @@ def CheckSharedUids(self): AddProblem("different cert sets for packages with uid %s" % (uid,)) - print "uid %s is shared by packages with different cert sets:" % (uid,) + print("uid %s is shared by packages with different cert sets:" % uid) for apk in apks: - print "%-*s [%s]" % (self.max_pkg_len, apk.package, apk.filename) + print("%-*s [%s]" % (self.max_pkg_len, apk.package, apk.filename)) for cert in apk.certs: - print " ", ALL_CERTS.Get(cert) - print + print(" ", ALL_CERTS.Get(cert)) + print() def CheckExternalSignatures(self): - for apk_filename, certname in self.certmap.iteritems(): + for apk_filename, certname in iteritems(self.certmap): if certname == "EXTERNAL": # Apps marked EXTERNAL should be signed with the test key # during development, then manually re-signed after @@ -299,25 +308,26 @@ def CheckExternalSignatures(self): def PrintCerts(self): """Display a table of packages grouped by cert.""" by_cert = {} - for apk in self.apks.itervalues(): + for apk in self.apks.values(): for cert in apk.certs: by_cert.setdefault(cert, 
[]).append((apk.package, apk)) - order = [(-len(v), k) for (k, v) in by_cert.iteritems()] + order = [(-len(v), k) for (k, v) in iteritems(by_cert)] order.sort() for _, cert in order: - print "%s:" % (ALL_CERTS.Get(cert),) + print("%s:" % ALL_CERTS.Get(cert)) apks = by_cert[cert] apks.sort() for _, apk in apks: if apk.shared_uid: - print " %-*s %-*s [%s]" % (self.max_fn_len, apk.filename, + print(" %-*s %-*s [%s]" % (self.max_fn_len, apk.filename, self.max_pkg_len, apk.package, - apk.shared_uid) + apk.shared_uid)) else: - print " %-*s %s" % (self.max_fn_len, apk.filename, apk.package) - print + print(" %-*s %-*s" % (self.max_fn_len, apk.filename, + self.max_pkg_len, apk.package)) + print() def CompareWith(self, other): """Look for instances where a given package that exists in both @@ -338,12 +348,12 @@ def CompareWith(self, other): by_certpair.setdefault((other.apks[i].certs, self.apks[i].certs), []).append(i) else: - print "%s [%s]: new APK (not in comparison target_files)" % ( - i, self.apks[i].filename) + print("%s [%s]: new APK (not in comparison target_files)" % ( + i, self.apks[i].filename)) else: if i in other.apks: - print "%s [%s]: removed APK (only in comparison target_files)" % ( - i, other.apks[i].filename) + print("%s [%s]: removed APK (only in comparison target_files)" % ( + i, other.apks[i].filename)) if by_certpair: AddProblem("some APKs changed certs") @@ -351,23 +361,23 @@ def CompareWith(self, other): for (old, new), packages in sorted(by_certpair.items()): for i, o in enumerate(old): if i == 0: - print "was", ALL_CERTS.Get(o) + print("was", ALL_CERTS.Get(o)) else: - print " ", ALL_CERTS.Get(o) + print(" ", ALL_CERTS.Get(o)) for i, n in enumerate(new): if i == 0: - print "now", ALL_CERTS.Get(n) + print("now", ALL_CERTS.Get(n)) else: - print " ", ALL_CERTS.Get(n) + print(" ", ALL_CERTS.Get(n)) for i in sorted(packages): old_fn = other.apks[i].filename new_fn = self.apks[i].filename if old_fn == new_fn: - print " %-*s [%s]" % (max_pkg_len, i, 
old_fn) + print(" %-*s [%s]" % (max_pkg_len, i, old_fn)) else: - print " %-*s [was: %s; now: %s]" % (max_pkg_len, i, - old_fn, new_fn) - print + print(" %-*s [was: %s; now: %s]" % (max_pkg_len, i, + old_fn, new_fn)) + print() def main(argv): @@ -422,9 +432,9 @@ def option_handler(o, a): target_files.CompareWith(compare_files) if PROBLEMS: - print "%d problem(s) found:\n" % (len(PROBLEMS),) + print("%d problem(s) found:\n" % len(PROBLEMS)) for p in PROBLEMS: - print p + print(p) return 1 return 0 @@ -435,7 +445,7 @@ def option_handler(o, a): r = main(sys.argv[1:]) sys.exit(r) except common.ExternalError as e: - print - print " ERROR: %s" % (e,) - print + print() + print(" ERROR: %s" % e) + print() sys.exit(1) diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py old mode 100644 new mode 100755 index d7f8b16d3ca..59e3f30cb2b --- a/tools/releasetools/common.py +++ b/tools/releasetools/common.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import print_function + import copy import errno import getopt @@ -33,6 +35,17 @@ from hashlib import sha1 as sha1 +try: + raw_input +except NameError: + raw_input = input + + +def iteritems(obj): + if hasattr(obj, 'iteritems'): + return obj.iteritems() + return obj.items() + class Options(object): def __init__(self): @@ -46,7 +59,7 @@ def __init__(self): self.signapk_shared_library_path = "lib64" # Relative to search_path self.extra_signapk_args = [] self.java_path = "java" # Use the one on the path by default. - self.java_args = "-Xmx2048m" # JVM Args + self.java_args = ["-Xmx2048m"] # The default JVM args. 
self.public_key_suffix = ".x509.pem" self.private_key_suffix = ".pk8" # use otatools built boot_signer by default @@ -109,7 +122,7 @@ def Run(args, **kwargs): """Create and return a subprocess.Popen object, printing the command line on the terminal if -v was specified.""" if OPTIONS.verbose: - print " running: ", " ".join(args) + print(" running: ", " ".join(args)) return subprocess.Popen(args, **kwargs) @@ -216,8 +229,8 @@ def read_helper(fn): if os.path.exists(system_base_fs_file): d["system_base_fs_file"] = system_base_fs_file else: - print "Warning: failed to find system base fs file: %s" % ( - system_base_fs_file,) + print("Warning: failed to find system base fs file: %s" % ( + system_base_fs_file,)) del d["system_base_fs_file"] if "vendor_base_fs_file" in d: @@ -226,10 +239,13 @@ def read_helper(fn): if os.path.exists(vendor_base_fs_file): d["vendor_base_fs_file"] = vendor_base_fs_file else: - print "Warning: failed to find vendor base fs file: %s" % ( - vendor_base_fs_file,) + print("Warning: failed to find vendor base fs file: %s" % ( + vendor_base_fs_file,)) del d["vendor_base_fs_file"] + + if "device_type" not in d: + d["device_type"] = "MMC" try: data = read_helper("META/imagesizes.txt") for line in data.split("\n"): @@ -262,11 +278,11 @@ def makeint(key): system_root_image = d.get("system_root_image", None) == "true" if d.get("no_recovery", None) != "true": recovery_fstab_path = "RECOVERY/RAMDISK/etc/recovery.fstab" - d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"], + d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"], d["device_type"], recovery_fstab_path, system_root_image) elif d.get("recovery_as_boot", None) == "true": recovery_fstab_path = "BOOT/RAMDISK/etc/recovery.fstab" - d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"], + d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"], d["device_type"], recovery_fstab_path, system_root_image) else: d["fstab"] = None @@ -278,7 +294,7 @@ def 
LoadBuildProp(read_helper): try: data = read_helper("SYSTEM/build.prop") except KeyError: - print "Warning: could not find SYSTEM/build.prop in %s" % zip + print("Warning: could not find SYSTEM/build.prop in %s" % zip) data = "" return LoadDictionaryFromLines(data.split("\n")) @@ -293,7 +309,7 @@ def LoadDictionaryFromLines(lines): d[name] = value return d -def LoadRecoveryFSTab(read_helper, fstab_version, recovery_fstab_path, +def LoadRecoveryFSTab(read_helper, fstab_version, type, recovery_fstab_path, system_root_image=False): class Partition(object): def __init__(self, mount_point, fs_type, device, length, device2, context): @@ -307,7 +323,7 @@ def __init__(self, mount_point, fs_type, device, length, device2, context): try: data = read_helper(recovery_fstab_path) except KeyError: - print "Warning: could not find {}".format(recovery_fstab_path) + print("Warning: could not find %s" % format(recovery_fstab_path)) data = "" if fstab_version == 1: @@ -339,11 +355,12 @@ def __init__(self, mount_point, fs_type, device, length, device2, context): if i.startswith("length="): length = int(i[7:]) else: - print "%s: unknown option \"%s\"" % (mount_point, i) + print("%s: unknown option \"%s\"" % (mount_point, i)) - d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[1], - device=pieces[2], length=length, - device2=device2) + if not d.get(mount_point): + d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[1], + device=pieces[2], length=length, + device2=device2) elif fstab_version == 2: d = {} @@ -379,9 +396,10 @@ def __init__(self, mount_point, fs_type, device, length, device2, context): context = i mount_point = pieces[1] - d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[2], - device=pieces[0], length=length, - device2=None, context=context) + if not d.get(mount_point): + d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[2], + device=pieces[0], length=length, + device2=None, context=context) else: raise 
ValueError("Unknown fstab_version: \"%d\"" % (fstab_version,)) @@ -397,17 +415,21 @@ def __init__(self, mount_point, fs_type, device, length, device2, context): def DumpInfoDict(d): for k, v in sorted(d.items()): - print "%-25s = (%s) %s" % (k, type(v).__name__, v) + print("%-25s = (%s) %s" % (k, type(v).__name__, v)) def _BuildBootableImage(sourcedir, fs_config_file, info_dict=None, - has_ramdisk=False): + has_ramdisk=False, two_step_image=False): """Build a bootable image from the specified sourcedir. Take a kernel, cmdline, and optionally a ramdisk directory from the input (in - 'sourcedir'), and turn them into a boot image. Return the image data, or - None if sourcedir does not appear to contains files for building the - requested image.""" + 'sourcedir'), and turn them into a boot image. 'two_step_image' indicates if + we are building a two-step special image (i.e. building a recovery image to + be loaded into /boot in two-step OTAs). + + Return the image data, or None if sourcedir does not appear to contains files + for building the requested image. 
+ """ def make_ramdisk(): ramdisk_img = tempfile.NamedTemporaryFile() @@ -437,68 +459,164 @@ def make_ramdisk(): info_dict = OPTIONS.info_dict img = tempfile.NamedTemporaryFile() + bootimg_key = os.getenv("PRODUCT_PRIVATE_KEY", None) + verity_key = os.getenv("PRODUCT_VERITY_KEY", None) + custom_boot_signer = os.getenv("PRODUCT_BOOT_SIGNER", None) if has_ramdisk: ramdisk_img = make_ramdisk() - # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set - mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg" - - cmd = [mkbootimg, "--kernel", os.path.join(sourcedir, "kernel")] - - fn = os.path.join(sourcedir, "second") - if os.access(fn, os.F_OK): - cmd.append("--second") - cmd.append(fn) - - fn = os.path.join(sourcedir, "cmdline") - if os.access(fn, os.F_OK): - cmd.append("--cmdline") - cmd.append(open(fn).read().rstrip("\n")) - - fn = os.path.join(sourcedir, "base") - if os.access(fn, os.F_OK): - cmd.append("--base") - cmd.append(open(fn).read().rstrip("\n")) - - fn = os.path.join(sourcedir, "pagesize") + """check if uboot is requested""" + fn = os.path.join(sourcedir, "ubootargs") if os.access(fn, os.F_OK): - cmd.append("--pagesize") - cmd.append(open(fn).read().rstrip("\n")) - - args = info_dict.get("mkbootimg_args", None) - if args and args.strip(): - cmd.extend(shlex.split(args)) - - args = info_dict.get("mkbootimg_version_args", None) - if args and args.strip(): - cmd.extend(shlex.split(args)) - - if has_ramdisk: - cmd.extend(["--ramdisk", ramdisk_img.name]) - - img_unsigned = None - if info_dict.get("vboot", None): - img_unsigned = tempfile.NamedTemporaryFile() - cmd.extend(["--output", img_unsigned.name]) + cmd = ["mkimage"] + for argument in open(fn).read().rstrip("\n").split(" "): + cmd.append(argument) + cmd.append("-d") + cmd.append(os.path.join(sourcedir, "kernel") + ":" + ramdisk_img.name) + cmd.append(img.name) else: - cmd.extend(["--output", img.name]) + # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set + mkbootimg = 
os.getenv('MKBOOTIMG') or "mkbootimg" + + cmd = [mkbootimg, "--kernel", os.path.join(sourcedir, "kernel")] + + fn = os.path.join(sourcedir, "second") + if os.access(fn, os.F_OK): + cmd.append("--second") + cmd.append(fn) + + fn = os.path.join(sourcedir, "cmdline") + if os.access(fn, os.F_OK): + cmd.append("--cmdline") + cmd.append(open(fn).read().rstrip("\n")) + + fn = os.path.join(sourcedir, "base") + if os.access(fn, os.F_OK): + cmd.append("--base") + cmd.append(open(fn).read().rstrip("\n")) + + fn = os.path.join(sourcedir, "tagsaddr") + if os.access(fn, os.F_OK): + cmd.append("--tags-addr") + cmd.append(open(fn).read().rstrip("\n")) + + fn = os.path.join(sourcedir, "tags_offset") + if os.access(fn, os.F_OK): + cmd.append("--tags_offset") + cmd.append(open(fn).read().rstrip("\n")) + + fn = os.path.join(sourcedir, "ramdisk_offset") + if os.access(fn, os.F_OK): + cmd.append("--ramdisk_offset") + cmd.append(open(fn).read().rstrip("\n")) + + fn = os.path.join(sourcedir, "dt") + if os.access(fn, os.F_OK): + cmd.append("--dt") + cmd.append(fn) + + fn = os.path.join(sourcedir, "pagesize") + if os.access(fn, os.F_OK): + kernel_pagesize = open(fn).read().rstrip("\n") + cmd.append("--pagesize") + cmd.append(kernel_pagesize) + + args = info_dict.get("mkbootimg_args", None) + if args and args.strip(): + cmd.extend(shlex.split(args)) + + args = info_dict.get("mkbootimg_version_args", None) + if args and args.strip(): + cmd.extend(shlex.split(args)) + + if has_ramdisk: + cmd.extend(["--ramdisk", ramdisk_img.name]) + + img_unsigned = None + if info_dict.get("vboot", None): + img_unsigned = tempfile.NamedTemporaryFile() + cmd.extend(["--output", img_unsigned.name]) + else: + cmd.extend(["--output", img.name]) p = Run(cmd, stdout=subprocess.PIPE) p.communicate() assert p.returncode == 0, "mkbootimg of %s image failed" % ( os.path.basename(sourcedir),) + if custom_boot_signer and bootimg_key and os.path.exists(bootimg_key): + print("Signing bootable image with custom boot 
signer...") + img_secure = tempfile.NamedTemporaryFile() + p = Run([custom_boot_signer, img.name, img_secure.name], stdout=subprocess.PIPE) + p.communicate() + assert p.returncode == 0, "signing of bootable image failed" + shutil.copyfile(img_secure.name, img.name) + img_secure.close() + elif bootimg_key and os.path.exists(bootimg_key) and kernel_pagesize > 0: + print("Signing bootable image...") + bootimg_key_passwords = {} + bootimg_key_passwords.update(PasswordManager().GetPasswords(bootimg_key.split())) + bootimg_key_password = bootimg_key_passwords[bootimg_key] + if bootimg_key_password is not None: + bootimg_key_password += "\n" + img_sha256 = tempfile.NamedTemporaryFile() + img_sig = tempfile.NamedTemporaryFile() + img_sig_padded = tempfile.NamedTemporaryFile() + img_secure = tempfile.NamedTemporaryFile() + p = Run(["openssl", "dgst", "-sha256", "-binary", "-out", img_sha256.name, img.name], + stdout=subprocess.PIPE) + p.communicate() + assert p.returncode == 0, "signing of bootable image failed" + p = Run(["openssl", "rsautl", "-sign", "-in", img_sha256.name, "-inkey", bootimg_key, "-out", + img_sig.name, "-passin", "stdin"], stdin=subprocess.PIPE, stdout=subprocess.PIPE) + p.communicate(bootimg_key_password) + assert p.returncode == 0, "signing of bootable image failed" + p = Run(["dd", "if=/dev/zero", "of=%s" % img_sig_padded.name, "bs=%s" % kernel_pagesize, + "count=1"], stdout=subprocess.PIPE) + p.communicate() + assert p.returncode == 0, "signing of bootable image failed" + p = Run(["dd", "if=%s" % img_sig.name, "of=%s" % img_sig_padded.name, "conv=notrunc"], + stdout=subprocess.PIPE) + p.communicate() + assert p.returncode == 0, "signing of bootable image failed" + p = Run(["cat", img.name, img_sig_padded.name], stdout=img_secure.file.fileno()) + p.communicate() + assert p.returncode == 0, "signing of bootable image failed" + shutil.copyfile(img_secure.name, img.name) + img_sha256.close() + img_sig.close() + img_sig_padded.close() + img_secure.close() 
+ if (info_dict.get("boot_signer", None) == "true" and info_dict.get("verity_key", None)): - path = "/" + os.path.basename(sourcedir).lower() + # Hard-code the path as "/boot" for two-step special recovery image (which + # will be loaded into /boot during the two-step OTA). + if two_step_image: + path = "/boot" + else: + path = "/" + os.path.basename(sourcedir).lower() cmd = [OPTIONS.boot_signer_path] cmd.extend(OPTIONS.boot_signer_args) cmd.extend([path, img.name, info_dict["verity_key"] + ".pk8", info_dict["verity_key"] + ".x509.pem", img.name]) - p = Run(cmd, stdout=subprocess.PIPE) - p.communicate() + verity_key_password = None + + if verity_key and os.path.exists(verity_key+".pk8") and kernel_pagesize > 0: + verity_key_passwords = {} + verity_key_passwords.update(PasswordManager().GetPasswords(verity_key.split())) + verity_key_password = verity_key_passwords[verity_key] + + if verity_key_password is not None: + verity_key_password += "\n" + p = Run(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE) + p.communicate(verity_key_password) + else: + p = Run(cmd) + p.communicate() + assert p.returncode == 0, "boot_signer of %s image failed" % path # Sign the image if vboot is non-empty. @@ -530,7 +648,7 @@ def make_ramdisk(): def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir, - info_dict=None): + info_dict=None, two_step_image=False): """Return a File object with the desired bootable image. Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name 'prebuilt_name', @@ -539,15 +657,15 @@ def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir, prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name) if os.path.exists(prebuilt_path): - print "using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,) + print("using prebuilt %s from BOOTABLE_IMAGES..." 
% prebuilt_name) return File.FromLocalFile(name, prebuilt_path) prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name) if os.path.exists(prebuilt_path): - print "using prebuilt %s from IMAGES..." % (prebuilt_name,) + print("using prebuilt %s from IMAGES..." % prebuilt_name) return File.FromLocalFile(name, prebuilt_path) - print "building image from target_files %s..." % (tree_subdir,) + print("building image from target_files %s..." % tree_subdir) if info_dict is None: info_dict = OPTIONS.info_dict @@ -562,7 +680,7 @@ def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir, fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt" data = _BuildBootableImage(os.path.join(unpack_dir, tree_subdir), os.path.join(unpack_dir, fs_config), - info_dict, has_ramdisk) + info_dict, has_ramdisk, two_step_image) if data: return File(name, data) return None @@ -582,6 +700,7 @@ def UnzipTemp(filename, pattern=None): OPTIONS.tempfiles.append(tmp) def unzip_to_dir(filename, dirname): + subprocess.call(["rm", "-rf", dirname + filename, "targetfiles-*"]) cmd = ["unzip", "-o", "-q", filename, "-d", dirname] if pattern is not None: cmd.append(pattern) @@ -636,7 +755,7 @@ def GetKeyPasswords(keylist): if p.returncode == 0: # Encrypted key with empty string as password. key_passwords[k] = '' - elif stderr.startswith('Error decrypting key'): + elif stderr.startswith(b'Error decrypting key'): # Definitely encrypted key. # It would have said "Error reading key" if it didn't parse correctly. 
need_passwords.append(k) @@ -710,11 +829,10 @@ def SignFile(input_name, output_name, key, password, min_api_level=None, java_library_path = os.path.join( OPTIONS.search_path, OPTIONS.signapk_shared_library_path) - cmd = [OPTIONS.java_path, OPTIONS.java_args, - "-Djava.library.path=" + java_library_path, - "-jar", - os.path.join(OPTIONS.search_path, OPTIONS.signapk_path)] - cmd.extend(OPTIONS.extra_signapk_args) + cmd = ([OPTIONS.java_path] + OPTIONS.java_args + + ["-Djava.library.path=" + java_library_path, + "-jar", os.path.join(OPTIONS.search_path, OPTIONS.signapk_path)] + + OPTIONS.extra_signapk_args) if whole_file: cmd.append("-w") @@ -750,6 +868,8 @@ def CheckSize(data, target, info_dict): fs_type = None limit = None if info_dict["fstab"]: + if mount_point == "/userdata_extra": + mount_point = "/data" if mount_point == "/userdata": mount_point = "/data" p = info_dict["fstab"][mount_point] @@ -771,11 +891,11 @@ def CheckSize(data, target, info_dict): if pct >= 99.0: raise ExternalError(msg) elif pct >= 95.0: - print - print " WARNING: ", msg - print + print() + print(" WARNING: ", msg) + print() elif OPTIONS.verbose: - print " ", msg + print(" ", msg) def ReadApkCerts(tf_zip): @@ -824,8 +944,8 @@ def ReadApkCerts(tf_zip): """ def Usage(docstring): - print docstring.rstrip("\n") - print COMMON_DOCSTRING + print(docstring.rstrip("\n")) + print(COMMON_DOCSTRING) def ParseOptions(argv, @@ -850,7 +970,7 @@ def ParseOptions(argv, list(extra_long_opts)) except getopt.GetoptError as err: Usage(docstring) - print "**", str(err), "**" + print("**", str(err), "**") sys.exit(2) for o, a in opts: @@ -870,7 +990,7 @@ def ParseOptions(argv, elif o in ("--java_path",): OPTIONS.java_path = a elif o in ("--java_args",): - OPTIONS.java_args = a + OPTIONS.java_args = shlex.split(a) elif o in ("--public_key_suffix",): OPTIONS.public_key_suffix = a elif o in ("--private_key_suffix",): @@ -920,6 +1040,7 @@ class PasswordManager(object): def __init__(self): self.editor = 
os.getenv("EDITOR", None) self.pwfile = os.getenv("ANDROID_PW_FILE", None) + self.secure_storage_cmd = os.getenv("ANDROID_SECURE_STORAGE_CMD", None) def GetPasswords(self, items): """Get passwords corresponding to each string in 'items', @@ -939,16 +1060,30 @@ def GetPasswords(self, items): missing = [] for i in items: if i not in current or not current[i]: - missing.append(i) + #Attempt to load using ANDROID_SECURE_STORAGE_CMD + if self.secure_storage_cmd: + try: + os.environ["TMP__KEY_FILE_NAME"] = str(i) + ps = subprocess.Popen(self.secure_storage_cmd, shell=True, stdout=subprocess.PIPE) + output = ps.communicate()[0] + if ps.returncode == 0: + current[i] = output + except Exception as e: + print(e) + pass + if i not in current or not current[i]: + missing.append(i) # Are all the passwords already in the file? if not missing: + if "ANDROID_SECURE_STORAGE_CMD" in os.environ: + del os.environ["ANDROID_SECURE_STORAGE_CMD"] return current for i in missing: current[i] = "" if not first: - print "key file %s still missing some passwords." % (self.pwfile,) + print("key file %s still missing some passwords." % self.pwfile) answer = raw_input("try to edit again? [y]> ").strip() if answer and answer[0] not in 'yY': raise RuntimeError("key passwords unavailable") @@ -962,7 +1097,7 @@ def PromptResult(self, current): # pylint: disable=no-self-use values. 
""" result = {} - for k, v in sorted(current.iteritems()): + for k, v in sorted(iteritems(current)): if v: result[k] = v else: @@ -983,7 +1118,7 @@ def UpdateAndReadFile(self, current): f.write("# (Additional spaces are harmless.)\n\n") first_line = None - sorted_list = sorted([(not v, k, v) for (k, v) in current.iteritems()]) + sorted_list = sorted((not v, k, v) for (k, v) in current.items()) for i, (_, k, v) in enumerate(sorted_list): f.write("[[[ %s ]]] %s\n" % (v, k)) if not v and first_line is None: @@ -1008,13 +1143,13 @@ def ReadFile(self): continue m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line) if not m: - print "failed to parse password file: ", line + print("failed to parse password file: ", line) else: result[m.group(2)] = m.group(1) f.close() except IOError as e: if e.errno != errno.ENOENT: - print "error reading password file: ", str(e) + print("error reading password file: ", str(e)) return result @@ -1118,7 +1253,7 @@ def __init__(self, **kwargs): """Keyword arguments to the constructor become attributes of this object, which is passed to all functions in the device-specific module.""" - for k, v in kwargs.iteritems(): + for k, v in iteritems(kwargs): setattr(self, k, v) self.extras = OPTIONS.extras @@ -1135,10 +1270,10 @@ def __init__(self, **kwargs): if x == ".py": f = b info = imp.find_module(f, [d]) - print "loaded device-specific extensions from", path + print("loaded device-specific extensions from", path) self.module = imp.load_module("device_specific", *info) except ImportError: - print "unable to load device-specific module; assuming none" + print("unable to load device-specific module; assuming none") def _DoCall(self, function_name, *args, **kwargs): """Call the named function in the device-specific module, passing @@ -1165,6 +1300,11 @@ def FullOTA_InstallEnd(self): used to install the image for the device's baseband processor.""" return self._DoCall("FullOTA_InstallEnd") + def FullOTA_PostValidate(self): + """Called after 
installing and validating /system; typically this is + used to resize the system partition after a block based installation.""" + return self._DoCall("FullOTA_PostValidate") + def IncrementalOTA_Assertions(self): """Called after emitting the block of assertions at the top of an incremental OTA package. Implementations can add whatever @@ -1270,9 +1410,9 @@ def run(): err.append(e) th = threading.Thread(target=run) th.start() - th.join(timeout=300) # 5 mins + th.join(timeout=600) # 10 mins if th.is_alive(): - print "WARNING: diff command timed out" + print("WARNING: diff command timed out") p.terminate() th.join(5) if th.is_alive(): @@ -1280,8 +1420,8 @@ def run(): th.join() if err or p.returncode != 0: - print "WARNING: failure running %s:\n%s\n" % ( - diff_program, "".join(err)) + print("WARNING: failure running %s:\n%s\n" % ( + cmd, "".join(err))) self.patch = None return None, None, None diff = ptemp.read() @@ -1303,7 +1443,7 @@ def GetPatch(self): def ComputeDifferences(diffs): """Call ComputePatch on all the Difference objects in 'diffs'.""" - print len(diffs), "diffs to compute" + print(len(diffs), "diffs to compute") # Do the largest files first, to try and reduce the long-pole effect. by_size = [(i.tf.size, i) for i in diffs] @@ -1329,13 +1469,13 @@ def worker(): else: name = "%s (%s)" % (tf.name, sf.name) if patch is None: - print "patching failed! %s" % (name,) + print("patching failed! %s" % name) else: - print "%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % ( - dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name) + print("%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % ( + dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name)) lock.release() except Exception as e: - print e + print(e) raise # start worker threads; wait for them all to finish. 
@@ -1591,7 +1731,11 @@ def _HashZeroBlocks(self, num_blocks): # pylint: disable=no-self-use "ext4": "EMMC", "emmc": "EMMC", "f2fs": "EMMC", - "squashfs": "EMMC" + "squashfs": "EMMC", + "ext2": "EMMC", + "ext3": "EMMC", + "vfat": "EMMC", + "osip": "OSIP" } def GetTypeAndDevice(mount_point, info): @@ -1605,16 +1749,18 @@ def GetTypeAndDevice(mount_point, info): def ParseCertificate(data): """Parse a PEM-format certificate.""" + from codecs import decode cert = [] save = False for line in data.split("\n"): if "--END CERTIFICATE--" in line: break if save: - cert.append(line) + l = line.encode() if hasattr(line, 'encode') else line + cert.append(l) if "--BEGIN CERTIFICATE--" in line: save = True - cert = "".join(cert).decode('base64') + cert = decode(b"".join(cert), 'base64') return cert def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img, @@ -1636,14 +1782,18 @@ def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img, full_recovery_image = info_dict.get("full_recovery_image", None) == "true" system_root_image = info_dict.get("system_root_image", None) == "true" + use_bsdiff = info_dict.get("no_gzip_recovery_ramdisk", None) == "true" if full_recovery_image: output_sink("etc/recovery.img", recovery_img.data) else: - diff_program = ["imgdiff"] + if use_bsdiff: + diff_program = ["bsdiff"] + else: + diff_program = ["imgdiff"] path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat") - if os.path.exists(path): + if os.path.exists(path) and not use_bsdiff: diff_program.append("-b") diff_program.append(path) bonus_args = "-b /system/etc/recovery-resource.dat" @@ -1675,6 +1825,9 @@ def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img, 'size': recovery_img.size} else: sh = """#!/system/bin/sh +if [ -f /system/etc/recovery-transform.sh ]; then + exec sh /system/etc/recovery-transform.sh %(recovery_size)d %(recovery_sha1)s %(boot_size)d %(boot_sha1)s +fi if ! 
applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed" else @@ -1716,6 +1869,6 @@ def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img, if found: break - print "putting script in", sh_location + print("putting script in", sh_location) output_sink(sh_location, sh) diff --git a/tools/releasetools/edify_generator.py b/tools/releasetools/edify_generator.py index 2ecc5cbb86d..a1c50cd9db8 100644 --- a/tools/releasetools/edify_generator.py +++ b/tools/releasetools/edify_generator.py @@ -132,20 +132,47 @@ def AssertOlderBuild(self, timestamp, timestamp_text): def AssertDevice(self, device): """Assert that the device identifier is the given string.""" - cmd = ('getprop("ro.product.device") == "%s" || ' - 'abort("E%d: This package is for \\"%s\\" devices; ' - 'this is a \\"" + getprop("ro.product.device") + "\\".");') % ( - device, common.ErrorCode.DEVICE_MISMATCH, device) + cmd = ('assert(' + + ' || '.join(['getprop("ro.product.device") == "%s" || getprop("ro.build.product") == "%s"' + % (i, i) for i in device.split(",")]) + + ' || abort("E%d: This package is for device: %s; ' + + 'this device is " + getprop("ro.product.device") + ".");' + + ');') % (common.ErrorCode.DEVICE_MISMATCH, device) self.script.append(cmd) def AssertSomeBootloader(self, *bootloaders): - """Asert that the bootloader version is one of *bootloaders.""" + """Assert that the bootloader version is one of *bootloaders.""" cmd = ("assert(" + - " ||\0".join(['getprop("ro.bootloader") == "%s"' % (b,) + " || ".join(['getprop("ro.bootloader") == "%s"' % (b,) for b in bootloaders]) + + ' || abort("This package supports bootloader(s): ' + + 
", ".join(["%s" % (b,) for b in bootloaders]) + + '; this device has bootloader " + getprop("ro.bootloader") + ".");' + ");") self.script.append(self.WordWrap(cmd)) + def AssertSomeBaseband(self, *basebands): + """Assert that the baseband version is one of *basebands.""" + cmd = ("assert(" + + " || ".join(['getprop("ro.baseband") == "%s"' % (b,) + for b in basebands]) + + ' || abort("This package supports baseband(s): ' + + ", ".join(["%s" % (b,) for b in basebands]) + + '; this device has baseband " + getprop("ro.baseband") + ".");' + + ");") + self.script.append(self.WordWrap(cmd)) + + def RunBackup(self, command): + self.script.append(('run_program("/tmp/install/bin/backuptool.sh", "%s");' % command)) + + def ValidateSignatures(self, command): + self.script.append('package_extract_file("META-INF/org/aicp/releasekey", "/tmp/releasekey");') + # Exit code 124 == abort. run_program returns raw, so left-shift 8bit + self.script.append('run_program("/tmp/install/bin/otasigcheck.sh") != "31744" || abort("Can\'t install this package on top of incompatible data. Please try another package or run a factory reset");') + + def RunPersist(self, command): + self.script.append(('run_program("/tmp/install/bin/persist.sh", "%s");' % command)) + def ShowProgress(self, frac, dur): """Update the progress bar, advancing it over 'frac' over the next 'dur' seconds. 
'dur' may be zero to advance it via SetProgress @@ -217,6 +244,12 @@ def Mount(self, mount_point, mount_options_by_format=""): p.mount_point, mount_flags)) self.mounts.add(p.mount_point) + def Unmount(self, mount_point): + """Unmount the partition with the given mount_point.""" + if mount_point in self.mounts: + self.mounts.remove(mount_point) + self.script.append('unmount("%s");' % (mount_point,)) + def UnpackPackageDir(self, src, dst): """Unpack a given directory from the OTA package into the given destination directory.""" @@ -322,6 +355,10 @@ def WriteRawImage(self, mount_point, fn, mapfn=None): self.script.append( 'write_raw_image(package_extract_file("%(fn)s"), "%(device)s");' % args) + elif partition_type == "OSIP": + self.script.append( + 'write_osip_image(package_extract_file("%(fn)s"), "%(device)s");' + % args) elif partition_type == "EMMC": if mapfn: args["map"] = mapfn @@ -339,10 +376,10 @@ def SetPermissions(self, fn, uid, gid, mode, selabel, capabilities): if not self.info.get("use_set_metadata", False): self.script.append('set_perm(%d, %d, 0%o, "%s");' % (uid, gid, mode, fn)) else: - if capabilities is None: - capabilities = "0x0" - cmd = 'set_metadata("%s", "uid", %d, "gid", %d, "mode", 0%o, ' \ - '"capabilities", %s' % (fn, uid, gid, mode, capabilities) + cmd = 'set_metadata("%s", "uid", %d, "gid", %d, "mode", 0%o' \ + % (fn, uid, gid, mode) + if capabilities is not None: + cmd += ', "capabilities", %s' % ( capabilities ) if selabel is not None: cmd += ', "selabel", "%s"' % selabel cmd += ');' @@ -355,11 +392,11 @@ def SetPermissionsRecursive(self, fn, uid, gid, dmode, fmode, selabel, self.script.append('set_perm_recursive(%d, %d, 0%o, 0%o, "%s");' % (uid, gid, dmode, fmode, fn)) else: - if capabilities is None: - capabilities = "0x0" cmd = 'set_metadata_recursive("%s", "uid", %d, "gid", %d, ' \ - '"dmode", 0%o, "fmode", 0%o, "capabilities", %s' \ - % (fn, uid, gid, dmode, fmode, capabilities) + '"dmode", 0%o, "fmode", 0%o' \ + % (fn, uid, gid, 
dmode, fmode) + if capabilities is not None: + cmd += ', "capabilities", "%s"' % ( capabilities ) if selabel is not None: cmd += ', "selabel", "%s"' % selabel cmd += ');' @@ -371,7 +408,7 @@ def MakeSymlinks(self, symlink_list): for d, l in symlink_list: by_dest.setdefault(d, []).append(l) - for dest, links in sorted(by_dest.iteritems()): + for dest, links in sorted(by_dest.items()): cmd = ('symlink("%s", ' % (dest,) + ",\0".join(['"' + i + '"' for i in sorted(links)]) + ");") self.script.append(self.WordWrap(cmd)) diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py index aa21d7eab9f..da66a7f40b9 100755 --- a/tools/releasetools/img_from_target_files.py +++ b/tools/releasetools/img_from_target_files.py @@ -26,14 +26,20 @@ """ +from __future__ import print_function + import sys if sys.hexversion < 0x02070000: - print >> sys.stderr, "Python 2.7 or newer is required." + print("Python 2.7 or newer is required.", file=sys.stderr) sys.exit(1) +import errno import os +import re import shutil +import subprocess +import tempfile import zipfile import common @@ -47,6 +53,31 @@ def CopyInfo(output_zip): output_zip, os.path.join(OPTIONS.input_tmp, "OTA", "android-info.txt"), "android-info.txt") +def AddRadio(output_zip): + """If they exist, add RADIO files to the output.""" + if os.path.isdir(os.path.join(OPTIONS.input_tmp, "RADIO")): + for radio_root, radio_dirs, radio_files in os.walk(os.path.join(OPTIONS.input_tmp, "RADIO")): + for radio_file in radio_files: + output_zip.write(os.path.join(radio_root, radio_file), radio_file) + + # If a filesmap file exists, create a script to flash the radio images based on it + filesmap = os.path.join(OPTIONS.input_tmp, "RADIO/filesmap") + if os.path.isfile(filesmap): + print("creating flash-radio.sh...") + filesmap_data = open(filesmap, "r") + filesmap_regex = re.compile(r'^(\S+)\s\S+\/by-name\/(\S+).*') + tmp_flash_radio = tempfile.NamedTemporaryFile() + 
tmp_flash_radio.write("#!/bin/sh\n\n") + for filesmap_line in filesmap_data: + filesmap_entry = filesmap_regex.search(filesmap_line) + if filesmap_entry: + tmp_flash_radio.write("fastboot flash %s %s\n" % (filesmap_entry.group(2), filesmap_entry.group(1))) + tmp_flash_radio.flush() + if os.path.getsize(tmp_flash_radio.name) > 0: + output_zip.write(tmp_flash_radio.name, "flash-radio.sh") + else: + print("flash-radio.sh is empty, skipping...") + tmp_flash_radio.close() def main(argv): bootable_only = [False] @@ -72,6 +103,7 @@ def option_handler(o, _): OPTIONS.input_tmp, input_zip = common.UnzipTemp(args[0]) output_zip = zipfile.ZipFile(args[1], "w", compression=zipfile.ZIP_DEFLATED) CopyInfo(output_zip) + AddRadio(output_zip) try: done = False @@ -79,6 +111,7 @@ def option_handler(o, _): if os.path.exists(images_path): # If this is a new target-files, it already contains the images, # and all we have to do is copy them to the output zip. + # Skip oem.img files since they are not needed in fastboot images. images = os.listdir(images_path) if images: for image in images: @@ -86,6 +119,10 @@ def option_handler(o, _): continue if not image.endswith(".img"): continue + if image == "oem.img": + continue + if image == "recovery-two-step.img": + continue common.ZipWrite( output_zip, os.path.join(images_path, image), image) done = True @@ -109,7 +146,7 @@ def option_handler(o, _): recovery_image.AddToZip(output_zip) def banner(s): - print "\n\n++++ " + s + " ++++\n\n" + print("\n\n++++ " + s + " ++++\n\n") if not bootable_only: banner("AddSystem") @@ -122,15 +159,17 @@ def banner(s): pass # no vendor partition for this device banner("AddUserdata") add_img_to_target_files.AddUserdata(output_zip, prefix="") + banner("AddUserdataExtra") + add_img_to_target_files.AddUserdataExtra(output_zip, prefix="") banner("AddCache") add_img_to_target_files.AddCache(output_zip, prefix="") finally: - print "cleaning up..." 
+ print("cleaning up...") common.ZipClose(output_zip) shutil.rmtree(OPTIONS.input_tmp) - print "done." + print("done.") if __name__ == '__main__': @@ -138,7 +177,7 @@ def banner(s): common.CloseInheritedPipes() main(sys.argv[1:]) except common.ExternalError as e: - print - print " ERROR: %s" % (e,) - print + print() + print(" ERROR: %s" % e) + print() sys.exit(1) diff --git a/tools/releasetools/make_recovery_patch.py b/tools/releasetools/make_recovery_patch.py index 08d14500886..7c6007e5f31 100755 --- a/tools/releasetools/make_recovery_patch.py +++ b/tools/releasetools/make_recovery_patch.py @@ -14,10 +14,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import print_function + import sys if sys.hexversion < 0x02070000: - print >> sys.stderr, "Python 2.7 or newer is required." + print("Python 2.7 or newer is required.", file=sys.stderr) sys.exit(1) import os diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py index d3d4974ff7b..6a55a4160fd 100755 --- a/tools/releasetools/ota_from_target_files.py +++ b/tools/releasetools/ota_from_target_files.py @@ -124,6 +124,10 @@ --payload_signer_args Specify the arguments needed for payload signer. + + --backup + Enable or disable the execution of backuptool.sh. + Disabled by default. 
""" import sys @@ -174,6 +178,9 @@ OPTIONS.log_diff = None OPTIONS.payload_signer = None OPTIONS.payload_signer_args = [] +OPTIONS.backuptool = False +OPTIONS.override_device = 'auto' +OPTIONS.override_prop = False def MostPopularKey(d, default): """Given a dict, return the key corresponding to the largest @@ -454,7 +461,10 @@ def SignOutput(temp_zip_name, output_zip_name): def AppendAssertions(script, info_dict, oem_dict=None): oem_props = info_dict.get("oem_fingerprint_properties") if oem_props is None or len(oem_props) == 0: - device = GetBuildProp("ro.product.device", info_dict) + if OPTIONS.override_device == "auto": + device = GetBuildProp("ro.product.device", info_dict) + else: + device = OPTIONS.override_device script.AssertDevice(device) else: if oem_dict is None: @@ -467,6 +477,39 @@ def AppendAssertions(script, info_dict, oem_dict=None): script.AssertOemProperty(prop, oem_dict.get(prop)) +def _WriteRecoveryImageToBoot(script, output_zip): + """Find and write recovery image to /boot in two-step OTA. + + In two-step OTAs, we write recovery image to /boot as the first step so that + we can reboot to there and install a new recovery image to /recovery. + A special "recovery-two-step.img" will be preferred, which encodes the correct + path of "/boot". Otherwise the device may show "device is corrupt" message + when booting into /boot. + + Fall back to using the regular recovery.img if the two-step recovery image + doesn't exist. Note that rebuilding the special image at this point may be + infeasible, because we don't have the desired boot signer and keys when + calling ota_from_target_files.py. 
+ """ + + recovery_two_step_img_name = "recovery-two-step.img" + recovery_two_step_img_path = os.path.join( + OPTIONS.input_tmp, "IMAGES", recovery_two_step_img_name) + if os.path.exists(recovery_two_step_img_path): + recovery_two_step_img = common.GetBootableImage( + recovery_two_step_img_name, recovery_two_step_img_name, + OPTIONS.input_tmp, "RECOVERY") + common.ZipWriteStr( + output_zip, recovery_two_step_img_name, recovery_two_step_img.data) + print "two-step package: using %s in stage 1/3" % ( + recovery_two_step_img_name,) + script.WriteRawImage("/boot", recovery_two_step_img_name) + else: + print "two-step package: using recovery.img in stage 1/3" + # The "recovery.img" entry has been written into package earlier. + script.WriteRawImage("/boot", "recovery.img") + + def HasRecoveryPatch(target_files_zip): namelist = [name for name in target_files_zip.namelist()] return ("SYSTEM/recovery-from-boot.p" in namelist or @@ -486,6 +529,8 @@ def GetOemProperty(name, oem_props, oem_dict, info_dict): def CalculateFingerprint(oem_props, oem_dict, info_dict): + if OPTIONS.override_prop: + return GetBuildProp("ro.build.date.utc", info_dict) if oem_props is None: return GetBuildProp("ro.build.fingerprint", info_dict) return "%s/%s/%s:%s" % ( @@ -535,6 +580,15 @@ def GetImage(which, tmpdir, info_dict): return sparse_img.SparseImage(path, mappath, clobbered_blocks) +def CopyInstallTools(output_zip): + install_path = os.path.join(OPTIONS.input_tmp, "INSTALL") + for root, subdirs, files in os.walk(install_path): + for f in files: + install_source = os.path.join(root, f) + install_target = os.path.join("install", os.path.relpath(root, install_path), f) + output_zip.write(install_source, install_target) + + def WriteFullOTAPackage(input_zip, output_zip): # TODO: how to determine this? We don't know what version it will # be installed on top of. 
For now, we expect the API just won't @@ -553,13 +607,18 @@ def WriteFullOTAPackage(input_zip, output_zip): oem_dict = common.LoadDictionaryFromLines( open(OPTIONS.oem_source).readlines()) - metadata = { - "post-build": CalculateFingerprint(oem_props, oem_dict, - OPTIONS.info_dict), - "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict, - OPTIONS.info_dict), - "post-timestamp": GetBuildProp("ro.build.date.utc", OPTIONS.info_dict), - } + if OPTIONS.override_prop: + metadata = { + "post-timestamp": GetBuildProp("ro.build.date.utc", OPTIONS.info_dict), + } + else: + metadata = { + "post-build": CalculateFingerprint(oem_props, oem_dict, + OPTIONS.info_dict), + "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict, + OPTIONS.info_dict), + "post-timestamp": GetBuildProp("ro.build.date.utc", OPTIONS.info_dict), + } device_specific = common.DeviceSpecificParams( input_zip=input_zip, @@ -575,10 +634,10 @@ def WriteFullOTAPackage(input_zip, output_zip): metadata["ota-type"] = "BLOCK" if block_based else "FILE" - if not OPTIONS.omit_prereq: - ts = GetBuildProp("ro.build.date.utc", OPTIONS.info_dict) - ts_text = GetBuildProp("ro.build.date", OPTIONS.info_dict) - script.AssertOlderBuild(ts, ts_text) + #if not OPTIONS.omit_prereq: + # ts = GetBuildProp("ro.build.date.utc", OPTIONS.info_dict) + # ts_text = GetBuildProp("ro.build.date", OPTIONS.info_dict) + # script.AssertOlderBuild(ts, ts_text) AppendAssertions(script, OPTIONS.info_dict, oem_dict) device_specific.FullOTA_Assertions() @@ -616,6 +675,9 @@ def WriteFullOTAPackage(input_zip, output_zip): script.AppendExtra(""" if get_stage("%(bcb_dev)s") == "2/3" then """ % bcb_dev) + + # Stage 2/3: Write recovery image to /recovery (currently running /boot). 
+ script.Comment("Stage 2/3") script.WriteRawImage("/recovery", "recovery.img") script.AppendExtra(""" set_stage("%(bcb_dev)s", "3/3"); @@ -623,21 +685,77 @@ def WriteFullOTAPackage(input_zip, output_zip): else if get_stage("%(bcb_dev)s") == "3/3" then """ % bcb_dev) + # Stage 3/3: Make changes. + script.Comment("Stage 3/3") + # Dump fingerprints script.Print("Target: %s" % CalculateFingerprint( oem_props, oem_dict, OPTIONS.info_dict)) + script.AppendExtra("ifelse(is_mounted(\"/system\"), unmount(\"/system\"));") device_specific.FullOTA_InstallBegin() + CopyInstallTools(output_zip) + script.UnpackPackageDir("install", "/tmp/install") + script.SetPermissionsRecursive("/tmp/install", 0, 0, 0755, 0644, None, None) + script.SetPermissionsRecursive("/tmp/install/bin", 0, 0, 0755, 0755, None, None) + + if OPTIONS.backuptool: + script.Mount("/system") + script.RunBackup("backup") + script.RunPersist("save") + script.Unmount("/system") + system_progress = 0.75 + script.Print("*********************************************"); + script.Print("* ##### ####**** ########**** *") + script.Print("* ############**** ##########****** *") + script.Print("* #### ####****##### **** **** *") + script.Print("* ### ###****#### **** **** *") + script.Print("* #### ####****#### **** **** *") + script.Print("* ############**** ##########****** *") + script.Print("* ##### ####**** ########**** *") + script.Print("* **** *") + script.Print("* **** *") + script.Print("* www.aicp-rom.com - Get your flash ON *") + script.Print("*********************************************"); + + + build = GetBuildProp("ro.aicp.version", OPTIONS.info_dict) + if os.getenv("AICP_BUILDTYPE") is not None: + script.Print("* Version: %s"%(build)); + elif os.getenv("AICP_NIGHTLY") is not None: + script.Print("* Version: %s"%(build)); + else: + build = GetBuildProp("ro.build.date", OPTIONS.info_dict) + script.Print("*********************************************"); + script.Print("************* AICP BUILD *************"); 
+ script.Print("*********************************************"); + script.Print("* Compiled: %s"%(build)); + + device = GetBuildProp("ro.product.device", OPTIONS.info_dict) + if GetBuildProp("ro.product.model", OPTIONS.info_dict) is not None: + model = GetBuildProp("ro.product.model", OPTIONS.info_dict) + script.Print("* Device: %s (%s)"%(model, device)); + else: + script.Print("* Device: %s"%(device)); + script.Print("*********************************************"); + if OPTIONS.wipe_user_data: system_progress -= 0.1 if HasVendorPartition(input_zip): system_progress -= 0.1 - # Place a copy of file_contexts.bin into the OTA package which will be used - # by the recovery program. + if not OPTIONS.wipe_user_data: + script.AppendExtra("if is_mounted(\"/data\") then") + script.ValidateSignatures("data") + script.AppendExtra("else") + script.Mount("/data") + script.ValidateSignatures("data") + script.Unmount("/data") + script.AppendExtra("endif;") + if "selinux_fc" in OPTIONS.info_dict: WritePolicyConfig(OPTIONS.info_dict["selinux_fc"], output_zip) @@ -702,6 +820,17 @@ def output_sink(fn, data): common.CheckSize(boot_img.data, "boot.img", OPTIONS.info_dict) common.ZipWriteStr(output_zip, "boot.img", boot_img.data) + device_specific.FullOTA_PostValidate() + + if OPTIONS.backuptool: + script.ShowProgress(0.02, 10) + if block_based: + script.Mount("/system") + script.RunBackup("restore") + script.RunPersist("restore") + if block_based: + script.Unmount("/system") + script.ShowProgress(0.05, 5) script.WriteRawImage("/boot", "boot.img") @@ -722,7 +851,11 @@ def output_sink(fn, data): set_stage("%(bcb_dev)s", ""); """ % bcb_dev) script.AppendExtra("else\n") - script.WriteRawImage("/boot", "recovery.img") + + # Stage 1/3: Nothing to verify for full OTA. Write recovery image to /boot. 
+ script.Comment("Stage 1/3") + _WriteRecoveryImageToBoot(script, output_zip) + script.AppendExtra(""" set_stage("%(bcb_dev)s", "2/3"); reboot_now("%(bcb_dev)s", ""); @@ -735,6 +868,11 @@ def output_sink(fn, data): metadata["ota-required-cache"] = str(script.required_cache) WriteMetadata(metadata, output_zip) + common.ZipWriteStr(output_zip, "system/build.prop", + ""+input_zip.read("SYSTEM/build.prop")) + + common.ZipWriteStr(output_zip, "META-INF/org/lineageos/releasekey", + ""+input_zip.read("META/releasekey.txt")) def WritePolicyConfig(file_name, output_zip): common.ZipWrite(output_zip, file_name, os.path.basename(file_name)) @@ -765,7 +903,8 @@ def GetBuildProp(prop, info_dict): try: return info_dict.get("build.prop", {})[prop] except KeyError: - raise common.ExternalError("couldn't find %s in build.prop" % (prop,)) + print ("WARNING: could not find %s in build.prop" % (prop,)) + return None def AddToKnownPaths(filename, known_paths): @@ -805,11 +944,16 @@ def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip): oem_dict = common.LoadDictionaryFromLines( open(OPTIONS.oem_source).readlines()) - metadata = { - "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict, - OPTIONS.source_info_dict), - "ota-type": "BLOCK", - } + if OPTIONS.override_prop: + metadata = { + "ota-type": "BLOCK", + } + else: + metadata = { + "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict, + OPTIONS.source_info_dict), + "ota-type": "BLOCK", + } post_timestamp = GetBuildProp("ro.build.date.utc", OPTIONS.target_info_dict) pre_timestamp = GetBuildProp("ro.build.date.utc", OPTIONS.source_info_dict) @@ -836,7 +980,9 @@ def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip): source_zip=source_zip, source_version=source_version, target_zip=target_zip, + input_zip=target_zip, target_version=target_version, + input_version=target_version, output_zip=output_zip, script=script, metadata=metadata, @@ -945,6 +1091,9 @@ def 
WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip): script.AppendExtra(""" if get_stage("%(bcb_dev)s") == "2/3" then """ % bcb_dev) + + # Stage 2/3: Write recovery image to /recovery (currently running /boot). + script.Comment("Stage 2/3") script.AppendExtra("sleep(20);\n") script.WriteRawImage("/recovery", "recovery.img") script.AppendExtra(""" @@ -953,6 +1102,9 @@ def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip): else if get_stage("%(bcb_dev)s") != "3/3" then """ % bcb_dev) + # Stage 1/3: (a) Verify the current system. + script.Comment("Stage 1/3") + # Dump fingerprints script.Print("Source: %s" % CalculateFingerprint( oem_props, oem_dict, OPTIONS.source_info_dict)) @@ -1016,13 +1168,18 @@ def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip): device_specific.IncrementalOTA_VerifyEnd() if OPTIONS.two_step: - script.WriteRawImage("/boot", "recovery.img") + # Stage 1/3: (b) Write recovery image to /boot. + _WriteRecoveryImageToBoot(script, output_zip) + script.AppendExtra(""" set_stage("%(bcb_dev)s", "2/3"); reboot_now("%(bcb_dev)s", ""); else """ % bcb_dev) + # Stage 3/3: Make changes. + script.Comment("Stage 3/3") + # Verify the existing partitions. 
system_diff.WriteVerifyScript(script, touched_blocks_only=True) if vendor_diff: @@ -1410,7 +1567,7 @@ def EmitVerification(self, script): so_far = 0 for tf, sf, _, _ in self.patch_list: if tf.name != sf.name: - script.SkipNextActionIfTargetExists(tf.name, tf.sha1) + script.SkipNextActionIfTargetExists("/" + tf.name, tf.sha1) script.PatchCheck("/"+sf.name, tf.sha1, sf.sha1) so_far += sf.size return so_far @@ -1442,7 +1599,7 @@ def EmitPatches(self, script, total_patch_size, so_far): deferred_patch_list.append(item) continue if sf.name != tf.name: - script.SkipNextActionIfTargetExists(tf.name, tf.sha1) + script.SkipNextActionIfTargetExists("/" + tf.name, tf.sha1) script.ApplyPatch("/" + sf.name, "-", tf.size, tf.sha1, sf.sha1, "patch/" + sf.name + ".p") so_far += tf.size @@ -1495,11 +1652,16 @@ def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip): oem_dict = common.LoadDictionaryFromLines( open(OPTIONS.oem_source).readlines()) - metadata = { - "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict, - OPTIONS.source_info_dict), - "ota-type": "FILE", - } + if OPTIONS.override_prop: + metadata = { + "ota-type": "FILE", + } + else: + metadata = { + "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict, + OPTIONS.source_info_dict), + "ota-type": "FILE", + } post_timestamp = GetBuildProp("ro.build.date.utc", OPTIONS.target_info_dict) pre_timestamp = GetBuildProp("ro.build.date.utc", OPTIONS.source_info_dict) @@ -1526,7 +1688,9 @@ def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip): source_zip=source_zip, source_version=source_version, target_zip=target_zip, + input_zip=target_zip, target_version=target_version, + input_version=target_version, output_zip=output_zip, script=script, metadata=metadata, @@ -1540,23 +1704,24 @@ def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip): else: vendor_diff = None - target_fp = CalculateFingerprint(oem_props, oem_dict, - OPTIONS.target_info_dict) - source_fp = 
CalculateFingerprint(oem_props, oem_dict, - OPTIONS.source_info_dict) + if not OPTIONS.override_prop: + target_fp = CalculateFingerprint(oem_props, oem_dict, + OPTIONS.target_info_dict) + source_fp = CalculateFingerprint(oem_props, oem_dict, + OPTIONS.source_info_dict) - if oem_props is None: - script.AssertSomeFingerprint(source_fp, target_fp) - else: - script.AssertSomeThumbprint( - GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict), - GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict)) + if oem_props is None: + script.AssertSomeFingerprint(source_fp, target_fp) + else: + script.AssertSomeThumbprint( + GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict), + GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict)) - metadata["pre-build"] = source_fp - metadata["post-build"] = target_fp - metadata["pre-build-incremental"] = GetBuildProp( + metadata["pre-build"] = source_fp + metadata["post-build"] = target_fp + metadata["pre-build-incremental"] = GetBuildProp( "ro.build.version.incremental", OPTIONS.source_info_dict) - metadata["post-build-incremental"] = GetBuildProp( + metadata["post-build-incremental"] = GetBuildProp( "ro.build.version.incremental", OPTIONS.target_info_dict) source_boot = common.GetBootableImage( @@ -1616,6 +1781,9 @@ def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip): script.AppendExtra(""" if get_stage("%(bcb_dev)s") == "2/3" then """ % bcb_dev) + + # Stage 2/3: Write recovery image to /recovery (currently running /boot). + script.Comment("Stage 2/3") script.AppendExtra("sleep(20);\n") script.WriteRawImage("/recovery", "recovery.img") script.AppendExtra(""" @@ -1624,9 +1792,14 @@ def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip): else if get_stage("%(bcb_dev)s") != "3/3" then """ % bcb_dev) + # Stage 1/3: (a) Verify the current system. 
+ script.Comment("Stage 1/3") + # Dump fingerprints - script.Print("Source: %s" % (source_fp,)) - script.Print("Target: %s" % (target_fp,)) + script.Print("Source: %s" % CalculateFingerprint( + oem_props, oem_dict, OPTIONS.source_info_dict)) + script.Print("Target: %s" % CalculateFingerprint( + oem_props, oem_dict, OPTIONS.target_info_dict)) script.Print("Verifying current system...") @@ -1645,20 +1818,24 @@ def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip): size.append(vendor_diff.largest_source_size) if updating_boot: + boot_type, boot_device = common.GetTypeAndDevice("/boot", OPTIONS.info_dict) d = common.Difference(target_boot, source_boot) _, _, d = d.ComputePatch() - print "boot target: %d source: %d diff: %d" % ( - target_boot.size, source_boot.size, len(d)) + if d is None: + include_full_boot = True + common.ZipWriteStr(output_zip, "boot.img", target_boot.data) + else: + include_full_boot = False - common.ZipWriteStr(output_zip, "patch/boot.img.p", d) + print "boot target: %d source: %d diff: %d" % ( + target_boot.size, source_boot.size, len(d)) - boot_type, boot_device = common.GetTypeAndDevice( - "/boot", OPTIONS.source_info_dict) + common.ZipWriteStr(output_zip, "patch/boot.img.p", d) - script.PatchCheck("%s:%s:%d:%s:%d:%s" % - (boot_type, boot_device, - source_boot.size, source_boot.sha1, - target_boot.size, target_boot.sha1)) + script.PatchCheck("%s:%s:%d:%s:%d:%s" % + (boot_type, boot_device, + source_boot.size, source_boot.sha1, + target_boot.size, target_boot.sha1)) so_far += source_boot.size size.append(target_boot.size) @@ -1668,13 +1845,18 @@ def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip): device_specific.IncrementalOTA_VerifyEnd() if OPTIONS.two_step: - script.WriteRawImage("/boot", "recovery.img") + # Stage 1/3: (b) Write recovery image to /boot. 
+ _WriteRecoveryImageToBoot(script, output_zip) + script.AppendExtra(""" set_stage("%(bcb_dev)s", "2/3"); reboot_now("%(bcb_dev)s", ""); else """ % bcb_dev) + # Stage 3/3: Make changes. + script.Comment("Stage 3/3") + script.Comment("---- start making changes here ----") device_specific.IncrementalOTA_InstallBegin() @@ -1704,20 +1886,23 @@ def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip): if not OPTIONS.two_step: if updating_boot: - # Produce the boot image by applying a patch to the current - # contents of the boot partition, and write it back to the - # partition. - script.Print("Patching boot image...") - script.ApplyPatch("%s:%s:%d:%s:%d:%s" - % (boot_type, boot_device, - source_boot.size, source_boot.sha1, - target_boot.size, target_boot.sha1), - "-", - target_boot.size, target_boot.sha1, - source_boot.sha1, "patch/boot.img.p") - so_far += target_boot.size - script.SetProgress(so_far / total_patch_size) - print "boot image changed; including." + if include_full_boot: + print "boot image changed; including full." + script.Print("Installing boot image...") + script.WriteRawImage("/boot", "boot.img") + else: + # Produce the boot image by applying a patch to the current + # contents of the boot partition, and write it back to the + # partition. + print "boot image changed; including patch." + script.Print("Patching boot image...") + script.ApplyPatch("%s:%s:%d:%s:%d:%s" + % (boot_type, boot_device, + source_boot.size, source_boot.sha1, + target_boot.size, target_boot.sha1), + "-", + target_boot.size, target_boot.sha1, + source_boot.sha1, "patch/boot.img.p") else: print "boot image unchanged; skipping." 
@@ -1949,6 +2134,8 @@ def option_handler(o, a): OPTIONS.payload_signer = a elif o == "--payload_signer_args": OPTIONS.payload_signer_args = shlex.split(a) + elif o in ("--backup"): + OPTIONS.backuptool = bool(a.lower() == 'true') else: return False return True @@ -1980,6 +2167,7 @@ def option_handler(o, a): "log_diff=", "payload_signer=", "payload_signer_args=", + "backup=" ], extra_option_handler=option_handler) if len(args) != 2: @@ -2004,6 +2192,11 @@ def option_handler(o, a): OPTIONS.info_dict = common.LoadInfoDict(input_zip) common.ZipClose(input_zip) + if "ota_override_device" in OPTIONS.info_dict: + OPTIONS.override_device = OPTIONS.info_dict.get("ota_override_device") + if "ota_override_prop" in OPTIONS.info_dict: + OPTIONS.override_prop = OPTIONS.info_dict.get("ota_override_prop") == "true" + ab_update = OPTIONS.info_dict.get("ab_update") == "true" if ab_update: diff --git a/tools/releasetools/rangelib.py b/tools/releasetools/rangelib.py index c9bd375e91d..ddc9bedb159 100644 --- a/tools/releasetools/rangelib.py +++ b/tools/releasetools/rangelib.py @@ -46,6 +46,8 @@ def __ne__(self, other): def __nonzero__(self): return bool(self.data) + def __bool__(self): + return self.__nonzero__() def __str__(self): if not self.data: diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py index 0f20f826afa..59d486ba729 100755 --- a/tools/releasetools/sign_target_files_apks.py +++ b/tools/releasetools/sign_target_files_apks.py @@ -80,14 +80,15 @@ with keyid of the cert pointed by . """ +from __future__ import print_function + import sys if sys.hexversion < 0x02070000: - print >> sys.stderr, "Python 2.7 or newer is required." 
+ print("Python 2.7 or newer is required.", file=sys.stderr) sys.exit(1) import base64 -import cStringIO import copy import errno import os @@ -97,6 +98,11 @@ import tempfile import zipfile +try: + from cStringIO import StringIO +except ImportError: + from io import StringIO + import add_img_to_target_files import common @@ -114,11 +120,11 @@ def GetApkCerts(tf_zip): certmap = common.ReadApkCerts(tf_zip) # apply the key remapping to the contents of the file - for apk, cert in certmap.iteritems(): + for apk, cert in certmap.items(): certmap[apk] = OPTIONS.key_map.get(cert, cert) # apply all the -e options, overriding anything in the file - for apk, cert in OPTIONS.extra_apks.iteritems(): + for apk, cert in OPTIONS.extra_apks.items(): if not cert: cert = "PRESIGNED" certmap[apk] = OPTIONS.key_map.get(cert, cert) @@ -136,10 +142,10 @@ def CheckAllApksSigned(input_tf_zip, apk_key_map): if name not in apk_key_map: unknown_apks.append(name) if unknown_apks: - print "ERROR: no key specified for:\n\n ", - print "\n ".join(unknown_apks) - print "\nUse '-e =' to specify a key (which may be an" - print "empty string to not sign this apk)." + print("ERROR: no key specified for:\n\n ", end=' ') + print("\n ".join(unknown_apks)) + print("\nUse '-e =' to specify a key (which may be an") + print("empty string to not sign this apk).") sys.exit(1) @@ -219,13 +225,13 @@ def write_to_temp(fn, attr, data): name = os.path.basename(info.filename) key = apk_key_map[name] if key not in common.SPECIAL_CERT_STRINGS: - print " signing: %-*s (%s)" % (maxsize, name, key) + print(" signing: %-*s (%s)" % (maxsize, name, key)) signed_data = SignApk(data, key, key_passwords[key], platform_api_level, codename_to_api_level_map) common.ZipWriteStr(output_tf_zip, out_info, signed_data) else: # an APK we're not supposed to sign. - print "NOT signing: %s" % (name,) + print("NOT signing: %s" % name) common.ZipWriteStr(output_tf_zip, out_info, data) # System properties. 
@@ -234,7 +240,7 @@ def write_to_temp(fn, attr, data): "BOOT/RAMDISK/default.prop", "ROOT/default.prop", "RECOVERY/RAMDISK/default.prop"): - print "rewriting %s:" % (info.filename,) + print("rewriting %s:" % info.filename) new_data = RewriteProps(data, misc_info) common.ZipWriteStr(output_tf_zip, out_info, new_data) if info.filename in ("BOOT/RAMDISK/default.prop", @@ -243,7 +249,11 @@ def write_to_temp(fn, attr, data): write_to_temp(info.filename, info.external_attr, new_data) elif info.filename.endswith("mac_permissions.xml"): - print "rewriting %s with new keys." % (info.filename,) + print("rewriting %s with new keys." % info.filename) + new_data = ReplaceCerts(data) + common.ZipWriteStr(output_tf_zip, out_info, new_data) + elif info.filename.startswith("SYSTEM/etc/permissions/"): + print("rewriting %s with new keys." % info.filename) new_data = ReplaceCerts(data) common.ZipWriteStr(output_tf_zip, out_info, new_data) @@ -354,10 +364,10 @@ def ReplaceCerts(data): """Given a string of data, replace all occurrences of a set of X509 certs with a newer set of X509 certs and return the updated data string.""" - for old, new in OPTIONS.key_map.iteritems(): + for old, new in OPTIONS.key_map.items(): try: if OPTIONS.verbose: - print " Replacing %s.x509.pem with %s.x509.pem" % (old, new) + print(" Replacing %s.x509.pem with %s.x509.pem" % (old, new)) f = open(old + ".x509.pem") old_cert16 = base64.b16encode(common.ParseCertificate(f.read())).lower() f.close() @@ -368,14 +378,14 @@ def ReplaceCerts(data): pattern = "\\b"+old_cert16+"\\b" (data, num) = re.subn(pattern, new_cert16, data, flags=re.IGNORECASE) if OPTIONS.verbose: - print " Replaced %d occurence(s) of %s.x509.pem with " \ - "%s.x509.pem" % (num, old, new) + print(" Replaced %d occurence(s) of %s.x509.pem with " + "%s.x509.pem" % (num, old, new)) except IOError as e: if e.errno == errno.ENOENT and not OPTIONS.verbose: continue - print " Error accessing %s. %s. Skip replacing %s.x509.pem " \ - "with %s.x509.pem." 
% (e.filename, e.strerror, old, new) + print(" Error accessing %s. %s. Skip replacing %s.x509.pem " + "with %s.x509.pem." % (e.filename, e.strerror, old, new)) return data @@ -415,7 +425,7 @@ def RewriteProps(data, misc_info): value = "/".join(pieces) elif key == "ro.build.description": pieces = value.split(" ") - assert len(pieces) == 5 + #assert len(pieces) == 5 pieces[-1] = EditTags(pieces[-1]) value = " ".join(pieces) elif key == "ro.build.tags": @@ -428,8 +438,8 @@ def RewriteProps(data, misc_info): value = " ".join(value) line = key + "=" + value if line != original_line: - print " replace: ", original_line - print " with: ", line + print(" replace: ", original_line) + print(" with: ", line) output.append(line) return "\n".join(output) + "\n" @@ -445,7 +455,7 @@ def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info): extra_recovery_keys = [OPTIONS.key_map.get(k, k) + ".x509.pem" for k in extra_recovery_keys.split()] if extra_recovery_keys: - print "extra recovery-only key(s): " + ", ".join(extra_recovery_keys) + print("extra recovery-only key(s): " + ", ".join(extra_recovery_keys)) else: extra_recovery_keys = [] @@ -459,8 +469,8 @@ def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info): mapped_keys.append(OPTIONS.key_map.get(k, k) + ".x509.pem") if mapped_keys: - print "using:\n ", "\n ".join(mapped_keys) - print "for OTA package verification" + print("using:\n ", "\n ".join(mapped_keys)) + print("for OTA package verification") else: devkey = misc_info.get("default_system_dev_certificate", "build/target/product/security/testkey") @@ -472,11 +482,11 @@ def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info): # recovery uses a version of the key that has been slightly # predigested (by DumpPublicKey.java) and put in res/keys. # extra_recovery_keys are used only in recovery. 
- - p = common.Run(["java", "-jar", - os.path.join(OPTIONS.search_path, "framework", "dumpkey.jar")] - + mapped_keys + extra_recovery_keys, - stdout=subprocess.PIPE) + cmd = ([OPTIONS.java_path] + OPTIONS.java_args + + ["-jar", + os.path.join(OPTIONS.search_path, "framework", "dumpkey.jar")] + + mapped_keys + extra_recovery_keys) + p = common.Run(cmd, stdout=subprocess.PIPE) new_recovery_keys, _ = p.communicate() if p.returncode != 0: raise common.ExternalError("failed to run dumpkeys") @@ -488,11 +498,19 @@ def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info): recovery_keys_location = "RECOVERY/RAMDISK/res/keys" common.ZipWriteStr(output_tf_zip, recovery_keys_location, new_recovery_keys) + # Save the base64 key representation in the update for key-change + # validations + p = common.Run(["python", "build/tools/getb64key.py", mapped_keys[0]], + stdout=subprocess.PIPE) + data, _ = p.communicate() + if p.returncode == 0: + common.ZipWriteStr(output_tf_zip, "META/releasekey.txt", data) + # SystemUpdateActivity uses the x509.pem version of the keys, but # put into a zipfile system/etc/security/otacerts.zip. # We DO NOT include the extra_recovery_keys (if any) here. 
- temp_file = cStringIO.StringIO() + temp_file = StringIO() certs_zip = zipfile.ZipFile(temp_file, "w") for k in mapped_keys: common.ZipWrite(certs_zip, k) @@ -526,7 +544,7 @@ def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info): def ReplaceVerityPublicKey(targetfile_zip, filename, key_path): - print "Replacing verity public key with %s" % key_path + print("Replacing verity public key with %s" % key_path) with open(key_path) as f: data = f.read() common.ZipWriteStr(targetfile_zip, filename, data) @@ -535,7 +553,7 @@ def ReplaceVerityPublicKey(targetfile_zip, filename, key_path): def ReplaceVerityPrivateKey(targetfile_input_zip, targetfile_output_zip, misc_info, key_path): - print "Replacing verity private key with %s" % key_path + print("Replacing verity private key with %s" % key_path) current_key = misc_info["verity_key"] original_misc_info = targetfile_input_zip.read("META/misc_info.txt") new_misc_info = original_misc_info.replace(current_key, key_path) @@ -713,16 +731,20 @@ def option_handler(o, a): common.ZipClose(input_zip) common.ZipClose(output_zip) - add_img_to_target_files.AddImagesToTargetFiles(args[1]) + # Skip building userdata.img and cache.img when signing the target files. + new_args = ["--is_signing", args[1]] + add_img_to_target_files.main(new_args) - print "done." + print("done.") if __name__ == '__main__': try: main(sys.argv[1:]) - except common.ExternalError, e: - print - print " ERROR: %s" % (e,) - print + except common.ExternalError as e: + print() + print(" ERROR: %s" % e) + print() sys.exit(1) + finally: + common.Cleanup() diff --git a/tools/releasetools/sign_zip.py b/tools/releasetools/sign_zip.py new file mode 100755 index 00000000000..c40b1b42c36 --- /dev/null +++ b/tools/releasetools/sign_zip.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python +# +# Copyright (C) 2017 The LineageOS Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Signs the given zip with the given key producing a new zip. + +Usage: sign_release_zip [flags] input_zip output_zip + + -k (--package_key) Key to use to sign the package (default is + "build/target/product/security/testkey"). +""" +import sys + +import common + +OPTIONS = common.OPTIONS + +OPTIONS.package_key = "build/target/product/security/testkey" + +def SignOutput(input_zip_name, output_zip_name): + key_passwords = common.GetKeyPasswords([OPTIONS.package_key]) + pw = key_passwords[OPTIONS.package_key] + + common.SignFile(input_zip_name, output_zip_name, OPTIONS.package_key, pw, + whole_file=True) + + +def main(argv): + + def option_handler(o, a): + if o in ("-k", "--package_key"): + OPTIONS.package_key = a + else: + return False + return True + + args = common.ParseOptions(argv, __doc__, + extra_opts="k:", + extra_long_opts=[ + "package_key=", + ], extra_option_handler=option_handler) + if len(args) != 2: + common.Usage(__doc__) + sys.exit(1) + + SignOutput(args[0], args[1]) + + +if __name__ == '__main__': + try: + main(sys.argv[1:]) + except common.ExternalError as e: + print() + print(" ERROR: %s" % e) + print() + sys.exit(1) diff --git a/tools/releasetools/sparse_img.py b/tools/releasetools/sparse_img.py index 4ba7560dfc2..c4ac37159d7 100644 --- a/tools/releasetools/sparse_img.py +++ b/tools/releasetools/sparse_img.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import print_function + import bisect import os import struct diff --git a/tools/releasetools/test_blockimgdiff.py b/tools/releasetools/test_blockimgdiff.py new file mode 100644 index 00000000000..03e8c8beeab --- /dev/null +++ b/tools/releasetools/test_blockimgdiff.py @@ -0,0 +1,77 @@ +# +# Copyright (C) 2016 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from __future__ import print_function + +import common +import unittest + +from collections import OrderedDict +from blockimgdiff import BlockImageDiff, EmptyImage, DataImage, Transfer +from rangelib import RangeSet + +class BlockImageDiffTest(unittest.TestCase): + + def test_GenerateDigraphOrder(self): + """Make sure GenerateDigraph preserves the order. + + t0: <0-5> => <...> + t1: <0-7> => <...> + t2: <0-4> => <...> + t3: <...> => <0-10> + + t0, t1 and t2 must go before t3, i.e. t3.goes_after = + { t0:..., t1:..., t2:... }. But the order of t0-t2 must be preserved. 
+ """ + + src = EmptyImage() + tgt = EmptyImage() + block_image_diff = BlockImageDiff(tgt, src) + + transfers = block_image_diff.transfers + t0 = Transfer( + "t1", "t1", RangeSet("10-15"), RangeSet("0-5"), "move", transfers) + t1 = Transfer( + "t2", "t2", RangeSet("20-25"), RangeSet("0-7"), "move", transfers) + t2 = Transfer( + "t3", "t3", RangeSet("30-35"), RangeSet("0-4"), "move", transfers) + t3 = Transfer( + "t4", "t4", RangeSet("0-10"), RangeSet("40-50"), "move", transfers) + + block_image_diff.GenerateDigraph() + t3_goes_after_copy = t3.goes_after.copy() + + # Elements in the set must be in the transfer evaluation order. + elements = list(t3_goes_after_copy) + self.assertEqual(t0, elements[0]) + self.assertEqual(t1, elements[1]) + self.assertEqual(t2, elements[2]) + + # Now switch the order of t0, t1 and t2. + transfers[0], transfers[1], transfers[2] = ( + transfers[2], transfers[0], transfers[1]) + t3.goes_after.clear() + t3.goes_before.clear() + block_image_diff.GenerateDigraph() + + # The goes_after must be different from last run. + self.assertNotEqual(t3_goes_after_copy, t3.goes_after) + + # Assert that each element must agree with the transfer order. + elements = list(t3.goes_after) + self.assertEqual(t2, elements[0]) + self.assertEqual(t0, elements[1]) + self.assertEqual(t1, elements[2]) diff --git a/tools/repopick.py b/tools/repopick.py new file mode 100755 index 00000000000..38b20afb971 --- /dev/null +++ b/tools/repopick.py @@ -0,0 +1,415 @@ +#!/usr/bin/env python +# +# Copyright (C) 2013-15 The CyanogenMod Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# +# Run repopick.py -h for a description of this utility. +# + +from __future__ import print_function + +import sys +import json +import os +import subprocess +import re +import argparse +import textwrap +from xml.etree import ElementTree + +try: + # For python3 + import urllib.error + import urllib.request +except ImportError: + # For python2 + import imp + import urllib2 + urllib = imp.new_module('urllib') + urllib.error = urllib2 + urllib.request = urllib2 + + +# Verifies whether pathA is a subdirectory (or the same) as pathB +def is_subdir(a, b): + a = os.path.realpath(a) + '/' + b = os.path.realpath(b) + '/' + return b == a[:len(b)] + + +def fetch_query_via_ssh(remote_url, query): + """Given a remote_url and a query, return the list of changes that fit it + This function is slightly messy - the ssh api does not return data in the same structure as the HTTP REST API + We have to get the data, then transform it to match what we're expecting from the HTTP REST API""" + if remote_url.count(':') == 2: + (uri, userhost, port) = remote_url.split(':') + userhost = userhost[2:] + elif remote_url.count(':') == 1: + (uri, userhost) = remote_url.split(':') + userhost = userhost[2:] + port = 29418 + else: + raise Exception('Malformed URI: Expecting ssh://[user@]host[:port]') + + + out = subprocess.check_output(['ssh', '-x', '-p{0}'.format(port), userhost, 'gerrit', 'query', '--format=JSON --patch-sets --current-patch-set', query]) + if not hasattr(out, 'encode'): + out = out.decode() + reviews = [] + for line in out.split('\n'): + try: + data = 
json.loads(line) + # make our data look like the http rest api data + review = { + 'branch': data['branch'], + 'change_id': data['id'], + 'current_revision': data['currentPatchSet']['revision'], + 'number': int(data['number']), + 'revisions': {patch_set['revision']: { + 'number': int(patch_set['number']), + 'fetch': { + 'ssh': { + 'ref': patch_set['ref'], + 'url': 'ssh://{0}:{1}/{2}'.format(userhost, port, data['project']) + } + } + } for patch_set in data['patchSets']}, + 'subject': data['subject'], + 'project': data['project'], + 'status': data['status'] + } + reviews.append(review) + except: + pass + args.quiet or print('Found {0} reviews'.format(len(reviews))) + return reviews + + +def fetch_query_via_http(remote_url, query): + + """Given a query, fetch the change numbers via http""" + url = '{0}/changes/?q={1}&o=CURRENT_REVISION&o=ALL_REVISIONS'.format(remote_url, query) + data = urllib.request.urlopen(url).read().decode('utf-8') + reviews = json.loads(data[5:]) + + for review in reviews: + review['number'] = review.pop('_number') + + return reviews + + +def fetch_query(remote_url, query): + """Wrapper for fetch_query_via_proto functions""" + if remote_url[0:3] == 'ssh': + return fetch_query_via_ssh(remote_url, query) + elif remote_url[0:4] == 'http': + return fetch_query_via_http(remote_url, query.replace(' ', '+')) + else: + raise Exception('Gerrit URL should be in the form http[s]://hostname/ or ssh://[user@]host[:port]') + +if __name__ == '__main__': + # Default to AICP Gerrit + default_gerrit = 'http://gerrit.aicp-rom.com' + + parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, description=textwrap.dedent('''\ + repopick.py is a utility to simplify the process of cherry picking + patches from AICP's Gerrit instance (or any gerrit instance of your choosing) + + Given a list of change numbers, repopick will cd into the project path + and cherry pick the latest patch available. 
+ + With the --start-branch argument, the user can specify that a branch + should be created before cherry picking. This is useful for + cherry-picking many patches into a common branch which can be easily + abandoned later (good for testing other's changes.) + + The --abandon-first argument, when used in conjunction with the + --start-branch option, will cause repopick to abandon the specified + branch in all repos first before performing any cherry picks.''')) + parser.add_argument('change_number', nargs='*', help='change number to cherry pick. Use {change number}/{patchset number} to get a specific revision.') + parser.add_argument('-i', '--ignore-missing', action='store_true', help='do not error out if a patch applies to a missing directory') + parser.add_argument('-s', '--start-branch', nargs=1, help='start the specified branch before cherry picking') + parser.add_argument('-r', '--reset', action='store_true', help='reset to initial state (abort cherry-pick) if there is a conflict') + parser.add_argument('-a', '--abandon-first', action='store_true', help='before cherry picking, abandon the branch specified in --start-branch') + parser.add_argument('-b', '--auto-branch', action='store_true', help='shortcut to "--start-branch auto --abandon-first --ignore-missing"') + parser.add_argument('-q', '--quiet', action='store_true', help='print as little as possible') + parser.add_argument('-v', '--verbose', action='store_true', help='print extra information to aid in debug') + parser.add_argument('-f', '--force', action='store_true', help='force cherry pick even if change is closed') + parser.add_argument('-p', '--pull', action='store_true', help='execute pull instead of cherry-pick') + parser.add_argument('-P', '--path', help='use the specified path for the change') + parser.add_argument('-t', '--topic', help='pick all commits from a specified topic') + parser.add_argument('-Q', '--query', help='pick all commits using the specified query') + parser.add_argument('-g', 
'--gerrit', default=default_gerrit, help='Gerrit Instance to use. Form proto://[user@]host[:port]') + parser.add_argument('-e', '--exclude', nargs=1, help='exclude a list of commit numbers separated by a ,') + args = parser.parse_args() + if not args.start_branch and args.abandon_first: + parser.error('if --abandon-first is set, you must also give the branch name with --start-branch') + if args.auto_branch: + args.abandon_first = True + args.ignore_missing = True + if not args.start_branch: + args.start_branch = ['auto'] + if args.quiet and args.verbose: + parser.error('--quiet and --verbose cannot be specified together') + + if (1 << bool(args.change_number) << bool(args.topic) << bool(args.query)) != 2: + parser.error('One (and only one) of change_number, topic, and query are allowed') + + # Change current directory to the top of the tree + if 'ANDROID_BUILD_TOP' in os.environ: + top = os.environ['ANDROID_BUILD_TOP'] + + if not is_subdir(os.getcwd(), top): + sys.stderr.write('ERROR: You must run this tool from within $ANDROID_BUILD_TOP!\n') + sys.exit(1) + os.chdir(os.environ['ANDROID_BUILD_TOP']) + + # Sanity check that we are being run from the top level of the tree + if not os.path.isdir('.repo'): + sys.stderr.write('ERROR: No .repo directory found. 
Please run this from the top of your tree.\n') + sys.exit(1) + + # If --abandon-first is given, abandon the branch before starting + if args.abandon_first: + # Determine if the branch already exists; skip the abandon if it does not + plist = subprocess.check_output(['repo', 'info']) + if not hasattr(plist, 'encode'): + plist = plist.decode() + needs_abandon = False + for pline in plist.splitlines(): + matchObj = re.match(r'Local Branches.*\[(.*)\]', pline) + if matchObj: + local_branches = re.split('\s*,\s*', matchObj.group(1)) + if any(args.start_branch[0] in s for s in local_branches): + needs_abandon = True + + if needs_abandon: + # Perform the abandon only if the branch already exists + if not args.quiet: + print('Abandoning branch: %s' % args.start_branch[0]) + subprocess.check_output(['repo', 'abandon', args.start_branch[0]]) + if not args.quiet: + print('') + + # Get the master manifest from repo + # - convert project name and revision to a path + project_name_to_data = {} + manifest = subprocess.check_output(['repo', 'manifest']) + xml_root = ElementTree.fromstring(manifest) + projects = xml_root.findall('project') + remotes = xml_root.findall('remote') + default_revision = xml_root.findall('default')[0].get('revision') + + #dump project data into a list of dicts with the following data: + #{project: {path, revision}} + + for project in projects: + name = project.get('name') + path = project.get('path') + revision = project.get('revision') + if revision is None: + for remote in remotes: + if remote.get('name') == project.get('remote'): + revision = remote.get('revision') + if revision is None: + revision = default_revision + + if not name in project_name_to_data: + project_name_to_data[name] = {} + revision = revision.split('refs/heads/')[-1] + project_name_to_data[name][revision] = path + + # get data on requested changes + reviews = [] + change_numbers = [] + if args.topic: + reviews = fetch_query(args.gerrit, 'topic:{0}'.format(args.topic)) + 
change_numbers = sorted([str(r['number']) for r in reviews]) + if args.query: + reviews = fetch_query(args.gerrit, args.query) + change_numbers = sorted([str(r['number']) for r in reviews]) + if args.change_number: + for c in args.change_number: + if '-' in c: + templist = c.split('-') + for i in range(int(templist[0]), int(templist[1]) + 1): + change_numbers.append(str(i)) + else: + change_numbers.append(c) + reviews = fetch_query(args.gerrit, ' OR '.join('change:{0}'.format(x.split('/')[0]) for x in change_numbers)) + + # make list of things to actually merge + mergables = [] + + # If --exclude is given, create the list of commits to ignore + exclude = [] + if args.exclude: + exclude = args.exclude[0].split(',') + + for change in change_numbers: + patchset = None + if '/' in change: + (change, patchset) = change.split('/') + + if change in exclude: + continue + + change = int(change) + review = next((x for x in reviews if x['number'] == change), None) + if review is None: + print('Change %d not found, skipping' % change) + continue + + mergables.append({ + 'subject': review['subject'], + 'project': review['project'], + 'branch': review['branch'], + 'change_id': review['change_id'], + 'change_number': review['number'], + 'status': review['status'], + 'fetch': None + }) + mergables[-1]['fetch'] = review['revisions'][review['current_revision']]['fetch'] + mergables[-1]['id'] = change + if patchset: + try: + mergables[-1]['fetch'] = [x['fetch'] for x in review['revisions'] if x['_number'] == patchset][0] + mergables[-1]['id'] = '{0}/{1}'.format(change, patchset) + except (IndexError, ValueError): + args.quiet or print('ERROR: The patch set {0}/{1} could not be found, using CURRENT_REVISION instead.'.format(change, patchset)) + + for item in mergables: + args.quiet or print('Applying change number {0}...'.format(item['id'])) + # Check if change is open and exit if it's not, unless -f is specified + if (item['status'] != 'OPEN' and item['status'] != 'NEW' and 
item['status'] != 'DRAFT') and not args.query: + if args.force: + print('!! Force-picking a closed change !!\n') + else: + print('Change status is ' + item['status'] + '. Skipping the cherry pick.\nUse -f to force this pick.') + continue + + # Convert the project name to a project path + # - check that the project path exists + project_path = None + + if item['project'] in project_name_to_data and item['branch'] in project_name_to_data[item['project']]: + project_path = project_name_to_data[item['project']][item['branch']] + elif args.path: + project_path = args.path + elif args.ignore_missing: + print('WARNING: Skipping {0} since there is no project directory for: {1}\n'.format(item['id'], item['project'])) + continue + else: + sys.stderr.write('ERROR: For {0}, could not determine the project path for project {1}\n'.format(item['id'], item['project'])) + sys.exit(1) + + # If --start-branch is given, create the branch (more than once per path is okay; repo ignores gracefully) + if args.start_branch: + subprocess.check_output(['repo', 'start', args.start_branch[0], project_path]) + + # Determine the maximum commits to check already picked changes + check_picked_count = 10 + branch_commits_count = int(subprocess.check_output(['git', 'rev-list', '--count', 'HEAD'], cwd=project_path)) + if branch_commits_count <= check_picked_count: + check_picked_count = branch_commits_count - 1 + + # Check if change is already picked to HEAD...HEAD~check_picked_count + found_change = False + for i in range(0, check_picked_count): + if subprocess.call(['git', 'cat-file', '-e', 'HEAD~{0}'.format(i)], cwd=project_path, stderr=open(os.devnull, 'wb')): + continue + output = subprocess.check_output(['git', 'show', '-q', 'HEAD~{0}'.format(i)], cwd=project_path).split() + if 'Change-Id:' in output: + head_change_id = '' + for j,t in enumerate(reversed(output)): + if t == 'Change-Id:': + head_change_id = output[len(output) - j] + break + if head_change_id.strip() == item['change_id']: + 
print('Skipping {0} - already picked in {1} as HEAD~{2}'.format(item['id'], project_path, i)) + found_change = True + break + if found_change: + continue + + # Print out some useful info + if not args.quiet: + print('--> Subject: "{0}"'.format(item['subject'])) + print('--> Project path: {0}'.format(project_path)) + print('--> Change number: {0} (Patch Set {0})'.format(item['id'])) + + if 'anonymous http' in item['fetch']: + method = 'anonymous http' + else: + method = 'ssh' + + # Try fetching from GitHub first if using default gerrit + if args.gerrit == default_gerrit: + if args.verbose: + print('Trying to fetch the change from GitHub') + + if args.pull: + cmd = ['git pull --no-edit github', item['fetch'][method]['ref']] + else: + cmd = ['git fetch github', item['fetch'][method]['ref']] + if args.quiet: + cmd.append('--quiet') + else: + print(cmd) + result = subprocess.call([' '.join(cmd)], cwd=project_path, shell=True) + FETCH_HEAD = '{0}/.git/FETCH_HEAD'.format(project_path) + if result != 0 and os.stat(FETCH_HEAD).st_size != 0: + print('ERROR: git command failed') + sys.exit(result) + # Check if it worked + if args.gerrit != default_gerrit or os.stat(FETCH_HEAD).st_size == 0: + # If not using the default gerrit or github failed, fetch from gerrit. 
+ if args.verbose: + if args.gerrit == default_gerrit: + print('Fetching from GitHub didn\'t work, trying to fetch the change from Gerrit') + else: + print('Fetching from {0}'.format(args.gerrit)) + + if args.pull: + cmd = ['git pull --no-edit', item['fetch'][method]['url'], item['fetch'][method]['ref']] + else: + cmd = ['git fetch', item['fetch'][method]['url'], item['fetch'][method]['ref']] + if args.quiet: + cmd.append('--quiet') + else: + print(cmd) + result = subprocess.call([' '.join(cmd)], cwd=project_path, shell=True) + if result != 0: + print('ERROR: git command failed') + sys.exit(result) + # Perform the cherry-pick + if not args.pull: + cmd = ['git cherry-pick FETCH_HEAD'] + if args.quiet: + cmd_out = open(os.devnull, 'wb') + else: + cmd_out = None + result = subprocess.call(cmd, cwd=project_path, shell=True, stdout=cmd_out, stderr=cmd_out) + if result != 0: + if args.reset: + print('ERROR: git command failed, aborting cherry-pick') + cmd = ['git cherry-pick --abort'] + subprocess.call(cmd, cwd=project_path, shell=True, stdout=cmd_out, stderr=cmd_out) + else: + print('ERROR: git command failed') + sys.exit(result) + if not args.quiet: + print('') diff --git a/tools/roomcleaner.py b/tools/roomcleaner.py new file mode 100755 index 00000000000..668a04e5d74 --- /dev/null +++ b/tools/roomcleaner.py @@ -0,0 +1,229 @@ +#!/usr/bin/env python +# Copyright (C) 2012 The CyanogenMod Project +# Copyright (C) 2012-2016 AICP Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import base64 +import filecmp +import json +import netrc +import os +import re +import sys +try: + # For python3 + import urllib.error + import urllib.parse + import urllib.request +except ImportError: + # For python2 + import imp + import urllib2 + import urlparse + urllib = imp.new_module('urllib') + urllib.error = urllib2 + urllib.parse = urlparse + urllib.request = urllib2 + +from xml.etree import ElementTree + +device_path = sys.argv[1] + +if device_path.endswith("/"): + device_path = device_path[:-1] + +try: + device = device_path[device_path.rfind("/") + 1:] +except: + device = device_path + +repositories = [] + +page = 1 + +local_manifests = r'.repo/local_manifests' +if not os.path.exists(local_manifests): os.makedirs(local_manifests) + +removal_manifest = ".repo/local_manifests/00_aicp_removals.xml" +tmp_removal_manifest = ".repo/local_manifests/tmp_aicp_removals.xml" + +def exists_in_tree(lm, repository): + for child in lm.getchildren(): + if child.attrib['name'].endswith(repository): + return child + return None + +# in-place prettyprint formatter +def indent(elem, level=0): + i = "\n" + level*" " + if len(elem): + if not elem.text or not elem.text.strip(): + elem.text = i + " " + if not elem.tail or not elem.tail.strip(): + elem.tail = i + for elem in elem: + indent(elem, level+1) + if not elem.tail or not elem.tail.strip(): + elem.tail = i + else: + if level and (not elem.tail or not elem.tail.strip()): + elem.tail = i + +def is_in_manifest(projectname): + try: + lm = ElementTree.parse(tmp_removal_manifest) + lm = lm.getroot() + except: + lm = ElementTree.Element("manifest") + + for localpath in lm.findall("remove-project"): + if localpath.get("name") == projectname: + return 1 + + return None + +def add_to_manifest_dependencies(repositories): + try: + lm = ElementTree.parse(tmp_removal_manifest) + lm = 
lm.getroot() + except: + lm = ElementTree.Element("manifest") + + for repository in repositories: + repo_name = repository['repository'] + existing_project = exists_in_tree(lm, repo_name) + if existing_project != None: + print('Remove-project for AICP/%s already exists' % (repo_name)) + continue + + print ('Adding remove-project to manifest: %s' % (repo_name)) + project = ElementTree.Element("remove-project", attrib = { "name": repo_name }) + + lm.append(project) + + indent(lm, 0) + raw_xml = ElementTree.tostring(lm).decode() + raw_xml = '\n' + raw_xml + + f = open(tmp_removal_manifest, 'w') + f.write(raw_xml) + f.close() + +def add_to_manifest(repositories): + try: + lm = ElementTree.parse(tmp_removal_manifest) + lm = lm.getroot() + except: + lm = ElementTree.Element("manifest") + + for repository in repositories: + repo_name = repository['repository'] + if exists_in_tree(lm, repo_name): + print('Remove-project for AICP/%s already exists' % (repo_name)) + continue + + print ('Adding remove-project to manifest: %s' % (repo_name)) + project = ElementTree.Element("remove-project", attrib = { "name": "AICP/%s" % (repo_name) }) + + lm.append(project) + + indent(lm, 0) + raw_xml = ElementTree.tostring(lm).decode() + raw_xml = '\n' + raw_xml + + f = open(tmp_removal_manifest, 'w') + f.write(raw_xml) + f.close() + +def fetch_dependencies(repo_path): + #print('Looking for required remove-projects') + dependencies_path = repo_path + '/aicp.removal.dependencies' + syncable_repos = [] + + if os.path.exists(dependencies_path): + dependencies_file = open(dependencies_path, 'r') + dependencies = json.loads(dependencies_file.read()) + fetch_list = [] + + for dependency in dependencies: + if not is_in_manifest("%s" % dependency['repository']): + fetch_list.append(dependency) + + dependencies_file.close() + + if len(fetch_list) > 0: + add_to_manifest_dependencies(fetch_list) + + if len(syncable_repos) > 0: + print('Syncing dependencies') + os.system('repo sync --force-sync %s' % ' 
'.join(syncable_repos)) + +def remove_removals(removal_manifest): + tmp_manifest_disable = ".repo/local_manifests/aicp_manifest.xml" + tmp_manifest_disabled = ".repo/local_manifests/tmp_disabled" + syncable_repos = [] + try: + lm = ElementTree.parse(removal_manifest) + lm = lm.getroot() + except: + lm = ElementTree.Element("manifest") + for child in lm.getchildren(): + syncable_repos.append(child.attrib['name']) + os.system('rm %s' % removal_manifest) + + if os.path.exists(tmp_manifest_disable): + os.system('mv %s %s' % (tmp_manifest_disable, tmp_manifest_disabled)) + for remove_dep in syncable_repos: + remove_dependency(os.popen('repo list -p %s' % remove_dep).read()[:-1], tmp_manifest_disabled) + if os.path.exists(tmp_manifest_disabled): + os.system('mv %s %s' % (tmp_manifest_disabled, tmp_manifest_disable)) + + os.system('repo sync --force-sync %s' % ' '.join(syncable_repos)) + +def remove_dependency(dependency_paths, manifest_path): + try: + lm = ElementTree.parse(manifest_path) + lm = lm.getroot() + except: + lm = ElementTree.Element("manifest") + for child in lm.getchildren(): + for dependency_path in dependency_paths.split('\n'): + if child.attrib['path'] == dependency_path: + print('Removing dependency %s from local manifest to avoid conflicts' % child.attrib['path']) + lm.remove(child) + f = open(manifest_path, 'w') + f.write(ElementTree.tostring(lm).decode()) + f.close() + +fetch_dependencies(device_path) + +if os.path.exists(removal_manifest): + if os.path.exists(tmp_removal_manifest): + if filecmp.cmp(removal_manifest, tmp_removal_manifest): + print('Leaving repo removals in local manifest unchanged') + os.system('rm %s' % tmp_removal_manifest) + else: + print('Updating repo removals in local manifest') + remove_removals(removal_manifest) + os.system('mv %s %s' % (tmp_removal_manifest, removal_manifest)) + else: + print('Removing previous repo removals from local manifest') + remove_removals(removal_manifest) +elif os.path.exists(tmp_removal_manifest): 
+ print('Created repo removals for local manifest') + os.system('mv %s %s' % (tmp_removal_manifest, removal_manifest)) +else: + print('No remove-project used or required') diff --git a/tools/roomservice.py b/tools/roomservice.py new file mode 100755 index 00000000000..32a5ac10f3b --- /dev/null +++ b/tools/roomservice.py @@ -0,0 +1,285 @@ +#!/usr/bin/env python +# Copyright (C) 2012 The CyanogenMod Project +# Copyright (C) 2012-2014 AICP Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import base64 +import json +import netrc +import os +import re +import sys +try: + # For python3 + import urllib.error + import urllib.parse + import urllib.request +except ImportError: + # For python2 + import imp + import urllib2 + import urlparse + urllib = imp.new_module('urllib') + urllib.error = urllib2 + urllib.parse = urlparse + urllib.request = urllib2 + +from xml.etree import ElementTree + +product = sys.argv[1] + +if len(sys.argv) > 2: + depsonly = sys.argv[2] +else: + depsonly = None + +try: + device = product[product.index("_") + 1:] +except: + device = product + +if not depsonly: + print("Device %s not found. Attempting to retrieve device repository from AICP Github (https://github.com/AICP)." 
% device) + +repositories = [] + +page = 1 +while not depsonly: + try: + request = urllib.request.Request("https://api.github.com/users/AICP/repos?page=%d" % page) + if os.environ.get('GITHUB_API_USERNAME') is not None and os.environ.get('GITHUB_API_TOKEN') is not None: + base64string = base64.encodestring(('%s:%s' % (os.environ.get('GITHUB_API_USERNAME'), os.environ.get('GITHUB_API_TOKEN'))).encode()).decode().replace('\n', '') + request.add_header("Authorization", "Basic %s" % base64string) + result = json.loads(urllib.request.urlopen(request).read().decode()) + except: + print("API Error") + break + if len(result) == 0: + break + for res in result: + repositories.append(res) + page = page + 1 + +local_manifests = r'.repo/local_manifests' +if not os.path.exists(local_manifests): os.makedirs(local_manifests) + +def exists_in_tree(lm, repository): + for child in lm.getchildren(): + if child.attrib['path'].endswith(repository): + return child + return None + +def exists_in_tree_device(lm, repository): + for child in lm.getchildren(): + if child.attrib['name'].endswith(repository): + return child + return None + +# in-place prettyprint formatter +def indent(elem, level=0): + i = "\n" + level*" " + if len(elem): + if not elem.text or not elem.text.strip(): + elem.text = i + " " + if not elem.tail or not elem.tail.strip(): + elem.tail = i + for elem in elem: + indent(elem, level+1) + if not elem.tail or not elem.tail.strip(): + elem.tail = i + else: + if level and (not elem.tail or not elem.tail.strip()): + elem.tail = i + +def get_from_manifest(devicename): + try: + lm = ElementTree.parse(".repo/local_manifests/aicp_manifest.xml") + lm = lm.getroot() + except: + lm = ElementTree.Element("manifest") + + for localpath in lm.findall("project"): + if re.search("device_.*_%s$" % device, localpath.get("name")): + return localpath.get("path") + + # Devices originally from AOSP are in the main manifest... 
+ try: + mm = ElementTree.parse(".repo/manifest.xml") + mm = mm.getroot() + except: + mm = ElementTree.Element("manifest") + + for localpath in mm.findall("project"): + if re.search("device_.*_%s$" % device, localpath.get("name")): + return localpath.get("path") + + return None + +def is_in_manifest(projectname, branch): + try: + lm = ElementTree.parse(".repo/local_manifests/aicp_manifest.xml") + lm = lm.getroot() + except: + lm = ElementTree.Element("manifest") + + for localpath in lm.findall("project"): + if localpath.get("name") == projectname and localpath.get("revision") == branch: + return 1 + + return None + +def add_to_manifest_dependencies(repositories): + try: + lm = ElementTree.parse(".repo/local_manifests/aicp_manifest.xml") + lm = lm.getroot() + except: + lm = ElementTree.Element("manifest") + + for repository in repositories: + repo_name = repository['repository'] + repo_target = repository['target_path'] + existing_project = exists_in_tree(lm, repo_target) + if existing_project != None: + if existing_project.attrib['name'] != repository['repository']: + print ('Updating dependency %s' % (repo_name)) + existing_project.set('name', repository['repository']) + if existing_project.attrib['revision'] == repository['branch']: + print ('AICP/%s already exists' % (repo_name)) + else: + print ('updating branch for %s to %s' % (repo_name, repository['branch'])) + existing_project.set('revision', repository['branch']) + continue + + print ('Adding dependency: %s -> %s' % (repo_name, repo_target)) + project = ElementTree.Element("project", attrib = { "path": repo_target, + "remote": "aicp", "name": repo_name, "revision": "n7.1" }) + + if 'branch' in repository: + project.set('revision',repository['branch']) + + lm.append(project) + + indent(lm, 0) + raw_xml = ElementTree.tostring(lm).decode() + raw_xml = '\n' + raw_xml + + f = open('.repo/local_manifests/aicp_manifest.xml', 'w') + f.write(raw_xml) + f.close() + +def add_to_manifest(repositories): + try: + lm = 
ElementTree.parse(".repo/local_manifests/aicp_manifest.xml") + lm = lm.getroot() + except: + lm = ElementTree.Element("manifest") + + for repository in repositories: + repo_name = repository['repository'] + repo_target = repository['target_path'] + if exists_in_tree(lm, repo_name): + print('AICP/%s already exists' % (repo_name)) + continue + + print('Adding dependency: AICP/%s -> %s' % (repo_name, repo_target)) + project = ElementTree.Element("project", attrib = { "path": repo_target, + "remote": "aicp", "name": "AICP/%s" % repo_name, "revision": "n7.1" }) + + if 'branch' in repository: + project.set('revision',repository['branch']) + elif fallback_branch: + print("Using fallback branch %s for %s" % (fallback_branch, repo_name)) + project.set('revision', fallback_branch) + else: + print("Using default branch for %s" % repo_name) + + lm.append(project) + + indent(lm, 0) + raw_xml = ElementTree.tostring(lm).decode() + raw_xml = '\n' + raw_xml + + f = open('.repo/local_manifests/aicp_manifest.xml', 'w') + f.write(raw_xml) + f.close() + +def fetch_dependencies(repo_path, fallback_branch = None, first_dependency = False): + print('Looking for dependencies') + dependencies_path = repo_path + '/aicp.dependencies' + syncable_repos = [] + + if first_dependency: + os.system('build/tools/roomcleaner.py %s' % repo_path) + + if os.path.exists(dependencies_path): + dependencies_file = open(dependencies_path, 'r') + dependencies = json.loads(dependencies_file.read()) + fetch_list = [] + + for dependency in dependencies: + if not is_in_manifest("%s" % dependency['repository'], "%s" % dependency['branch']): + fetch_list.append(dependency) + syncable_repos.append(dependency['target_path']) + + dependencies_file.close() + + if len(fetch_list) > 0: + print('Adding dependencies to manifest') + add_to_manifest_dependencies(fetch_list) + else: + print('Dependencies file not found, bailing out.') + + if len(syncable_repos) > 0: + print('Syncing dependencies') + os.system('repo sync 
--force-sync %s' % ' '.join(syncable_repos)) + + for deprepo in syncable_repos: + fetch_dependencies(deprepo) + +def has_branch(branches, revision): + return revision in [branch['name'] for branch in branches] + + +if depsonly: + repo_path = get_from_manifest(device) + if repo_path: + fetch_dependencies(repo_path, None, True) + else: + print("Trying dependencies-only mode on a non-existing device tree?") + + sys.exit() + +else: + for repository in repositories: + repo_name = repository['name'] + if repo_name.startswith("device_") and repo_name.endswith("_" + device): + print("Found repository: %s" % repository['name']) + + manufacturer = repo_name.replace("device_", "").replace("_" + device, "") + + repo_path = "device/%s/%s" % (manufacturer, device) + + add_to_manifest([{'repository':repo_name,'target_path':repo_path,'branch':'n7.1'}]) + + print("Syncing repository to retrieve project.") + os.system('repo sync --force-sync %s' % repo_path) + print("Repository synced!") + + fetch_dependencies(repo_path, None, True) + print("Done") + sys.exit() + +print("Repository for %s not found in the AICP Github repository list. If this is in error, you may need to manually add it to your local_manifests/aicp_manifest.xml." 
% device) diff --git a/tools/warn.py b/tools/warn.py index 80971235c71..c22b40b4816 100755 --- a/tools/warn.py +++ b/tools/warn.py @@ -1,11 +1,13 @@ #!/usr/bin/env python # This file uses the following encoding: utf-8 +from __future__ import print_function + import sys import re if len(sys.argv) == 1: - print 'usage: ' + sys.argv[0] + ' ' + print('usage: ' + sys.argv[0] + ' ') sys.exit() # if you add another level, don't forget to give it a color below @@ -34,10 +36,10 @@ def colorforseverity(sev): return 'grey' warnpatterns = [ - { 'category':'make', 'severity':severity.MEDIUM, 'members':[], 'option':'', - 'description':'make: overriding commands/ignoring old commands', - 'patterns':[r".*: warning: overriding commands for target .+", - r".*: warning: ignoring old commands for target .+"] }, +# { 'category':'make', 'severity':severity.MEDIUM, 'members':[], 'option':'', +# 'description':'make: overriding commands/ignoring old commands', +# 'patterns':[r".*: warning: overriding commands for target .+", +# r".*: warning: ignoring old commands for target .+"] }, { 'category':'C/C++', 'severity':severity.HIGH, 'members':[], 'option':'-Wimplicit-function-declaration', 'description':'Implicit function declaration', 'patterns':[r".*: warning: implicit declaration of function .+"] }, @@ -399,7 +401,7 @@ def colorforseverity(sev): row_colors = [ 'e0e0e0', 'd0d0d0' ] def output(text): - print text, + print(text, end=' ') def htmlbig(param): return '' + param + '' diff --git a/tools/zipalign/ZipAlign.cpp b/tools/zipalign/ZipAlign.cpp index a2dfd028911..899f852b63a 100644 --- a/tools/zipalign/ZipAlign.cpp +++ b/tools/zipalign/ZipAlign.cpp @@ -19,8 +19,9 @@ */ #include "ZipFile.h" -#include #include +#include +#include using namespace android; diff --git a/tools/zipalign/ZipEntry.cpp b/tools/zipalign/ZipEntry.cpp index 2f33e230547..7c215f4ec8f 100644 --- a/tools/zipalign/ZipEntry.cpp +++ b/tools/zipalign/ZipEntry.cpp @@ -23,10 +23,11 @@ #include "ZipEntry.h" #include -#include 
-#include #include #include +#include +#include +#include using namespace android;
%(key)s%(val)s