Merge korg/donut into korg/master
Conflicts: core/apicheck_msg_current.txt core/combo/linux-arm.mk core/prelink-linux-arm.map tools/droiddoc/src/ClassInfo.java
This commit is contained in:
@@ -75,6 +75,14 @@ $(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/com.an
|
|||||||
$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/APPS/PinyinIMEGoogleService_intermediates)
|
$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/APPS/PinyinIMEGoogleService_intermediates)
|
||||||
$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/com.android.inputmethod.pinyin.lib_intermediates)
|
$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/com.android.inputmethod.pinyin.lib_intermediates)
|
||||||
$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/APPS/PinyinIMEGoogleService_intermediates)
|
$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/APPS/PinyinIMEGoogleService_intermediates)
|
||||||
|
$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/framework_intermediates/src/telephony)
|
||||||
|
$(call add-clean-step, rm -rf $(OUT_DIR)/target/product/*/obj)
|
||||||
|
$(call add-clean-step, rm -f $(PRODUCT_OUT)/system/bin/tcpdump)
|
||||||
|
$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/framework_intermediates/src/location)
|
||||||
|
|
||||||
|
$(call add-clean-step, rm -rf $(OUT_DIR)/product/*/obj/SHARED_LIBRARIES/lib?camera_intermediates)
|
||||||
|
$(call add-clean-step, rm -rf $(OUT_DIR)/product/*/obj/STATIC_LIBRARIES/lib?camera_intermediates)
|
||||||
|
$(call add-clean-step, rm -rf $(OUT_DIR)/target/product/*/obj/SHARED_LIBRARIES/libwebcore_intermediates)
|
||||||
|
|
||||||
# ************************************************
|
# ************************************************
|
||||||
# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
|
# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
|
||||||
|
415
core/Makefile
415
core/Makefile
@@ -62,9 +62,6 @@ endif
|
|||||||
# Apps are always signed with test keys, and may be re-signed in a post-build
|
# Apps are always signed with test keys, and may be re-signed in a post-build
|
||||||
# step. If that happens, the "test-keys" tag will be removed by that step.
|
# step. If that happens, the "test-keys" tag will be removed by that step.
|
||||||
BUILD_VERSION_TAGS += test-keys
|
BUILD_VERSION_TAGS += test-keys
|
||||||
ifndef INCLUDE_TEST_OTA_KEYS
|
|
||||||
BUILD_VERSION_TAGS += ota-rel-keys
|
|
||||||
endif
|
|
||||||
BUILD_VERSION_TAGS := $(subst $(space),$(comma),$(sort $(BUILD_VERSION_TAGS)))
|
BUILD_VERSION_TAGS := $(subst $(space),$(comma),$(sort $(BUILD_VERSION_TAGS)))
|
||||||
|
|
||||||
# A human-readable string that descibes this build in detail.
|
# A human-readable string that descibes this build in detail.
|
||||||
@@ -129,10 +126,12 @@ $(INSTALLED_BUILD_PROP_TARGET): $(BUILDINFO_SH) $(INTERNAL_BUILD_ID_MAKEFILE)
|
|||||||
BUILD_NUMBER="$(BUILD_NUMBER)" \
|
BUILD_NUMBER="$(BUILD_NUMBER)" \
|
||||||
PLATFORM_VERSION="$(PLATFORM_VERSION)" \
|
PLATFORM_VERSION="$(PLATFORM_VERSION)" \
|
||||||
PLATFORM_SDK_VERSION="$(PLATFORM_SDK_VERSION)" \
|
PLATFORM_SDK_VERSION="$(PLATFORM_SDK_VERSION)" \
|
||||||
|
PLATFORM_VERSION_CODENAME="$(PLATFORM_VERSION_CODENAME)" \
|
||||||
BUILD_VERSION_TAGS="$(BUILD_VERSION_TAGS)" \
|
BUILD_VERSION_TAGS="$(BUILD_VERSION_TAGS)" \
|
||||||
TARGET_BOOTLOADER_BOARD_NAME="$(TARGET_BOOTLOADER_BOARD_NAME)" \
|
TARGET_BOOTLOADER_BOARD_NAME="$(TARGET_BOOTLOADER_BOARD_NAME)" \
|
||||||
BUILD_FINGERPRINT="$(BUILD_FINGERPRINT)" \
|
BUILD_FINGERPRINT="$(BUILD_FINGERPRINT)" \
|
||||||
TARGET_BOARD_PLATFORM="$(TARGET_BOARD_PLATFORM)" \
|
TARGET_BOARD_PLATFORM="$(TARGET_BOARD_PLATFORM)" \
|
||||||
|
TARGET_CPU_ABI="$(TARGET_CPU_ABI)" \
|
||||||
bash $(BUILDINFO_SH) > $@
|
bash $(BUILDINFO_SH) > $@
|
||||||
$(hide) if [ -f $(TARGET_DEVICE_DIR)/system.prop ]; then \
|
$(hide) if [ -f $(TARGET_DEVICE_DIR)/system.prop ]; then \
|
||||||
cat $(TARGET_DEVICE_DIR)/system.prop >> $@; \
|
cat $(TARGET_DEVICE_DIR)/system.prop >> $@; \
|
||||||
@@ -227,6 +226,15 @@ ifdef CREATE_MODULE_INFO_FILE
|
|||||||
"INSTALLED=\"$(strip $(ALL_MODULES.$(m).INSTALLED))\"" >> $(MODULE_INFO_FILE)))
|
"INSTALLED=\"$(strip $(ALL_MODULES.$(m).INSTALLED))\"" >> $(MODULE_INFO_FILE)))
|
||||||
endif
|
endif
|
||||||
|
|
||||||
|
# -----------------------------------------------------------------
|
||||||
|
|
||||||
|
# The test key is used to sign this package, and as the key required
|
||||||
|
# for future OTA packages installed by this system. Actual product
|
||||||
|
# deliverables will be re-signed by hand. We expect this file to
|
||||||
|
# exist with the suffixes ".x509.pem" and ".pk8".
|
||||||
|
DEFAULT_KEY_CERT_PAIR := $(SRC_TARGET_DIR)/product/security/testkey
|
||||||
|
|
||||||
|
|
||||||
# Rules that need to be present for the simulator, even
|
# Rules that need to be present for the simulator, even
|
||||||
# if they don't do anything.
|
# if they don't do anything.
|
||||||
.PHONY: systemimage
|
.PHONY: systemimage
|
||||||
@@ -250,9 +258,9 @@ BUILT_RAMDISK_TARGET := $(PRODUCT_OUT)/ramdisk.img
|
|||||||
|
|
||||||
# We just build this directly to the install location.
|
# We just build this directly to the install location.
|
||||||
INSTALLED_RAMDISK_TARGET := $(BUILT_RAMDISK_TARGET)
|
INSTALLED_RAMDISK_TARGET := $(BUILT_RAMDISK_TARGET)
|
||||||
$(INSTALLED_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_RAMDISK_FILES)
|
$(INSTALLED_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_RAMDISK_FILES) | $(MINIGZIP)
|
||||||
$(call pretty,"Target ram disk: $@")
|
$(call pretty,"Target ram disk: $@")
|
||||||
$(hide) $(MKBOOTFS) $(TARGET_ROOT_OUT) | gzip > $@
|
$(hide) $(MKBOOTFS) $(TARGET_ROOT_OUT) | $(MINIGZIP) > $@
|
||||||
|
|
||||||
|
|
||||||
ifneq ($(strip $(TARGET_NO_KERNEL)),true)
|
ifneq ($(strip $(TARGET_NO_KERNEL)),true)
|
||||||
@@ -271,6 +279,11 @@ ifdef BOARD_KERNEL_CMDLINE
|
|||||||
INTERNAL_BOOTIMAGE_ARGS += --cmdline "$(BOARD_KERNEL_CMDLINE)"
|
INTERNAL_BOOTIMAGE_ARGS += --cmdline "$(BOARD_KERNEL_CMDLINE)"
|
||||||
endif
|
endif
|
||||||
|
|
||||||
|
BOARD_KERNEL_BASE := $(strip $(BOARD_KERNEL_BASE))
|
||||||
|
ifdef BOARD_KERNEL_BASE
|
||||||
|
INTERNAL_BOOTIMAGE_ARGS += --base $(BOARD_KERNEL_BASE)
|
||||||
|
endif
|
||||||
|
|
||||||
INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
|
INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
|
||||||
|
|
||||||
ifeq ($(TARGET_BOOTIMAGE_USE_EXT2),true)
|
ifeq ($(TARGET_BOOTIMAGE_USE_EXT2),true)
|
||||||
@@ -434,12 +447,16 @@ $(eval $(call combine-notice-files, \
|
|||||||
# the module processing has already been done -- in fact, we used the
|
# the module processing has already been done -- in fact, we used the
|
||||||
# fact that all that has been done to get the list of modules that we
|
# fact that all that has been done to get the list of modules that we
|
||||||
# need notice files for.
|
# need notice files for.
|
||||||
$(target_notice_file_html_gz): $(target_notice_file_html)
|
$(target_notice_file_html_gz): $(target_notice_file_html) | $(MINIGZIP)
|
||||||
gzip -c $< > $@
|
$(hide) $(MINIGZIP) -9 < $< > $@
|
||||||
installed_notice_html_gz := $(TARGET_OUT)/etc/NOTICE.html.gz
|
installed_notice_html_gz := $(TARGET_OUT)/etc/NOTICE.html.gz
|
||||||
$(installed_notice_html_gz): $(target_notice_file_html_gz) | $(ACP)
|
$(installed_notice_html_gz): $(target_notice_file_html_gz) | $(ACP)
|
||||||
$(copy-file-to-target)
|
$(copy-file-to-target)
|
||||||
|
|
||||||
|
# if we've been run my mm, mmm, etc, don't reinstall this every time
|
||||||
|
ifeq ($(ONE_SHOT_MAKEFILE),)
|
||||||
ALL_DEFAULT_INSTALLED_MODULES += $(installed_notice_html_gz)
|
ALL_DEFAULT_INSTALLED_MODULES += $(installed_notice_html_gz)
|
||||||
|
endif
|
||||||
|
|
||||||
# The kernel isn't really a module, so to get its module file in there, we
|
# The kernel isn't really a module, so to get its module file in there, we
|
||||||
# make the target NOTICE files depend on this particular file too, which will
|
# make the target NOTICE files depend on this particular file too, which will
|
||||||
@@ -452,6 +469,23 @@ $(kernel_notice_file): \
|
|||||||
$(hide) $(ACP) $< $@
|
$(hide) $(ACP) $< $@
|
||||||
|
|
||||||
|
|
||||||
|
# -----------------------------------------------------------------
|
||||||
|
# Build a keystore with the authorized keys in it, used to verify the
|
||||||
|
# authenticity of downloaded OTA packages.
|
||||||
|
#
|
||||||
|
# This rule adds to ALL_DEFAULT_INSTALLED_MODULES, so it needs to come
|
||||||
|
# before the rules that use that variable to build the image.
|
||||||
|
ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_OUT_ETC)/security/otacerts.zip
|
||||||
|
$(TARGET_OUT_ETC)/security/otacerts.zip: KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
|
||||||
|
$(TARGET_OUT_ETC)/security/otacerts.zip: $(addsuffix .x509.pem,$(DEFAULT_KEY_CERT_PAIR))
|
||||||
|
$(hide) rm -f $@
|
||||||
|
$(hide) mkdir -p $(dir $@)
|
||||||
|
$(hide) zip -qj $@ $<
|
||||||
|
|
||||||
|
.PHONY: otacerts
|
||||||
|
otacerts: $(TARGET_OUT_ETC)/security/otacerts.zip
|
||||||
|
|
||||||
|
|
||||||
# #################################################################
|
# #################################################################
|
||||||
# Targets for user images
|
# Targets for user images
|
||||||
# #################################################################
|
# #################################################################
|
||||||
@@ -463,6 +497,95 @@ else
|
|||||||
INTERNAL_MKUSERFS := $(MKYAFFS2)
|
INTERNAL_MKUSERFS := $(MKYAFFS2)
|
||||||
endif
|
endif
|
||||||
|
|
||||||
|
# -----------------------------------------------------------------
|
||||||
|
# Recovery image
|
||||||
|
|
||||||
|
# If neither TARGET_NO_KERNEL nor TARGET_NO_RECOVERY are true
|
||||||
|
ifeq (,$(filter true, $(TARGET_NO_KERNEL) $(TARGET_NO_RECOVERY)))
|
||||||
|
|
||||||
|
INSTALLED_RECOVERYIMAGE_TARGET := $(PRODUCT_OUT)/recovery.img
|
||||||
|
|
||||||
|
recovery_initrc := $(call include-path-for, recovery)/etc/init.rc
|
||||||
|
recovery_kernel := $(INSTALLED_KERNEL_TARGET) # same as a non-recovery system
|
||||||
|
recovery_ramdisk := $(PRODUCT_OUT)/ramdisk-recovery.img
|
||||||
|
recovery_build_prop := $(INSTALLED_BUILD_PROP_TARGET)
|
||||||
|
recovery_binary := $(call intermediates-dir-for,EXECUTABLES,recovery)/recovery
|
||||||
|
recovery_resources_common := $(call include-path-for, recovery)/res
|
||||||
|
recovery_resources_private := $(strip $(wildcard $(TARGET_DEVICE_DIR)/recovery/res))
|
||||||
|
recovery_resource_deps := $(shell find $(recovery_resources_common) \
|
||||||
|
$(recovery_resources_private) -type f)
|
||||||
|
|
||||||
|
ifeq ($(recovery_resources_private),)
|
||||||
|
$(info No private recovery resources for TARGET_DEVICE $(TARGET_DEVICE))
|
||||||
|
endif
|
||||||
|
|
||||||
|
INTERNAL_RECOVERYIMAGE_ARGS := \
|
||||||
|
$(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET)) \
|
||||||
|
--kernel $(recovery_kernel) \
|
||||||
|
--ramdisk $(recovery_ramdisk)
|
||||||
|
|
||||||
|
# Assumes this has already been stripped
|
||||||
|
ifdef BOARD_KERNEL_CMDLINE
|
||||||
|
INTERNAL_RECOVERYIMAGE_ARGS += --cmdline "$(BOARD_KERNEL_CMDLINE)"
|
||||||
|
endif
|
||||||
|
ifdef BOARD_KERNEL_BASE
|
||||||
|
INTERNAL_RECOVERYIMAGE_ARGS += --base $(BOARD_KERNEL_BASE)
|
||||||
|
endif
|
||||||
|
|
||||||
|
# Keys authorized to sign OTA packages this build will accept. The
|
||||||
|
# build always uses test-keys for this; release packaging tools will
|
||||||
|
# substitute other keys for this one.
|
||||||
|
OTA_PUBLIC_KEYS := $(SRC_TARGET_DIR)/product/security/testkey.x509.pem
|
||||||
|
|
||||||
|
# Generate a file containing the keys that will be read by the
|
||||||
|
# recovery binary.
|
||||||
|
RECOVERY_INSTALL_OTA_KEYS := \
|
||||||
|
$(call intermediates-dir-for,PACKAGING,ota_keys)/keys
|
||||||
|
DUMPKEY_JAR := $(HOST_OUT_JAVA_LIBRARIES)/dumpkey.jar
|
||||||
|
$(RECOVERY_INSTALL_OTA_KEYS): PRIVATE_OTA_PUBLIC_KEYS := $(OTA_PUBLIC_KEYS)
|
||||||
|
$(RECOVERY_INSTALL_OTA_KEYS): $(OTA_PUBLIC_KEYS) $(DUMPKEY_JAR)
|
||||||
|
@echo "DumpPublicKey: $@ <= $(PRIVATE_OTA_PUBLIC_KEYS)"
|
||||||
|
@rm -rf $@
|
||||||
|
@mkdir -p $(dir $@)
|
||||||
|
java -jar $(DUMPKEY_JAR) $(PRIVATE_OTA_PUBLIC_KEYS) > $@
|
||||||
|
|
||||||
|
$(INSTALLED_RECOVERYIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) $(MINIGZIP) \
|
||||||
|
$(INSTALLED_RAMDISK_TARGET) \
|
||||||
|
$(INSTALLED_BOOTIMAGE_TARGET) \
|
||||||
|
$(recovery_binary) \
|
||||||
|
$(recovery_initrc) $(recovery_kernel) \
|
||||||
|
$(INSTALLED_2NDBOOTLOADER_TARGET) \
|
||||||
|
$(recovery_build_prop) $(recovery_resource_deps) \
|
||||||
|
$(RECOVERY_INSTALL_OTA_KEYS)
|
||||||
|
@echo ----- Making recovery image ------
|
||||||
|
rm -rf $(TARGET_RECOVERY_OUT)
|
||||||
|
mkdir -p $(TARGET_RECOVERY_OUT)
|
||||||
|
mkdir -p $(TARGET_RECOVERY_ROOT_OUT)
|
||||||
|
mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/etc
|
||||||
|
mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/tmp
|
||||||
|
echo Copying baseline ramdisk...
|
||||||
|
cp -R $(TARGET_ROOT_OUT) $(TARGET_RECOVERY_OUT)
|
||||||
|
echo Modifying ramdisk contents...
|
||||||
|
cp -f $(recovery_initrc) $(TARGET_RECOVERY_ROOT_OUT)/
|
||||||
|
cp -f $(recovery_binary) $(TARGET_RECOVERY_ROOT_OUT)/sbin/
|
||||||
|
cp -rf $(recovery_resources_common) $(TARGET_RECOVERY_ROOT_OUT)/
|
||||||
|
$(foreach item,$(recovery_resources_private), \
|
||||||
|
cp -rf $(item) $(TARGET_RECOVERY_ROOT_OUT)/)
|
||||||
|
cp $(RECOVERY_INSTALL_OTA_KEYS) $(TARGET_RECOVERY_ROOT_OUT)/res/keys
|
||||||
|
cat $(INSTALLED_DEFAULT_PROP_TARGET) $(recovery_build_prop) \
|
||||||
|
> $(TARGET_RECOVERY_ROOT_OUT)/default.prop
|
||||||
|
$(MKBOOTFS) $(TARGET_RECOVERY_ROOT_OUT) | $(MINIGZIP) > $(recovery_ramdisk)
|
||||||
|
$(MKBOOTIMG) $(INTERNAL_RECOVERYIMAGE_ARGS) --output $@
|
||||||
|
@echo ----- Made recovery image -------- $@
|
||||||
|
$(hide) $(call assert-max-file-size,$@,$(BOARD_RECOVERYIMAGE_MAX_SIZE))
|
||||||
|
|
||||||
|
else
|
||||||
|
INSTALLED_RECOVERYIMAGE_TARGET :=
|
||||||
|
endif
|
||||||
|
|
||||||
|
.PHONY: recoveryimage
|
||||||
|
recoveryimage: $(INSTALLED_RECOVERYIMAGE_TARGET)
|
||||||
|
|
||||||
# -----------------------------------------------------------------
|
# -----------------------------------------------------------------
|
||||||
# system yaffs image
|
# system yaffs image
|
||||||
#
|
#
|
||||||
@@ -518,10 +641,10 @@ else
|
|||||||
SYSTEMIMAGE_SOURCE_DIR := $(TARGET_OUT)
|
SYSTEMIMAGE_SOURCE_DIR := $(TARGET_OUT)
|
||||||
endif
|
endif
|
||||||
|
|
||||||
$(INSTALLED_SYSTEMIMAGE): $(BUILT_SYSTEMIMAGE) | $(ACP)
|
$(INSTALLED_SYSTEMIMAGE): $(BUILT_SYSTEMIMAGE) $(INSTALLED_RECOVERYIMAGE_TARGET) | $(ACP)
|
||||||
@echo "Install system fs image: $@"
|
@echo "Install system fs image: $@"
|
||||||
$(copy-file-to-target)
|
$(copy-file-to-target)
|
||||||
$(hide) $(call assert-max-file-size,$@,$(BOARD_SYSTEMIMAGE_MAX_SIZE))
|
$(hide) $(call assert-max-file-size,$@ $(INSTALLED_RECOVERYIMAGE_TARGET),$(BOARD_SYSTEMIMAGE_MAX_SIZE))
|
||||||
|
|
||||||
systemimage: $(INSTALLED_SYSTEMIMAGE)
|
systemimage: $(INSTALLED_SYSTEMIMAGE)
|
||||||
|
|
||||||
@@ -614,72 +737,6 @@ userdatatarball-nodeps: $(FS_GET_STATS)
|
|||||||
$(build-userdatatarball-target)
|
$(build-userdatatarball-target)
|
||||||
|
|
||||||
|
|
||||||
# If neither TARGET_NO_KERNEL nor TARGET_NO_RECOVERY are true
|
|
||||||
ifeq (,$(filter true, $(TARGET_NO_KERNEL) $(TARGET_NO_RECOVERY)))
|
|
||||||
|
|
||||||
# -----------------------------------------------------------------
|
|
||||||
# Recovery image
|
|
||||||
INSTALLED_RECOVERYIMAGE_TARGET := $(PRODUCT_OUT)/recovery.img
|
|
||||||
|
|
||||||
recovery_initrc := $(call include-path-for, recovery)/etc/init.rc
|
|
||||||
recovery_kernel := $(INSTALLED_KERNEL_TARGET) # same as a non-recovery system
|
|
||||||
recovery_ramdisk := $(PRODUCT_OUT)/ramdisk-recovery.img
|
|
||||||
recovery_build_prop := $(INSTALLED_BUILD_PROP_TARGET)
|
|
||||||
recovery_binary := $(call intermediates-dir-for,EXECUTABLES,recovery)/recovery
|
|
||||||
recovery_resources_common := $(call include-path-for, recovery)/res
|
|
||||||
recovery_resources_private := $(strip $(wildcard $(TARGET_DEVICE_DIR)/recovery/res))
|
|
||||||
recovery_resource_deps := $(shell find $(recovery_resources_common) \
|
|
||||||
$(recovery_resources_private) -type f)
|
|
||||||
|
|
||||||
ifeq ($(recovery_resources_private),)
|
|
||||||
$(info No private recovery resources for TARGET_DEVICE $(TARGET_DEVICE))
|
|
||||||
endif
|
|
||||||
|
|
||||||
INTERNAL_RECOVERYIMAGE_ARGS := \
|
|
||||||
$(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET)) \
|
|
||||||
--kernel $(recovery_kernel) \
|
|
||||||
--ramdisk $(recovery_ramdisk)
|
|
||||||
|
|
||||||
# Assumes this has already been stripped
|
|
||||||
ifdef BOARD_KERNEL_CMDLINE
|
|
||||||
INTERNAL_RECOVERYIMAGE_ARGS += --cmdline "$(BOARD_KERNEL_CMDLINE)"
|
|
||||||
endif
|
|
||||||
|
|
||||||
$(INSTALLED_RECOVERYIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) \
|
|
||||||
$(INSTALLED_RAMDISK_TARGET) \
|
|
||||||
$(INSTALLED_BOOTIMAGE_TARGET) \
|
|
||||||
$(recovery_binary) \
|
|
||||||
$(recovery_initrc) $(recovery_kernel) \
|
|
||||||
$(INSTALLED_2NDBOOTLOADER_TARGET) \
|
|
||||||
$(recovery_build_prop) $(recovery_resource_deps)
|
|
||||||
@echo ----- Making recovery image ------
|
|
||||||
rm -rf $(TARGET_RECOVERY_OUT)
|
|
||||||
mkdir -p $(TARGET_RECOVERY_OUT)
|
|
||||||
mkdir -p $(TARGET_RECOVERY_ROOT_OUT)
|
|
||||||
mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/etc
|
|
||||||
mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/tmp
|
|
||||||
echo Copying baseline ramdisk...
|
|
||||||
cp -R $(TARGET_ROOT_OUT) $(TARGET_RECOVERY_OUT)
|
|
||||||
echo Modifying ramdisk contents...
|
|
||||||
cp -f $(recovery_initrc) $(TARGET_RECOVERY_ROOT_OUT)/
|
|
||||||
cp -f $(recovery_binary) $(TARGET_RECOVERY_ROOT_OUT)/sbin/
|
|
||||||
cp -rf $(recovery_resources_common) $(TARGET_RECOVERY_ROOT_OUT)/
|
|
||||||
$(foreach item,$(recovery_resources_private), \
|
|
||||||
cp -rf $(item) $(TARGET_RECOVERY_ROOT_OUT)/)
|
|
||||||
cat $(INSTALLED_DEFAULT_PROP_TARGET) $(recovery_build_prop) \
|
|
||||||
> $(TARGET_RECOVERY_ROOT_OUT)/default.prop
|
|
||||||
$(MKBOOTFS) $(TARGET_RECOVERY_ROOT_OUT) | gzip > $(recovery_ramdisk)
|
|
||||||
$(MKBOOTIMG) $(INTERNAL_RECOVERYIMAGE_ARGS) --output $@
|
|
||||||
@echo ----- Made recovery image -------- $@
|
|
||||||
$(hide) $(call assert-max-file-size,$@,$(BOARD_RECOVERYIMAGE_MAX_SIZE))
|
|
||||||
|
|
||||||
else
|
|
||||||
INSTALLED_RECOVERYIMAGE_TARGET :=
|
|
||||||
endif
|
|
||||||
|
|
||||||
.PHONY: recoveryimage
|
|
||||||
recoveryimage: $(INSTALLED_RECOVERYIMAGE_TARGET)
|
|
||||||
|
|
||||||
# -----------------------------------------------------------------
|
# -----------------------------------------------------------------
|
||||||
# bring in the installer image generation defines if necessary
|
# bring in the installer image generation defines if necessary
|
||||||
ifeq ($(TARGET_USE_DISKINSTALLER),true)
|
ifeq ($(TARGET_USE_DISKINSTALLER),true)
|
||||||
@@ -687,123 +744,20 @@ include bootable/diskinstaller/config.mk
|
|||||||
endif
|
endif
|
||||||
|
|
||||||
# -----------------------------------------------------------------
|
# -----------------------------------------------------------------
|
||||||
# OTA update package
|
# host tools needed to build OTA packages
|
||||||
name := $(TARGET_PRODUCT)
|
|
||||||
ifeq ($(TARGET_BUILD_TYPE),debug)
|
|
||||||
name := $(name)_debug
|
|
||||||
endif
|
|
||||||
name := $(name)-ota-$(FILE_NAME_TAG)
|
|
||||||
|
|
||||||
INTERNAL_OTA_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
|
.PHONY: otatools
|
||||||
INTERNAL_OTA_INTERMEDIATES_DIR := $(call intermediates-dir-for,PACKAGING,ota)
|
otatools: $(HOST_OUT_EXECUTABLES)/minigzip \
|
||||||
|
$(HOST_OUT_EXECUTABLES)/mkbootfs \
|
||||||
# If neither TARGET_NO_KERNEL nor TARGET_NO_RECOVERY are true
|
$(HOST_OUT_EXECUTABLES)/mkbootimg \
|
||||||
ifeq (,$(filter true, $(TARGET_NO_KERNEL) $(TARGET_NO_RECOVERY)))
|
$(HOST_OUT_EXECUTABLES)/fs_config \
|
||||||
INTERNAL_OTA_RECOVERYIMAGE_TARGET := $(INTERNAL_OTA_INTERMEDIATES_DIR)/system/recovery.img
|
$(HOST_OUT_EXECUTABLES)/mkyaffs2image \
|
||||||
else
|
$(HOST_OUT_EXECUTABLES)/zipalign \
|
||||||
INTERNAL_OTA_RECOVERYIMAGE_TARGET :=
|
$(HOST_OUT_EXECUTABLES)/aapt \
|
||||||
endif
|
$(HOST_OUT_EXECUTABLES)/bsdiff \
|
||||||
INTERNAL_OTA_SCRIPT_TARGET := $(INTERNAL_OTA_INTERMEDIATES_DIR)/META-INF/com/google/android/update-script
|
$(HOST_OUT_EXECUTABLES)/imgdiff \
|
||||||
|
$(HOST_OUT_JAVA_LIBRARIES)/dumpkey.jar \
|
||||||
# Sign OTA packages with the test key by default.
|
$(HOST_OUT_JAVA_LIBRARIES)/signapk.jar
|
||||||
# Actual product deliverables will be re-signed by hand.
|
|
||||||
private_key := $(SRC_TARGET_DIR)/product/security/testkey.pk8
|
|
||||||
certificate := $(SRC_TARGET_DIR)/product/security/testkey.x509.pem
|
|
||||||
$(INTERNAL_OTA_PACKAGE_TARGET): $(private_key) $(certificate) $(SIGNAPK_JAR)
|
|
||||||
$(INTERNAL_OTA_PACKAGE_TARGET): PRIVATE_PRIVATE_KEY := $(private_key)
|
|
||||||
$(INTERNAL_OTA_PACKAGE_TARGET): PRIVATE_CERTIFICATE := $(certificate)
|
|
||||||
|
|
||||||
# Depending on INSTALLED_SYSTEMIMAGE guarantees that SYSTEMIMAGE_SOURCE_DIR
|
|
||||||
# is up-to-date. We use jar instead of zip so that we can use the -C
|
|
||||||
# switch to avoid cd-ing all over the place.
|
|
||||||
# TODO: Make our own jar-creation tool to avoid all these shenanigans.
|
|
||||||
$(INTERNAL_OTA_PACKAGE_TARGET): \
|
|
||||||
$(INTERNAL_OTA_SCRIPT_TARGET) \
|
|
||||||
$(INTERNAL_OTA_RECOVERYIMAGE_TARGET) \
|
|
||||||
$(INSTALLED_BOOTIMAGE_TARGET) \
|
|
||||||
$(INSTALLED_RADIOIMAGE_TARGET) \
|
|
||||||
$(INSTALLED_ANDROID_INFO_TXT_TARGET) \
|
|
||||||
$(INSTALLED_SYSTEMIMAGE)
|
|
||||||
@echo "Package OTA: $@"
|
|
||||||
$(hide) rm -rf $@
|
|
||||||
$(hide) jar cf $@ \
|
|
||||||
$(foreach item, \
|
|
||||||
$(INSTALLED_BOOTIMAGE_TARGET) \
|
|
||||||
$(INSTALLED_RADIOIMAGE_TARGET) \
|
|
||||||
$(INSTALLED_ANDROID_INFO_TXT_TARGET), \
|
|
||||||
-C $(dir $(item)) $(notdir $(item))) \
|
|
||||||
-C $(INTERNAL_OTA_INTERMEDIATES_DIR) .
|
|
||||||
$(hide) find $(SYSTEMIMAGE_SOURCE_DIR) -type f -print | \
|
|
||||||
sed 's|^$(dir $(SYSTEMIMAGE_SOURCE_DIR))|-C & |' | \
|
|
||||||
xargs jar uf $@
|
|
||||||
$(hide) if jar tf $@ | egrep '.{65}' >&2; then \
|
|
||||||
echo "Path too long (>64 chars) for OTA update" >&2; \
|
|
||||||
exit 1; \
|
|
||||||
fi
|
|
||||||
$(sign-package)
|
|
||||||
|
|
||||||
$(INTERNAL_OTA_SCRIPT_TARGET): \
|
|
||||||
$(HOST_OUT_EXECUTABLES)/make-update-script \
|
|
||||||
$(INSTALLED_ANDROID_INFO_TXT_TARGET) \
|
|
||||||
$(INSTALLED_SYSTEMIMAGE)
|
|
||||||
@mkdir -p $(dir $@)
|
|
||||||
@rm -rf $@
|
|
||||||
@echo "Update script: $@"
|
|
||||||
$(hide) TARGET_DEVICE=$(TARGET_DEVICE) \
|
|
||||||
$< $(SYSTEMIMAGE_SOURCE_DIR) \
|
|
||||||
$(INSTALLED_ANDROID_INFO_TXT_TARGET) \
|
|
||||||
> $@
|
|
||||||
|
|
||||||
ifneq (,$(INTERNAL_OTA_RECOVERYIMAGE_TARGET))
|
|
||||||
# This copy is so recovery.img can be in /system within the OTA package.
|
|
||||||
# That way it gets installed into the system image, which in turn installs it.
|
|
||||||
$(INTERNAL_OTA_RECOVERYIMAGE_TARGET): $(INSTALLED_RECOVERYIMAGE_TARGET) | $(ACP)
|
|
||||||
@mkdir -p $(dir $@)
|
|
||||||
$(hide) $(ACP) $< $@
|
|
||||||
endif
|
|
||||||
|
|
||||||
.PHONY: otapackage
|
|
||||||
otapackage: $(INTERNAL_OTA_PACKAGE_TARGET)
|
|
||||||
|
|
||||||
# Keys authorized to sign OTA packages this build will accept.
|
|
||||||
ifeq ($(INCLUDE_TEST_OTA_KEYS),true)
|
|
||||||
OTA_PUBLIC_KEYS := \
|
|
||||||
$(sort $(SRC_TARGET_DIR)/product/security/testkey.x509.pem $(OTA_PUBLIC_KEYS))
|
|
||||||
endif
|
|
||||||
|
|
||||||
ifeq ($(OTA_PUBLIC_KEYS),)
|
|
||||||
$(error No OTA_PUBLIC_KEYS defined)
|
|
||||||
endif
|
|
||||||
|
|
||||||
# Build a keystore with the authorized keys in it.
|
|
||||||
# java/android/android/server/checkin/UpdateVerifier.java uses this.
|
|
||||||
ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_OUT_ETC)/security/otacerts.zip
|
|
||||||
$(TARGET_OUT_ETC)/security/otacerts.zip: $(OTA_PUBLIC_KEYS)
|
|
||||||
$(hide) rm -f $@
|
|
||||||
$(hide) mkdir -p $(dir $@)
|
|
||||||
zip -qj $@ $(OTA_PUBLIC_KEYS)
|
|
||||||
|
|
||||||
# The device does not support JKS.
|
|
||||||
# $(hide) for f in $(OTA_PUBLIC_KEYS); do \
|
|
||||||
# echo "keytool: $@ <= $$f" && \
|
|
||||||
# keytool -keystore $@ -storepass $(notdir $@) -noprompt \
|
|
||||||
# -import -file $$f -alias $(notdir $$f) || exit 1; \
|
|
||||||
# done
|
|
||||||
|
|
||||||
ifdef RECOVERY_INSTALL_OTA_KEYS_INC
|
|
||||||
# Generate a C-includable file containing the keys.
|
|
||||||
# RECOVERY_INSTALL_OTA_KEYS_INC is defined by recovery/Android.mk.
|
|
||||||
# *** THIS IS A TOTAL HACK; EXECUTABLES MUST NOT CHANGE BETWEEN DIFFERENT
|
|
||||||
# PRODUCTS/BUILD TYPES. ***
|
|
||||||
# TODO: make recovery read the keys from an external file.
|
|
||||||
DUMPKEY_JAR := $(HOST_OUT_JAVA_LIBRARIES)/dumpkey.jar
|
|
||||||
$(RECOVERY_INSTALL_OTA_KEYS_INC): PRIVATE_OTA_PUBLIC_KEYS := $(OTA_PUBLIC_KEYS)
|
|
||||||
$(RECOVERY_INSTALL_OTA_KEYS_INC): $(OTA_PUBLIC_KEYS) $(DUMPKEY_JAR)
|
|
||||||
@echo "DumpPublicKey: $@ <= $(PRIVATE_OTA_PUBLIC_KEYS)"
|
|
||||||
@rm -rf $@
|
|
||||||
@mkdir -p $(dir $@)
|
|
||||||
$(hide) java -jar $(DUMPKEY_JAR) $(PRIVATE_OTA_PUBLIC_KEYS) > $@
|
|
||||||
endif
|
|
||||||
|
|
||||||
# -----------------------------------------------------------------
|
# -----------------------------------------------------------------
|
||||||
# A zip of the directories that map to the target filesystem.
|
# A zip of the directories that map to the target filesystem.
|
||||||
@@ -833,21 +787,22 @@ define package_files-copy-root
|
|||||||
endef
|
endef
|
||||||
|
|
||||||
built_ota_tools := \
|
built_ota_tools := \
|
||||||
$(call intermediates-dir-for,EXECUTABLES,applypatch)/applypatch \
|
$(call intermediates-dir-for,EXECUTABLES,applypatch)/applypatch \
|
||||||
$(call intermediates-dir-for,EXECUTABLES,check_prereq)/check_prereq
|
$(call intermediates-dir-for,EXECUTABLES,check_prereq)/check_prereq \
|
||||||
|
$(call intermediates-dir-for,EXECUTABLES,updater)/updater
|
||||||
$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_OTA_TOOLS := $(built_ota_tools)
|
$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_OTA_TOOLS := $(built_ota_tools)
|
||||||
|
|
||||||
|
$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_API_VERSION := $(RECOVERY_API_VERSION)
|
||||||
|
|
||||||
# Depending on the various images guarantees that the underlying
|
# Depending on the various images guarantees that the underlying
|
||||||
# directories are up-to-date.
|
# directories are up-to-date.
|
||||||
$(BUILT_TARGET_FILES_PACKAGE): \
|
$(BUILT_TARGET_FILES_PACKAGE): \
|
||||||
$(INTERNAL_OTA_SCRIPT_TARGET) \
|
|
||||||
$(INSTALLED_BOOTIMAGE_TARGET) \
|
$(INSTALLED_BOOTIMAGE_TARGET) \
|
||||||
$(INSTALLED_RADIOIMAGE_TARGET) \
|
$(INSTALLED_RADIOIMAGE_TARGET) \
|
||||||
$(INSTALLED_RECOVERYIMAGE_TARGET) \
|
$(INSTALLED_RECOVERYIMAGE_TARGET) \
|
||||||
$(BUILT_SYSTEMIMAGE) \
|
$(INSTALLED_SYSTEMIMAGE) \
|
||||||
$(INSTALLED_USERDATAIMAGE_TARGET) \
|
$(INSTALLED_USERDATAIMAGE_TARGET) \
|
||||||
$(INSTALLED_ANDROID_INFO_TXT_TARGET) \
|
$(INSTALLED_ANDROID_INFO_TXT_TARGET) \
|
||||||
$(INTERNAL_OTA_SCRIPT_TARGET) \
|
|
||||||
$(built_ota_tools) \
|
$(built_ota_tools) \
|
||||||
$(APKCERTS_FILE) \
|
$(APKCERTS_FILE) \
|
||||||
| $(ACP)
|
| $(ACP)
|
||||||
@@ -895,19 +850,53 @@ endif
|
|||||||
$(TARGET_OUT_DATA),$(zip_root)/DATA)
|
$(TARGET_OUT_DATA),$(zip_root)/DATA)
|
||||||
@# Extra contents of the OTA package
|
@# Extra contents of the OTA package
|
||||||
$(hide) mkdir -p $(zip_root)/OTA/bin
|
$(hide) mkdir -p $(zip_root)/OTA/bin
|
||||||
$(hide) $(call package_files-copy-root, \
|
|
||||||
$(INTERNAL_OTA_INTERMEDIATES_DIR),$(zip_root)/OTA)
|
|
||||||
$(hide) $(ACP) $(INSTALLED_ANDROID_INFO_TXT_TARGET) $(zip_root)/OTA/
|
$(hide) $(ACP) $(INSTALLED_ANDROID_INFO_TXT_TARGET) $(zip_root)/OTA/
|
||||||
$(hide) $(ACP) $(PRIVATE_OTA_TOOLS) $(zip_root)/OTA/bin/
|
$(hide) $(ACP) $(PRIVATE_OTA_TOOLS) $(zip_root)/OTA/bin/
|
||||||
@# Files that don't end up in any images, but are necessary to
|
@# Files that do not end up in any images, but are necessary to
|
||||||
@# build them.
|
@# build them.
|
||||||
$(hide) mkdir -p $(zip_root)/META
|
$(hide) mkdir -p $(zip_root)/META
|
||||||
$(hide) $(ACP) $(APKCERTS_FILE) $(zip_root)/META/apkcerts.txt
|
$(hide) $(ACP) $(APKCERTS_FILE) $(zip_root)/META/apkcerts.txt
|
||||||
|
$(hide) echo "$(PRODUCT_OTA_PUBLIC_KEYS)" > $(zip_root)/META/otakeys.txt
|
||||||
|
$(hide) echo "$(PRIVATE_RECOVERY_API_VERSION)" > $(zip_root)/META/recovery-api-version.txt
|
||||||
|
$(hide) echo "blocksize $(BOARD_FLASH_BLOCK_SIZE)" > $(zip_root)/META/imagesizes.txt
|
||||||
|
$(hide) echo "boot $(BOARD_BOOTIMAGE_MAX_SIZE)" >> $(zip_root)/META/imagesizes.txt
|
||||||
|
$(hide) echo "recovery $(BOARD_RECOVERYIMAGE_MAX_SIZE)" >> $(zip_root)/META/imagesizes.txt
|
||||||
|
$(hide) echo "system $(BOARD_SYSTEMIMAGE_MAX_SIZE)" >> $(zip_root)/META/imagesizes.txt
|
||||||
|
$(hide) echo "userdata $(BOARD_USERDATAIMAGE_MAX_SIZE)" >> $(zip_root)/META/imagesizes.txt
|
||||||
@# Zip everything up, preserving symlinks
|
@# Zip everything up, preserving symlinks
|
||||||
$(hide) (cd $(zip_root) && zip -qry ../$(notdir $@) .)
|
$(hide) (cd $(zip_root) && zip -qry ../$(notdir $@) .)
|
||||||
|
|
||||||
target-files-package: $(BUILT_TARGET_FILES_PACKAGE)
|
target-files-package: $(BUILT_TARGET_FILES_PACKAGE)
|
||||||
|
|
||||||
|
# -----------------------------------------------------------------
|
||||||
|
# OTA update package
|
||||||
|
|
||||||
|
ifneq ($(TARGET_SIMULATOR),true)
|
||||||
|
ifneq ($(TARGET_PRODUCT),sdk)
|
||||||
|
|
||||||
|
name := $(TARGET_PRODUCT)
|
||||||
|
ifeq ($(TARGET_BUILD_TYPE),debug)
|
||||||
|
name := $(name)_debug
|
||||||
|
endif
|
||||||
|
name := $(name)-ota-$(FILE_NAME_TAG)
|
||||||
|
|
||||||
|
INTERNAL_OTA_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
|
||||||
|
|
||||||
|
$(INTERNAL_OTA_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
|
||||||
|
|
||||||
|
$(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) otatools
|
||||||
|
@echo "Package OTA: $@"
|
||||||
|
$(hide) ./build/tools/releasetools/ota_from_target_files \
|
||||||
|
-p $(HOST_OUT) \
|
||||||
|
-k $(KEY_CERT_PAIR) \
|
||||||
|
$(BUILT_TARGET_FILES_PACKAGE) $@
|
||||||
|
|
||||||
|
.PHONY: otapackage
|
||||||
|
otapackage: $(INTERNAL_OTA_PACKAGE_TARGET)
|
||||||
|
|
||||||
|
endif # TARGET_PRODUCT != sdk
|
||||||
|
endif # TARGET_SIMULATOR != true
|
||||||
|
|
||||||
# -----------------------------------------------------------------
|
# -----------------------------------------------------------------
|
||||||
# installed file list
|
# installed file list
|
||||||
# Depending on $(INSTALLED_SYSTEMIMAGE) ensures that it
|
# Depending on $(INSTALLED_SYSTEMIMAGE) ensures that it
|
||||||
@@ -1009,14 +998,8 @@ dalvikfiles: $(INTERNAL_DALVIK_MODULES)
|
|||||||
# -----------------------------------------------------------------
|
# -----------------------------------------------------------------
|
||||||
# The update package
|
# The update package
|
||||||
|
|
||||||
INTERNAL_UPDATE_PACKAGE_FILES += \
|
ifneq ($(TARGET_SIMULATOR),true)
|
||||||
$(INSTALLED_BOOTIMAGE_TARGET) \
|
ifneq ($(TARGET_PRODUCT),sdk)
|
||||||
$(INSTALLED_RECOVERYIMAGE_TARGET) \
|
|
||||||
$(INSTALLED_SYSTEMIMAGE) \
|
|
||||||
$(INSTALLED_USERDATAIMAGE_TARGET) \
|
|
||||||
$(INSTALLED_ANDROID_INFO_TXT_TARGET)
|
|
||||||
|
|
||||||
ifneq ($(strip $(INTERNAL_UPDATE_PACKAGE_FILES)),)
|
|
||||||
|
|
||||||
name := $(TARGET_PRODUCT)
|
name := $(TARGET_PRODUCT)
|
||||||
ifeq ($(TARGET_BUILD_TYPE),debug)
|
ifeq ($(TARGET_BUILD_TYPE),debug)
|
||||||
@@ -1026,13 +1009,17 @@ name := $(name)-img-$(FILE_NAME_TAG)
|
|||||||
|
|
||||||
INTERNAL_UPDATE_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
|
INTERNAL_UPDATE_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
|
||||||
|
|
||||||
$(INTERNAL_UPDATE_PACKAGE_TARGET): $(INTERNAL_UPDATE_PACKAGE_FILES)
|
$(INTERNAL_UPDATE_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) otatools
|
||||||
@echo "Package: $@"
|
@echo "Package: $@"
|
||||||
$(hide) zip -qj $@ $(INTERNAL_UPDATE_PACKAGE_FILES)
|
$(hide) ./build/tools/releasetools/img_from_target_files \
|
||||||
|
-p $(HOST_OUT) \
|
||||||
|
$(BUILT_TARGET_FILES_PACKAGE) $@
|
||||||
|
|
||||||
else
|
.PHONY: updatepackage
|
||||||
INTERNAL_UPDATE_PACKAGE_TARGET :=
|
updatepackage: $(INTERNAL_UPDATE_PACKAGE_TARGET)
|
||||||
endif
|
|
||||||
|
endif # TARGET_PRODUCT != sdk
|
||||||
|
endif # TARGET_SIMULATOR != true
|
||||||
|
|
||||||
# -----------------------------------------------------------------
|
# -----------------------------------------------------------------
|
||||||
# The emulator package
|
# The emulator package
|
||||||
@@ -1116,7 +1103,9 @@ deps := \
|
|||||||
$(target_notice_file_txt) \
|
$(target_notice_file_txt) \
|
||||||
$(tools_notice_file_txt) \
|
$(tools_notice_file_txt) \
|
||||||
$(OUT_DOCS)/offline-sdk-timestamp \
|
$(OUT_DOCS)/offline-sdk-timestamp \
|
||||||
$(INTERNAL_UPDATE_PACKAGE_TARGET) \
|
$(INSTALLED_SYSTEMIMAGE) \
|
||||||
|
$(INSTALLED_USERDATAIMAGE_TARGET) \
|
||||||
|
$(INSTALLED_RAMDISK_TARGET) \
|
||||||
$(INSTALLED_SDK_BUILD_PROP_TARGET) \
|
$(INSTALLED_SDK_BUILD_PROP_TARGET) \
|
||||||
$(ATREE_FILES) \
|
$(ATREE_FILES) \
|
||||||
$(atree_dir)/sdk.atree \
|
$(atree_dir)/sdk.atree \
|
||||||
|
@@ -267,7 +267,6 @@ full_static_java_libs := \
|
|||||||
JAVA_LIBRARIES,$(lib),$(LOCAL_IS_HOST_MODULE))/javalib.jar)
|
JAVA_LIBRARIES,$(lib),$(LOCAL_IS_HOST_MODULE))/javalib.jar)
|
||||||
|
|
||||||
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_INSTALL_DIR := $(dir $(LOCAL_INSTALLED_MODULE))
|
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_INSTALL_DIR := $(dir $(LOCAL_INSTALLED_MODULE))
|
||||||
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_INTERMEDIATES_DIR := $(intermediates)
|
|
||||||
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CLASS_INTERMEDIATES_DIR := $(intermediates)/classes
|
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CLASS_INTERMEDIATES_DIR := $(intermediates)/classes
|
||||||
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SOURCE_INTERMEDIATES_DIR := $(intermediates)/src
|
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SOURCE_INTERMEDIATES_DIR := $(intermediates)/src
|
||||||
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JAVA_SOURCES := $(all_java_sources)
|
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JAVA_SOURCES := $(all_java_sources)
|
||||||
@@ -357,6 +356,8 @@ $(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_ALL_JAVA_LIBRARIES:= $(full_java_libs)
|
|||||||
$(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_IS_HOST_MODULE := $(LOCAL_IS_HOST_MODULE)
|
$(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_IS_HOST_MODULE := $(LOCAL_IS_HOST_MODULE)
|
||||||
$(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_HOST:= $(my_host)
|
$(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_HOST:= $(my_host)
|
||||||
|
|
||||||
|
$(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_INTERMEDIATES_DIR:= $(intermediates)
|
||||||
|
|
||||||
# Tell the module and all of its sub-modules who it is.
|
# Tell the module and all of its sub-modules who it is.
|
||||||
$(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_MODULE:= $(LOCAL_MODULE)
|
$(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_MODULE:= $(LOCAL_MODULE)
|
||||||
|
|
||||||
|
@@ -47,11 +47,11 @@ LOCAL_ARM_MODE := $(strip $(LOCAL_ARM_MODE))
|
|||||||
arm_objects_mode := $(if $(LOCAL_ARM_MODE),$(LOCAL_ARM_MODE),arm)
|
arm_objects_mode := $(if $(LOCAL_ARM_MODE),$(LOCAL_ARM_MODE),arm)
|
||||||
normal_objects_mode := $(if $(LOCAL_ARM_MODE),$(LOCAL_ARM_MODE),thumb)
|
normal_objects_mode := $(if $(LOCAL_ARM_MODE),$(LOCAL_ARM_MODE),thumb)
|
||||||
|
|
||||||
# Read the values from something like TARGET_arm_release_CFLAGS or
|
# Read the values from something like TARGET_arm_CFLAGS or
|
||||||
# TARGET_thumb_debug_CFLAGS. HOST_(arm|thumb)_(release|debug)_CFLAGS
|
# TARGET_thumb_CFLAGS. HOST_(arm|thumb)_CFLAGS values aren't
|
||||||
# values aren't actually used (although they are usually empty).
|
# actually used (although they are usually empty).
|
||||||
arm_objects_cflags := $($(my_prefix)$(arm_objects_mode)_$($(my_prefix)BUILD_TYPE)_CFLAGS)
|
arm_objects_cflags := $($(my_prefix)$(arm_objects_mode)_CFLAGS)
|
||||||
normal_objects_cflags := $($(my_prefix)$(normal_objects_mode)_$($(my_prefix)BUILD_TYPE)_CFLAGS)
|
normal_objects_cflags := $($(my_prefix)$(normal_objects_mode)_CFLAGS)
|
||||||
|
|
||||||
###########################################################
|
###########################################################
|
||||||
## Define per-module debugging flags. Users can turn on
|
## Define per-module debugging flags. Users can turn on
|
||||||
@@ -211,6 +211,19 @@ $(c_objects): $(intermediates)/%.o: $(TOPDIR)$(LOCAL_PATH)/%.c $(yacc_cpps) $(PR
|
|||||||
-include $(c_objects:%.o=%.P)
|
-include $(c_objects:%.o=%.P)
|
||||||
endif
|
endif
|
||||||
|
|
||||||
|
###########################################################
|
||||||
|
## ObjC: Compile .m files to .o
|
||||||
|
###########################################################
|
||||||
|
|
||||||
|
objc_sources := $(filter %.m,$(LOCAL_SRC_FILES))
|
||||||
|
objc_objects := $(addprefix $(intermediates)/,$(objc_sources:.m=.o))
|
||||||
|
|
||||||
|
ifneq ($(strip $(objc_objects)),)
|
||||||
|
$(objc_objects): $(intermediates)/%.o: $(TOPDIR)$(LOCAL_PATH)/%.m $(yacc_cpps) $(PRIVATE_ADDITIONAL_DEPENDENCIES)
|
||||||
|
$(transform-$(PRIVATE_HOST)m-to-o)
|
||||||
|
-include $(objc_objects:%.o=%.P)
|
||||||
|
endif
|
||||||
|
|
||||||
###########################################################
|
###########################################################
|
||||||
## AS: Compile .S files to .o.
|
## AS: Compile .S files to .o.
|
||||||
###########################################################
|
###########################################################
|
||||||
|
@@ -23,7 +23,7 @@
|
|||||||
# (like "TC1-RC5"). It must be a single word, and is
|
# (like "TC1-RC5"). It must be a single word, and is
|
||||||
# capitalized by convention.
|
# capitalized by convention.
|
||||||
#
|
#
|
||||||
BUILD_ID := CUPCAKE
|
BUILD_ID := Donut
|
||||||
|
|
||||||
# DISPLAY_BUILD_NUMBER should only be set for development branches,
|
# DISPLAY_BUILD_NUMBER should only be set for development branches,
|
||||||
# If set, the BUILD_NUMBER (cl) is appended to the BUILD_ID for
|
# If set, the BUILD_NUMBER (cl) is appended to the BUILD_ID for
|
||||||
|
@@ -73,40 +73,39 @@ $(combo_target)LD := $($(combo_target)TOOLS_PREFIX)ld$(HOST_EXECUTABLE_SUFFIX)
|
|||||||
|
|
||||||
$(combo_target)NO_UNDEFINED_LDFLAGS := -Wl,--no-undefined
|
$(combo_target)NO_UNDEFINED_LDFLAGS := -Wl,--no-undefined
|
||||||
|
|
||||||
TARGET_arm_release_CFLAGS := -O2 \
|
TARGET_arm_CFLAGS := -O2 \
|
||||||
-fomit-frame-pointer \
|
-fomit-frame-pointer \
|
||||||
-fstrict-aliasing \
|
-fstrict-aliasing \
|
||||||
-funswitch-loops \
|
-funswitch-loops \
|
||||||
-finline-limit=300
|
-finline-limit=300
|
||||||
|
|
||||||
# Modules can choose to compile some source as thumb. As
|
# Modules can choose to compile some source as thumb. As
|
||||||
# non-thumb enabled targets are supported, this is treated
|
# non-thumb enabled targets are supported, this is treated
|
||||||
# as a 'hint'. If thumb is not enabled, these files are just
|
# as a 'hint'. If thumb is not enabled, these files are just
|
||||||
# compiled as ARM.
|
# compiled as ARM.
|
||||||
ifeq ($(ARCH_ARM_HAVE_THUMB_SUPPORT),true)
|
ifeq ($(ARCH_ARM_HAVE_THUMB_SUPPORT),true)
|
||||||
TARGET_thumb_release_CFLAGS := -mthumb \
|
TARGET_thumb_CFLAGS := -mthumb \
|
||||||
-Os \
|
-Os \
|
||||||
-fomit-frame-pointer \
|
-fomit-frame-pointer \
|
||||||
-fno-strict-aliasing \
|
-fno-strict-aliasing \
|
||||||
-finline-limit=64
|
-finline-limit=64
|
||||||
else
|
else
|
||||||
TARGET_thumb_release_CFLAGS := $(TARGET_arm_release_CFLAGS)
|
TARGET_thumb_CFLAGS := $(TARGET_arm_CFLAGS)
|
||||||
endif
|
endif
|
||||||
|
|
||||||
# When building for debug, compile everything as arm.
|
# Set FORCE_ARM_DEBUGGING to "true" in your buildspec.mk
|
||||||
TARGET_arm_debug_CFLAGS := $(TARGET_arm_release_CFLAGS) -fno-omit-frame-pointer -fno-strict-aliasing
|
# or in your environment to force a full arm build, even for
|
||||||
TARGET_thumb_debug_CFLAGS := $(TARGET_thumb_release_CFLAGS) -marm -fno-omit-frame-pointer
|
# files that are normally built as thumb; this can make
|
||||||
|
# gdb debugging easier. Don't forget to do a clean build.
|
||||||
# NOTE: if you try to build a debug build with thumb, several
|
#
|
||||||
|
# NOTE: if you try to build a -O0 build with thumb, several
|
||||||
# of the libraries (libpv, libwebcore, libkjs) need to be built
|
# of the libraries (libpv, libwebcore, libkjs) need to be built
|
||||||
# with -mlong-calls. When built at -O0, those libraries are
|
# with -mlong-calls. When built at -O0, those libraries are
|
||||||
# too big for a thumb "BL <label>" to go from one end to the other.
|
# too big for a thumb "BL <label>" to go from one end to the other.
|
||||||
|
ifeq ($(FORCE_ARM_DEBUGGING),true)
|
||||||
## As hopefully a temporary hack,
|
TARGET_arm_CFLAGS += -fno-omit-frame-pointer
|
||||||
## use this to force a full ARM build (for easier debugging in gdb)
|
TARGET_thumb_CFLAGS += -marm -fno-omit-frame-pointer
|
||||||
## (don't forget to do a clean build)
|
endif
|
||||||
##TARGET_arm_release_CFLAGS := $(TARGET_arm_release_CFLAGS) -fno-omit-frame-pointer
|
|
||||||
##TARGET_thumb_release_CFLAGS := $(TARGET_thumb_release_CFLAGS) -marm -fno-omit-frame-pointer
|
|
||||||
|
|
||||||
android_config_h := $(call select-android-config-h,linux-arm)
|
android_config_h := $(call select-android-config-h,linux-arm)
|
||||||
arch_include_dir := $(dir $(android_config_h))
|
arch_include_dir := $(dir $(android_config_h))
|
||||||
|
@@ -7,7 +7,6 @@
|
|||||||
# $(combo_target)OS -- standard name for this host (LINUX, DARWIN, etc.)
|
# $(combo_target)OS -- standard name for this host (LINUX, DARWIN, etc.)
|
||||||
# $(combo_target)ARCH -- standard name for process architecture (powerpc, x86, etc.)
|
# $(combo_target)ARCH -- standard name for process architecture (powerpc, x86, etc.)
|
||||||
# $(combo_target)GLOBAL_CFLAGS -- C compiler flags to use for everything
|
# $(combo_target)GLOBAL_CFLAGS -- C compiler flags to use for everything
|
||||||
# $(combo_target)DEBUG_CFLAGS -- additional C compiler flags for debug builds
|
|
||||||
# $(combo_target)RELEASE_CFLAGS -- additional C compiler flags for release builds
|
# $(combo_target)RELEASE_CFLAGS -- additional C compiler flags for release builds
|
||||||
# $(combo_target)GLOBAL_ARFLAGS -- flags to use for static linking everything
|
# $(combo_target)GLOBAL_ARFLAGS -- flags to use for static linking everything
|
||||||
# $(combo_target)SHLIB_SUFFIX -- suffix of shared libraries
|
# $(combo_target)SHLIB_SUFFIX -- suffix of shared libraries
|
||||||
@@ -39,7 +38,6 @@ $(combo_target)HAVE_KERNEL_MODULES := 0
|
|||||||
|
|
||||||
# These flags might (will) be overridden by the target makefiles
|
# These flags might (will) be overridden by the target makefiles
|
||||||
$(combo_target)GLOBAL_CFLAGS := -fno-exceptions -Wno-multichar
|
$(combo_target)GLOBAL_CFLAGS := -fno-exceptions -Wno-multichar
|
||||||
$(combo_target)DEBUG_CFLAGS := -O0 -g
|
|
||||||
$(combo_target)RELEASE_CFLAGS := -O2 -g -fno-strict-aliasing
|
$(combo_target)RELEASE_CFLAGS := -O2 -g -fno-strict-aliasing
|
||||||
$(combo_target)GLOBAL_ARFLAGS := crs
|
$(combo_target)GLOBAL_ARFLAGS := crs
|
||||||
|
|
||||||
|
@@ -76,11 +76,9 @@ SHOW_COMMANDS:= $(filter showcommands,$(MAKECMDGOALS))
|
|||||||
|
|
||||||
# These can be changed to modify both host and device modules.
|
# These can be changed to modify both host and device modules.
|
||||||
COMMON_GLOBAL_CFLAGS:= -DANDROID -fmessage-length=0 -W -Wall -Wno-unused
|
COMMON_GLOBAL_CFLAGS:= -DANDROID -fmessage-length=0 -W -Wall -Wno-unused
|
||||||
COMMON_DEBUG_CFLAGS:=
|
|
||||||
COMMON_RELEASE_CFLAGS:= -DNDEBUG -UDEBUG
|
COMMON_RELEASE_CFLAGS:= -DNDEBUG -UDEBUG
|
||||||
|
|
||||||
COMMON_GLOBAL_CPPFLAGS:=
|
COMMON_GLOBAL_CPPFLAGS:=
|
||||||
COMMON_DEBUG_CPPFLAGS:=
|
|
||||||
COMMON_RELEASE_CPPFLAGS:=
|
COMMON_RELEASE_CPPFLAGS:=
|
||||||
|
|
||||||
# Set the extensions used for various packages
|
# Set the extensions used for various packages
|
||||||
@@ -158,6 +156,7 @@ AIDL := $(HOST_OUT_EXECUTABLES)/aidl$(HOST_EXECUTABLE_SUFFIX)
|
|||||||
ICUDATA := $(HOST_OUT_EXECUTABLES)/icudata$(HOST_EXECUTABLE_SUFFIX)
|
ICUDATA := $(HOST_OUT_EXECUTABLES)/icudata$(HOST_EXECUTABLE_SUFFIX)
|
||||||
SIGNAPK_JAR := $(HOST_OUT_JAVA_LIBRARIES)/signapk$(COMMON_JAVA_PACKAGE_SUFFIX)
|
SIGNAPK_JAR := $(HOST_OUT_JAVA_LIBRARIES)/signapk$(COMMON_JAVA_PACKAGE_SUFFIX)
|
||||||
MKBOOTFS := $(HOST_OUT_EXECUTABLES)/mkbootfs$(HOST_EXECUTABLE_SUFFIX)
|
MKBOOTFS := $(HOST_OUT_EXECUTABLES)/mkbootfs$(HOST_EXECUTABLE_SUFFIX)
|
||||||
|
MINIGZIP := $(HOST_OUT_EXECUTABLES)/minigzip$(HOST_EXECUTABLE_SUFFIX)
|
||||||
MKBOOTIMG := $(HOST_OUT_EXECUTABLES)/mkbootimg$(HOST_EXECUTABLE_SUFFIX)
|
MKBOOTIMG := $(HOST_OUT_EXECUTABLES)/mkbootimg$(HOST_EXECUTABLE_SUFFIX)
|
||||||
MKYAFFS2 := $(HOST_OUT_EXECUTABLES)/mkyaffs2image$(HOST_EXECUTABLE_SUFFIX)
|
MKYAFFS2 := $(HOST_OUT_EXECUTABLES)/mkyaffs2image$(HOST_EXECUTABLE_SUFFIX)
|
||||||
APICHECK := $(HOST_OUT_EXECUTABLES)/apicheck$(HOST_EXECUTABLE_SUFFIX)
|
APICHECK := $(HOST_OUT_EXECUTABLES)/apicheck$(HOST_EXECUTABLE_SUFFIX)
|
||||||
@@ -227,19 +226,15 @@ endif
|
|||||||
# ###############################################################
|
# ###############################################################
|
||||||
|
|
||||||
HOST_GLOBAL_CFLAGS += $(COMMON_GLOBAL_CFLAGS)
|
HOST_GLOBAL_CFLAGS += $(COMMON_GLOBAL_CFLAGS)
|
||||||
HOST_DEBUG_CFLAGS += $(COMMON_DEBUG_CFLAGS)
|
|
||||||
HOST_RELEASE_CFLAGS += $(COMMON_RELEASE_CFLAGS)
|
HOST_RELEASE_CFLAGS += $(COMMON_RELEASE_CFLAGS)
|
||||||
|
|
||||||
HOST_GLOBAL_CPPFLAGS += $(COMMON_GLOBAL_CPPFLAGS)
|
HOST_GLOBAL_CPPFLAGS += $(COMMON_GLOBAL_CPPFLAGS)
|
||||||
HOST_DEBUG_CPPFLAGS += $(COMMON_DEBUG_CPPFLAGS)
|
|
||||||
HOST_RELEASE_CPPFLAGS += $(COMMON_RELEASE_CPPFLAGS)
|
HOST_RELEASE_CPPFLAGS += $(COMMON_RELEASE_CPPFLAGS)
|
||||||
|
|
||||||
TARGET_GLOBAL_CFLAGS += $(COMMON_GLOBAL_CFLAGS)
|
TARGET_GLOBAL_CFLAGS += $(COMMON_GLOBAL_CFLAGS)
|
||||||
TARGET_DEBUG_CFLAGS += $(COMMON_DEBUG_CFLAGS)
|
|
||||||
TARGET_RELEASE_CFLAGS += $(COMMON_RELEASE_CFLAGS)
|
TARGET_RELEASE_CFLAGS += $(COMMON_RELEASE_CFLAGS)
|
||||||
|
|
||||||
TARGET_GLOBAL_CPPFLAGS += $(COMMON_GLOBAL_CPPFLAGS)
|
TARGET_GLOBAL_CPPFLAGS += $(COMMON_GLOBAL_CPPFLAGS)
|
||||||
TARGET_DEBUG_CPPFLAGS += $(COMMON_DEBUG_CPPFLAGS)
|
|
||||||
TARGET_RELEASE_CPPFLAGS += $(COMMON_RELEASE_CPPFLAGS)
|
TARGET_RELEASE_CPPFLAGS += $(COMMON_RELEASE_CPPFLAGS)
|
||||||
|
|
||||||
HOST_GLOBAL_LD_DIRS += -L$(HOST_OUT_INTERMEDIATE_LIBRARIES)
|
HOST_GLOBAL_LD_DIRS += -L$(HOST_OUT_INTERMEDIATE_LIBRARIES)
|
||||||
@@ -250,7 +245,7 @@ TARGET_PROJECT_INCLUDES:= $(SRC_HEADERS) $(TARGET_OUT_HEADERS)
|
|||||||
|
|
||||||
# Many host compilers don't support these flags, so we have to make
|
# Many host compilers don't support these flags, so we have to make
|
||||||
# sure to only specify them for the target compilers checked in to
|
# sure to only specify them for the target compilers checked in to
|
||||||
# the source tree. The simulator uses the target flags but the
|
# the source tree. The simulator passes the target flags to the
|
||||||
# host compiler, so only set them for the target when the target
|
# host compiler, so only set them for the target when the target
|
||||||
# is not the simulator.
|
# is not the simulator.
|
||||||
ifneq ($(TARGET_SIMULATOR),true)
|
ifneq ($(TARGET_SIMULATOR),true)
|
||||||
@@ -258,21 +253,11 @@ TARGET_GLOBAL_CFLAGS += $(TARGET_ERROR_FLAGS)
|
|||||||
TARGET_GLOBAL_CPPFLAGS += $(TARGET_ERROR_FLAGS)
|
TARGET_GLOBAL_CPPFLAGS += $(TARGET_ERROR_FLAGS)
|
||||||
endif
|
endif
|
||||||
|
|
||||||
ifeq ($(HOST_BUILD_TYPE),release)
|
HOST_GLOBAL_CFLAGS += $(HOST_RELEASE_CFLAGS)
|
||||||
HOST_GLOBAL_CFLAGS+= $(HOST_RELEASE_CFLAGS)
|
HOST_GLOBAL_CPPFLAGS += $(HOST_RELEASE_CPPFLAGS)
|
||||||
HOST_GLOBAL_CPPFLAGS+= $(HOST_RELEASE_CPPFLAGS)
|
|
||||||
else
|
|
||||||
HOST_GLOBAL_CFLAGS+= $(HOST_DEBUG_CFLAGS)
|
|
||||||
HOST_GLOBAL_CPPFLAGS+= $(HOST_DEBUG_CPPFLAGS)
|
|
||||||
endif
|
|
||||||
|
|
||||||
ifeq ($(TARGET_BUILD_TYPE),release)
|
TARGET_GLOBAL_CFLAGS += $(TARGET_RELEASE_CFLAGS)
|
||||||
TARGET_GLOBAL_CFLAGS+= $(TARGET_RELEASE_CFLAGS)
|
TARGET_GLOBAL_CPPFLAGS += $(TARGET_RELEASE_CPPFLAGS)
|
||||||
TARGET_GLOBAL_CPPFLAGS+= $(TARGET_RELEASE_CPPFLAGS)
|
|
||||||
else
|
|
||||||
TARGET_GLOBAL_CFLAGS+= $(TARGET_DEBUG_CFLAGS)
|
|
||||||
TARGET_GLOBAL_CPPFLAGS+= $(TARGET_DEBUG_CPPFLAGS)
|
|
||||||
endif
|
|
||||||
|
|
||||||
# TODO: do symbol compression
|
# TODO: do symbol compression
|
||||||
TARGET_COMPRESS_MODULE_SYMBOLS := false
|
TARGET_COMPRESS_MODULE_SYMBOLS := false
|
||||||
@@ -290,7 +275,7 @@ PREBUILT_IS_PRESENT := $(if $(wildcard prebuilt/Android.mk),true)
|
|||||||
# The 'current' version is whatever this source tree is. Once the apicheck
|
# The 'current' version is whatever this source tree is. Once the apicheck
|
||||||
# tool can generate the stubs from the xml files, we'll use that to be
|
# tool can generate the stubs from the xml files, we'll use that to be
|
||||||
# able to build back-versions. In the meantime, 'current' is the only
|
# able to build back-versions. In the meantime, 'current' is the only
|
||||||
# one supported.
|
# one supported.
|
||||||
#
|
#
|
||||||
# sgrax is the opposite of xargs. It takes the list of args and puts them
|
# sgrax is the opposite of xargs. It takes the list of args and puts them
|
||||||
# on each line for sort to process.
|
# on each line for sort to process.
|
||||||
|
@@ -793,6 +793,22 @@ $(transform-s-to-o-no-deps)
|
|||||||
$(hide) $(transform-d-to-p)
|
$(hide) $(transform-d-to-p)
|
||||||
endef
|
endef
|
||||||
|
|
||||||
|
###########################################################
|
||||||
|
## Commands for running gcc to compile an Objective-C file
|
||||||
|
## This should never happen for target builds but this
|
||||||
|
## will error at build time.
|
||||||
|
###########################################################
|
||||||
|
|
||||||
|
define transform-m-to-o-no-deps
|
||||||
|
@echo "target ObjC: $(PRIVATE_MODULE) <= $<"
|
||||||
|
$(call transform-c-or-s-to-o-no-deps)
|
||||||
|
endef
|
||||||
|
|
||||||
|
define transform-m-to-o
|
||||||
|
$(transform-m-to-o-no-deps)
|
||||||
|
$(hide) $(transform-d-to-p)
|
||||||
|
endef
|
||||||
|
|
||||||
###########################################################
|
###########################################################
|
||||||
## Commands for running gcc to compile a host C++ file
|
## Commands for running gcc to compile a host C++ file
|
||||||
###########################################################
|
###########################################################
|
||||||
@@ -870,16 +886,46 @@ $(transform-host-s-to-o-no-deps)
|
|||||||
$(transform-d-to-p)
|
$(transform-d-to-p)
|
||||||
endef
|
endef
|
||||||
|
|
||||||
|
###########################################################
|
||||||
|
## Commands for running gcc to compile a host Objective-C file
|
||||||
|
###########################################################
|
||||||
|
|
||||||
|
define transform-host-m-to-o-no-deps
|
||||||
|
@echo "host ObjC: $(PRIVATE_MODULE) <= $<"
|
||||||
|
$(call transform-host-c-or-s-to-o-no-deps)
|
||||||
|
endef
|
||||||
|
|
||||||
|
define tranform-host-m-to-o
|
||||||
|
$(transform-host-m-to-o-no-deps)
|
||||||
|
$(transform-d-to-p)
|
||||||
|
endef
|
||||||
|
|
||||||
###########################################################
|
###########################################################
|
||||||
## Commands for running ar
|
## Commands for running ar
|
||||||
###########################################################
|
###########################################################
|
||||||
|
|
||||||
|
define extract-and-include-whole-static-libs
|
||||||
|
$(foreach lib,$(PRIVATE_ALL_WHOLE_STATIC_LIBRARIES), \
|
||||||
|
@echo "preparing StaticLib: $(PRIVATE_MODULE) [including $(lib)]"; \
|
||||||
|
ldir=$(PRIVATE_INTERMEDIATES_DIR)/WHOLE/$(basename $(notdir $(lib)))_objs;\
|
||||||
|
rm -rf $$ldir; \
|
||||||
|
mkdir -p $$ldir; \
|
||||||
|
filelist=; \
|
||||||
|
for f in `$(TARGET_AR) t $(lib)`; do \
|
||||||
|
$(TARGET_AR) p $(lib) $$f > $$ldir/$$f; \
|
||||||
|
filelist="$$filelist $$ldir/$$f"; \
|
||||||
|
done ; \
|
||||||
|
$(TARGET_AR) $(TARGET_GLOBAL_ARFLAGS) $(PRIVATE_ARFLAGS) $@ $$filelist;\
|
||||||
|
)
|
||||||
|
endef
|
||||||
|
|
||||||
# Explicitly delete the archive first so that ar doesn't
|
# Explicitly delete the archive first so that ar doesn't
|
||||||
# try to add to an existing archive.
|
# try to add to an existing archive.
|
||||||
define transform-o-to-static-lib
|
define transform-o-to-static-lib
|
||||||
@mkdir -p $(dir $@)
|
@mkdir -p $(dir $@)
|
||||||
@echo "target StaticLib: $(PRIVATE_MODULE) ($@)"
|
|
||||||
@rm -f $@
|
@rm -f $@
|
||||||
|
$(extract-and-include-whole-static-libs)
|
||||||
|
@echo "target StaticLib: $(PRIVATE_MODULE) ($@)"
|
||||||
$(hide) $(TARGET_AR) $(TARGET_GLOBAL_ARFLAGS) $(PRIVATE_ARFLAGS) $@ $^
|
$(hide) $(TARGET_AR) $(TARGET_GLOBAL_ARFLAGS) $(PRIVATE_ARFLAGS) $@ $^
|
||||||
endef
|
endef
|
||||||
|
|
||||||
@@ -1122,7 +1168,11 @@ $(hide) $(AAPT) package $(PRIVATE_AAPT_FLAGS) -m -z \
|
|||||||
$(addprefix -P , $(PRIVATE_RESOURCE_PUBLICS_OUTPUT)) \
|
$(addprefix -P , $(PRIVATE_RESOURCE_PUBLICS_OUTPUT)) \
|
||||||
$(addprefix -S , $(PRIVATE_RESOURCE_DIR)) \
|
$(addprefix -S , $(PRIVATE_RESOURCE_DIR)) \
|
||||||
$(addprefix -A , $(PRIVATE_ASSET_DIR)) \
|
$(addprefix -A , $(PRIVATE_ASSET_DIR)) \
|
||||||
$(addprefix -I , $(PRIVATE_AAPT_INCLUDES))
|
$(addprefix -I , $(PRIVATE_AAPT_INCLUDES)) \
|
||||||
|
$(addprefix --min-sdk-version , $(DEFAULT_APP_TARGET_SDK)) \
|
||||||
|
$(addprefix --target-sdk-version , $(DEFAULT_APP_TARGET_SDK)) \
|
||||||
|
$(addprefix --version-code , $(PLATFORM_SDK_VERSION)) \
|
||||||
|
$(addprefix --version-name , $(PLATFORM_VERSION))
|
||||||
endef
|
endef
|
||||||
|
|
||||||
ifeq ($(HOST_OS),windows)
|
ifeq ($(HOST_OS),windows)
|
||||||
@@ -1174,7 +1224,7 @@ define unzip-jar-files
|
|||||||
echo Missing file $$f; \
|
echo Missing file $$f; \
|
||||||
exit 1; \
|
exit 1; \
|
||||||
fi; \
|
fi; \
|
||||||
unzip -q $$f -d $(2); \
|
unzip -qo $$f -d $(2); \
|
||||||
(cd $(2) && rm -rf META-INF); \
|
(cd $(2) && rm -rf META-INF); \
|
||||||
done
|
done
|
||||||
endef
|
endef
|
||||||
@@ -1189,21 +1239,21 @@ $(hide) rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR)
|
|||||||
$(hide) mkdir -p $(PRIVATE_CLASS_INTERMEDIATES_DIR)
|
$(hide) mkdir -p $(PRIVATE_CLASS_INTERMEDIATES_DIR)
|
||||||
$(call unzip-jar-files,$(PRIVATE_STATIC_JAVA_LIBRARIES), \
|
$(call unzip-jar-files,$(PRIVATE_STATIC_JAVA_LIBRARIES), \
|
||||||
$(PRIVATE_CLASS_INTERMEDIATES_DIR))
|
$(PRIVATE_CLASS_INTERMEDIATES_DIR))
|
||||||
$(call dump-words-to-file,$(PRIVATE_JAVA_SOURCES),$(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list)
|
$(call dump-words-to-file,$(PRIVATE_JAVA_SOURCES),$(PRIVATE_INTERMEDIATES_DIR)/java-source-list)
|
||||||
$(hide) if [ -d "$(PRIVATE_SOURCE_INTERMEDIATES_DIR)" ]; then \
|
$(hide) if [ -d "$(PRIVATE_SOURCE_INTERMEDIATES_DIR)" ]; then \
|
||||||
find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) -name '*.java' >> $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list; \
|
find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) -name '*.java' >> $(PRIVATE_INTERMEDIATES_DIR)/java-source-list; \
|
||||||
fi
|
fi
|
||||||
$(hide) tr ' ' '\n' < $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list \
|
$(hide) tr ' ' '\n' < $(PRIVATE_INTERMEDIATES_DIR)/java-source-list \
|
||||||
| sort -u > $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq
|
| sort -u > $(PRIVATE_INTERMEDIATES_DIR)/java-source-list-uniq
|
||||||
$(hide) $(TARGET_JAVAC) -encoding ascii $(PRIVATE_BOOTCLASSPATH) \
|
$(hide) $(TARGET_JAVAC) -encoding ascii $(PRIVATE_BOOTCLASSPATH) \
|
||||||
$(addprefix -classpath ,$(strip \
|
$(addprefix -classpath ,$(strip \
|
||||||
$(call normalize-path-list,$(PRIVATE_ALL_JAVA_LIBRARIES)))) \
|
$(call normalize-path-list,$(PRIVATE_ALL_JAVA_LIBRARIES)))) \
|
||||||
$(strip $(PRIVATE_JAVAC_DEBUG_FLAGS)) $(xlint_unchecked) \
|
$(strip $(PRIVATE_JAVAC_DEBUG_FLAGS)) $(xlint_unchecked) \
|
||||||
-extdirs "" -d $(PRIVATE_CLASS_INTERMEDIATES_DIR) \
|
-extdirs "" -d $(PRIVATE_CLASS_INTERMEDIATES_DIR) \
|
||||||
\@$(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq \
|
\@$(PRIVATE_INTERMEDIATES_DIR)/java-source-list-uniq \
|
||||||
|| ( rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR) ; exit 41 )
|
|| ( rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR) ; exit 41 )
|
||||||
$(hide) rm -f $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list
|
$(hide) rm -f $(PRIVATE_INTERMEDIATES_DIR)/java-source-list
|
||||||
$(hide) rm -f $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq
|
$(hide) rm -f $(PRIVATE_INTERMEDIATES_DIR)/java-source-list-uniq
|
||||||
$(hide) mkdir -p $(dir $@)
|
$(hide) mkdir -p $(dir $@)
|
||||||
$(hide) jar $(if $(strip $(PRIVATE_JAR_MANIFEST)),-cfm,-cf) \
|
$(hide) jar $(if $(strip $(PRIVATE_JAR_MANIFEST)),-cfm,-cf) \
|
||||||
$@ $(PRIVATE_JAR_MANIFEST) -C $(PRIVATE_CLASS_INTERMEDIATES_DIR) .
|
$@ $(PRIVATE_JAR_MANIFEST) -C $(PRIVATE_CLASS_INTERMEDIATES_DIR) .
|
||||||
@@ -1250,6 +1300,9 @@ endef
|
|||||||
# A list of dynamic and static parameters; build layers for
|
# A list of dynamic and static parameters; build layers for
|
||||||
# dynamic params that lay over the static ones.
|
# dynamic params that lay over the static ones.
|
||||||
#TODO: update the manifest to point to the package file
|
#TODO: update the manifest to point to the package file
|
||||||
|
#Note that the version numbers are given to aapt as simple default
|
||||||
|
#values; applications can override these by explicitly stating
|
||||||
|
#them in their manifest.
|
||||||
define add-assets-to-package
|
define add-assets-to-package
|
||||||
$(hide) $(AAPT) package -z -u $(PRIVATE_AAPT_FLAGS) \
|
$(hide) $(AAPT) package -z -u $(PRIVATE_AAPT_FLAGS) \
|
||||||
$(addprefix -c , $(PRODUCT_AAPT_CONFIG)) \
|
$(addprefix -c , $(PRODUCT_AAPT_CONFIG)) \
|
||||||
@@ -1257,6 +1310,10 @@ $(hide) $(AAPT) package -z -u $(PRIVATE_AAPT_FLAGS) \
|
|||||||
$(addprefix -S , $(PRIVATE_RESOURCE_DIR)) \
|
$(addprefix -S , $(PRIVATE_RESOURCE_DIR)) \
|
||||||
$(addprefix -A , $(PRIVATE_ASSET_DIR)) \
|
$(addprefix -A , $(PRIVATE_ASSET_DIR)) \
|
||||||
$(addprefix -I , $(PRIVATE_AAPT_INCLUDES)) \
|
$(addprefix -I , $(PRIVATE_AAPT_INCLUDES)) \
|
||||||
|
$(addprefix --min-sdk-version , $(DEFAULT_APP_TARGET_SDK)) \
|
||||||
|
$(addprefix --target-sdk-version , $(DEFAULT_APP_TARGET_SDK)) \
|
||||||
|
$(addprefix --version-code , $(PLATFORM_SDK_VERSION)) \
|
||||||
|
$(addprefix --version-name , $(PLATFORM_VERSION)) \
|
||||||
-F $@
|
-F $@
|
||||||
endef
|
endef
|
||||||
|
|
||||||
@@ -1328,14 +1385,16 @@ $(call unzip-jar-files,$(PRIVATE_STATIC_JAVA_LIBRARIES), \
|
|||||||
$(PRIVATE_CLASS_INTERMEDIATES_DIR))
|
$(PRIVATE_CLASS_INTERMEDIATES_DIR))
|
||||||
$(call dump-words-to-file,$(sort\
|
$(call dump-words-to-file,$(sort\
|
||||||
$(PRIVATE_JAVA_SOURCES)),\
|
$(PRIVATE_JAVA_SOURCES)),\
|
||||||
$(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq)
|
$(PRIVATE_INTERMEDIATES_DIR)/java-source-list-uniq)
|
||||||
$(hide) $(HOST_JAVAC) -encoding ascii -g \
|
$(hide) $(HOST_JAVAC) -encoding ascii -g \
|
||||||
$(xlint_unchecked) \
|
$(xlint_unchecked) \
|
||||||
$(addprefix -classpath ,$(strip \
|
$(addprefix -classpath ,$(strip \
|
||||||
$(call normalize-path-list,$(PRIVATE_ALL_JAVA_LIBRARIES)))) \
|
$(call normalize-path-list,$(PRIVATE_ALL_JAVA_LIBRARIES)))) \
|
||||||
-extdirs "" -d $(PRIVATE_CLASS_INTERMEDIATES_DIR)\
|
-extdirs "" -d $(PRIVATE_CLASS_INTERMEDIATES_DIR)\
|
||||||
\@$(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq || \
|
\@$(PRIVATE_INTERMEDIATES_DIR)/java-source-list-uniq || \
|
||||||
( rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR) ; exit 41 )
|
( rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR) ; exit 41 )
|
||||||
|
$(hide) rm -f $(PRIVATE_INTERMEDIATES_DIR)/java-source-list
|
||||||
|
$(hide) rm -f $(PRIVATE_INTERMEDIATES_DIR)/java-source-list-uniq
|
||||||
$(hide) jar $(if $(strip $(PRIVATE_JAR_MANIFEST)),-cfm,-cf) \
|
$(hide) jar $(if $(strip $(PRIVATE_JAR_MANIFEST)),-cfm,-cf) \
|
||||||
$@ $(PRIVATE_JAR_MANIFEST) $(PRIVATE_EXTRA_JAR_ARGS) \
|
$@ $(PRIVATE_JAR_MANIFEST) $(PRIVATE_EXTRA_JAR_ARGS) \
|
||||||
-C $(PRIVATE_CLASS_INTERMEDIATES_DIR) .
|
-C $(PRIVATE_CLASS_INTERMEDIATES_DIR) .
|
||||||
@@ -1481,8 +1540,16 @@ ifndef get-file-size
|
|||||||
$(error HOST_OS must define get-file-size)
|
$(error HOST_OS must define get-file-size)
|
||||||
endif
|
endif
|
||||||
|
|
||||||
# $(1): The file to check (often $@)
|
# Convert a partition data size (eg, as reported in /proc/mtd) to the
|
||||||
# $(2): The maximum size, in decimal bytes
|
# size of the image used to flash that partition (which includes a
|
||||||
|
# 64-byte spare area for each 2048-byte page).
|
||||||
|
# $(1): the partition data size
|
||||||
|
define image-size-from-data-size
|
||||||
|
$(shell echo $$(($(1) / 2048 * (2048+64))))
|
||||||
|
endef
|
||||||
|
|
||||||
|
# $(1): The file(s) to check (often $@)
|
||||||
|
# $(2): The maximum total image size, in decimal bytes
|
||||||
#
|
#
|
||||||
# If $(2) is empty, evaluates to "true"
|
# If $(2) is empty, evaluates to "true"
|
||||||
#
|
#
|
||||||
@@ -1491,19 +1558,21 @@ endif
|
|||||||
# next whole flash block size.
|
# next whole flash block size.
|
||||||
define assert-max-file-size
|
define assert-max-file-size
|
||||||
$(if $(2), \
|
$(if $(2), \
|
||||||
fileSize=`$(call get-file-size,$(1))`; \
|
size=$$(for i in $(1); do $(call get-file-size,$$i); done); \
|
||||||
maxSize=$(2); \
|
total=$$(( $$( echo "$$size" | tr '\n' + ; echo 0 ) )); \
|
||||||
onePct=`expr "(" $$maxSize + 99 ")" / 100`; \
|
printname=$$(echo -n "$(1)" | tr " " +); \
|
||||||
onePct=`expr "(" "(" $$onePct + $(BOARD_FLASH_BLOCK_SIZE) - 1 ")" / \
|
echo "$$printname total size is $$total"; \
|
||||||
$(BOARD_FLASH_BLOCK_SIZE) ")" "*" $(BOARD_FLASH_BLOCK_SIZE)`; \
|
img_blocksize=$(call image-size-from-data-size,$(BOARD_FLASH_BLOCK_SIZE)); \
|
||||||
reserve=`expr 2 "*" $(BOARD_FLASH_BLOCK_SIZE)`; \
|
twoblocks=$$((img_blocksize * 2)); \
|
||||||
if [ "$$onePct" -gt "$$reserve" ]; then \
|
onepct=$$((((($(2) / 100) - 1) / img_blocksize + 1) * img_blocksize)); \
|
||||||
reserve="$$onePct"; \
|
reserve=$$((twoblocks > onepct ? twoblocks : onepct)); \
|
||||||
|
maxsize=$$(($(2) - reserve)); \
|
||||||
|
if [ "$$total" -gt "$$maxsize" ]; then \
|
||||||
|
echo "error: $$printname too large ($$total > [$(2) - $$reserve])"; \
|
||||||
|
false; \
|
||||||
fi; \
|
fi; \
|
||||||
maxSize=`expr $$maxSize - $$reserve`; \
|
if [ "$$total" -gt $$((maxsize - 32768)) ]; then \
|
||||||
if [ "$$fileSize" -gt "$$maxSize" ]; then \
|
echo "WARNING: $$printname approaching size limit ($$total now; limit $$maxsize)"; \
|
||||||
echo "error: $(1) too large ($$fileSize > [$(2) - $$reserve])"; \
|
|
||||||
false; \
|
|
||||||
fi \
|
fi \
|
||||||
, \
|
, \
|
||||||
true \
|
true \
|
||||||
|
@@ -7,6 +7,9 @@
|
|||||||
# OUT_DIR is also set to "out" if it's not already set.
|
# OUT_DIR is also set to "out" if it's not already set.
|
||||||
# this allows you to set it to somewhere else if you like
|
# this allows you to set it to somewhere else if you like
|
||||||
|
|
||||||
|
# Set up version information.
|
||||||
|
include $(BUILD_SYSTEM)/version_defaults.mk
|
||||||
|
|
||||||
# ---------------------------------------------------------------
|
# ---------------------------------------------------------------
|
||||||
# If you update the build system such that the environment setup
|
# If you update the build system such that the environment setup
|
||||||
# or buildspec.mk need to be updated, increment this number, and
|
# or buildspec.mk need to be updated, increment this number, and
|
||||||
@@ -319,6 +322,8 @@ endif # CALLED_FROM_SETUP
|
|||||||
|
|
||||||
ifneq ($(PRINT_BUILD_CONFIG),)
|
ifneq ($(PRINT_BUILD_CONFIG),)
|
||||||
$(info ============================================)
|
$(info ============================================)
|
||||||
|
$(info PLATFORM_VERSION_CODENAME=$(PLATFORM_VERSION_CODENAME))
|
||||||
|
$(info PLATFORM_VERSION=$(PLATFORM_VERSION))
|
||||||
$(info TARGET_PRODUCT=$(TARGET_PRODUCT))
|
$(info TARGET_PRODUCT=$(TARGET_PRODUCT))
|
||||||
$(info TARGET_BUILD_VARIANT=$(TARGET_BUILD_VARIANT))
|
$(info TARGET_BUILD_VARIANT=$(TARGET_BUILD_VARIANT))
|
||||||
$(info TARGET_SIMULATOR=$(TARGET_SIMULATOR))
|
$(info TARGET_SIMULATOR=$(TARGET_SIMULATOR))
|
||||||
|
@@ -188,9 +188,8 @@ $(findbugs_html) : PRIVATE_XML_FILE := $(findbugs_xml)
|
|||||||
$(LOCAL_MODULE)-findbugs : $(findbugs_html)
|
$(LOCAL_MODULE)-findbugs : $(findbugs_html)
|
||||||
$(findbugs_html) : $(findbugs_xml)
|
$(findbugs_html) : $(findbugs_xml)
|
||||||
@mkdir -p $(dir $@)
|
@mkdir -p $(dir $@)
|
||||||
@echo UnionBugs: $@
|
@echo ConvertXmlToText: $@
|
||||||
$(hide) prebuilt/common/findbugs/bin/unionBugs $(PRIVATE_XML_FILE) \
|
$(hide) prebuilt/common/findbugs/bin/convertXmlToText -html:fancy.xsl $(PRIVATE_XML_FILE) \
|
||||||
| prebuilt/common/findbugs/bin/convertXmlToText -html:fancy.xsl \
|
|
||||||
> $@
|
> $@
|
||||||
|
|
||||||
$(LOCAL_MODULE)-findbugs : $(findbugs_html)
|
$(LOCAL_MODULE)-findbugs : $(findbugs_html)
|
||||||
|
45
core/main.mk
45
core/main.mk
@@ -85,8 +85,43 @@ $(warning ************************************************************)
|
|||||||
$(error Directory names containing spaces not supported)
|
$(error Directory names containing spaces not supported)
|
||||||
endif
|
endif
|
||||||
|
|
||||||
# Set up version information.
|
|
||||||
include $(BUILD_SYSTEM)/version_defaults.mk
|
# The windows build server currently uses 1.6. This will be fixed.
|
||||||
|
ifneq ($(HOST_OS),windows)
|
||||||
|
|
||||||
|
# Check for the correct version of java
|
||||||
|
java_version := $(shell java -version 2>&1 | head -n 1 | grep '[ "]1\.5[\. "$$]')
|
||||||
|
ifeq ($(strip $(java_version)),)
|
||||||
|
$(info ************************************************************)
|
||||||
|
$(info You are attempting to build with the incorrect version)
|
||||||
|
$(info of java.)
|
||||||
|
$(info $(space))
|
||||||
|
$(info Your version is: $(shell java -version 2>&1 | head -n 1).)
|
||||||
|
$(info The correct version is: 1.5.)
|
||||||
|
$(info $(space))
|
||||||
|
$(info Please follow the machine setup instructions at)
|
||||||
|
$(info $(space)$(space)$(space)$(space)http://source.android.com/download)
|
||||||
|
$(info ************************************************************)
|
||||||
|
$(error stop)
|
||||||
|
endif
|
||||||
|
|
||||||
|
# Check for the correct version of javac
|
||||||
|
javac_version := $(shell javac -version 2>&1 | head -n 1 | grep '[ "]1\.5[\. "$$]')
|
||||||
|
ifeq ($(strip $(javac_version)),)
|
||||||
|
$(info ************************************************************)
|
||||||
|
$(info You are attempting to build with the incorrect version)
|
||||||
|
$(info of javac.)
|
||||||
|
$(info $(space))
|
||||||
|
$(info Your version is: $(shell javac -version 2>&1 | head -n 1).)
|
||||||
|
$(info The correct version is: 1.5.)
|
||||||
|
$(info $(space))
|
||||||
|
$(info Please follow the machine setup instructions at)
|
||||||
|
$(info $(space)$(space)$(space)$(space)http://source.android.com/download)
|
||||||
|
$(info ************************************************************)
|
||||||
|
$(error stop)
|
||||||
|
endif
|
||||||
|
|
||||||
|
endif # windows
|
||||||
|
|
||||||
# These are the modifier targets that don't do anything themselves, but
|
# These are the modifier targets that don't do anything themselves, but
|
||||||
# change the behavior of the build.
|
# change the behavior of the build.
|
||||||
@@ -309,7 +344,6 @@ subdirs := \
|
|||||||
dalvik/tools/dmtracedump \
|
dalvik/tools/dmtracedump \
|
||||||
dalvik/tools/hprof-conv \
|
dalvik/tools/hprof-conv \
|
||||||
development/emulator/mksdcard \
|
development/emulator/mksdcard \
|
||||||
development/tools/activitycreator \
|
|
||||||
development/tools/line_endings \
|
development/tools/line_endings \
|
||||||
development/host \
|
development/host \
|
||||||
external/expat \
|
external/expat \
|
||||||
@@ -336,6 +370,7 @@ subdirs += \
|
|||||||
dalvik/dx \
|
dalvik/dx \
|
||||||
dalvik/libcore \
|
dalvik/libcore \
|
||||||
development/apps \
|
development/apps \
|
||||||
|
development/tools/archquery \
|
||||||
development/tools/androidprefs \
|
development/tools/androidprefs \
|
||||||
development/tools/apkbuilder \
|
development/tools/apkbuilder \
|
||||||
development/tools/jarutils \
|
development/tools/jarutils \
|
||||||
@@ -418,6 +453,10 @@ board_config_mk :=
|
|||||||
|
|
||||||
# Clean up/verify variables defined by the board config file.
|
# Clean up/verify variables defined by the board config file.
|
||||||
TARGET_BOOTLOADER_BOARD_NAME := $(strip $(TARGET_BOOTLOADER_BOARD_NAME))
|
TARGET_BOOTLOADER_BOARD_NAME := $(strip $(TARGET_BOOTLOADER_BOARD_NAME))
|
||||||
|
TARGET_CPU_ABI := $(strip $(TARGET_CPU_ABI))
|
||||||
|
ifeq ($(TARGET_CPU_ABI),)
|
||||||
|
$(error No TARGET_CPU_ABI defined by board config: $(board_config_mk))
|
||||||
|
endif
|
||||||
|
|
||||||
#
|
#
|
||||||
# Include all of the makefiles in the system
|
# Include all of the makefiles in the system
|
||||||
|
@@ -83,6 +83,8 @@ FRAMEWORKS_BASE_SUBDIRS := \
|
|||||||
sax \
|
sax \
|
||||||
telephony \
|
telephony \
|
||||||
wifi \
|
wifi \
|
||||||
|
vpn \
|
||||||
|
keystore \
|
||||||
)
|
)
|
||||||
|
|
||||||
#
|
#
|
||||||
|
@@ -21,6 +21,7 @@ libz.so 0xAF900000
|
|||||||
libevent.so 0xAF800000
|
libevent.so 0xAF800000
|
||||||
libssl.so 0xAF700000
|
libssl.so 0xAF700000
|
||||||
libcrypto.so 0xAF500000
|
libcrypto.so 0xAF500000
|
||||||
|
libsysutils.so 0xAF400000
|
||||||
|
|
||||||
# bluetooth
|
# bluetooth
|
||||||
liba2dp.so 0xAEE00000
|
liba2dp.so 0xAEE00000
|
||||||
@@ -104,6 +105,10 @@ libOmxH264Dec.so 0xA6F90000
|
|||||||
libOmxVidEnc.so 0xA6F60000
|
libOmxVidEnc.so 0xA6F60000
|
||||||
libopencorehw.so 0xA6F50000
|
libopencorehw.so 0xA6F50000
|
||||||
|
|
||||||
|
# pv libraries
|
||||||
|
libopencore_common.so 0xA6000000
|
||||||
|
libqcomm_omx.so 0xA5A00000
|
||||||
|
|
||||||
# libraries for specific apps or temporary libraries
|
# libraries for specific apps or temporary libraries
|
||||||
libcam_ipl.so 0x9F000000
|
libcam_ipl.so 0x9F000000
|
||||||
libwbxml.so 0x9E800000
|
libwbxml.so 0x9E800000
|
||||||
@@ -128,3 +133,4 @@ libUAPI_jni.so 0x9A500000
|
|||||||
librpc.so 0x9A400000
|
librpc.so 0x9A400000
|
||||||
libtrace_test.so 0x9A300000
|
libtrace_test.so 0x9A300000
|
||||||
libsrec_jni.so 0x9A200000
|
libsrec_jni.so 0x9A200000
|
||||||
|
libcerttool_jni.so 0x9A100000
|
||||||
|
@@ -110,11 +110,11 @@ ifdef product_goals
|
|||||||
TARGET_BUILD_VARIANT := $(word 2,$(product_goals))
|
TARGET_BUILD_VARIANT := $(word 2,$(product_goals))
|
||||||
|
|
||||||
# The build server wants to do make PRODUCT-dream-installclean
|
# The build server wants to do make PRODUCT-dream-installclean
|
||||||
# which really means TARGET_PRODUCT=dream make installclean.
|
# which really means TARGET_PRODUCT=dream make installclean.
|
||||||
ifneq ($(filter-out $(INTERNAL_VALID_VARIANTS),$(TARGET_BUILD_VARIANT)),)
|
ifneq ($(filter-out $(INTERNAL_VALID_VARIANTS),$(TARGET_BUILD_VARIANT)),)
|
||||||
MAKECMDGOALS := $(MAKECMDGOALS) $(TARGET_BUILD_VARIANT)
|
MAKECMDGOALS := $(MAKECMDGOALS) $(TARGET_BUILD_VARIANT)
|
||||||
TARGET_BUILD_VARIANT := eng
|
TARGET_BUILD_VARIANT := eng
|
||||||
default_goal_substitution :=
|
default_goal_substitution :=
|
||||||
else
|
else
|
||||||
default_goal_substitution := $(DEFAULT_GOAL)
|
default_goal_substitution := $(DEFAULT_GOAL)
|
||||||
endif
|
endif
|
||||||
@@ -135,7 +135,7 @@ ifdef product_goals
|
|||||||
#
|
#
|
||||||
# Note that modifying this will not affect the goals that make will
|
# Note that modifying this will not affect the goals that make will
|
||||||
# attempt to build, but it's important because we inspect this value
|
# attempt to build, but it's important because we inspect this value
|
||||||
# in certain situations (like for "make sdk").
|
# in certain situations (like for "make sdk").
|
||||||
#
|
#
|
||||||
MAKECMDGOALS := $(patsubst $(goal_name),$(default_goal_substitution),$(MAKECMDGOALS))
|
MAKECMDGOALS := $(patsubst $(goal_name),$(default_goal_substitution),$(MAKECMDGOALS))
|
||||||
|
|
||||||
@@ -185,7 +185,10 @@ PRODUCT_LOCALES := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_LOCALES))
|
|||||||
# in PRODUCT_LOCALES, add them to PRODUCT_LOCALES.
|
# in PRODUCT_LOCALES, add them to PRODUCT_LOCALES.
|
||||||
extra_locales := $(filter-out $(PRODUCT_LOCALES),$(CUSTOM_LOCALES))
|
extra_locales := $(filter-out $(PRODUCT_LOCALES),$(CUSTOM_LOCALES))
|
||||||
ifneq (,$(extra_locales))
|
ifneq (,$(extra_locales))
|
||||||
$(info Adding CUSTOM_LOCALES [$(extra_locales)] to PRODUCT_LOCALES [$(PRODUCT_LOCALES)])
|
ifneq ($(CALLED_FROM_SETUP),true)
|
||||||
|
# Don't spam stdout, because envsetup.sh may be scraping values from it.
|
||||||
|
$(info Adding CUSTOM_LOCALES [$(extra_locales)] to PRODUCT_LOCALES [$(PRODUCT_LOCALES)])
|
||||||
|
endif
|
||||||
PRODUCT_LOCALES += $(extra_locales)
|
PRODUCT_LOCALES += $(extra_locales)
|
||||||
extra_locales :=
|
extra_locales :=
|
||||||
endif
|
endif
|
||||||
@@ -202,7 +205,7 @@ PRODUCT_BRAND := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_BRAND))
|
|||||||
|
|
||||||
PRODUCT_MODEL := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_MODEL))
|
PRODUCT_MODEL := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_MODEL))
|
||||||
ifndef PRODUCT_MODEL
|
ifndef PRODUCT_MODEL
|
||||||
PRODUCT_MODEL := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_NAME))
|
PRODUCT_MODEL := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_NAME))
|
||||||
endif
|
endif
|
||||||
|
|
||||||
PRODUCT_MANUFACTURER := \
|
PRODUCT_MANUFACTURER := \
|
||||||
@@ -245,32 +248,19 @@ ADDITIONAL_BUILD_PROPERTIES := \
|
|||||||
$(ADDITIONAL_BUILD_PROPERTIES) \
|
$(ADDITIONAL_BUILD_PROPERTIES) \
|
||||||
$(PRODUCT_PROPERTY_OVERRIDES)
|
$(PRODUCT_PROPERTY_OVERRIDES)
|
||||||
|
|
||||||
# Get the list of OTA public keys for the product.
|
# The OTA key(s) specified by the product config, if any. The names
|
||||||
OTA_PUBLIC_KEYS := \
|
# of these keys are stored in the target-files zip so that post-build
|
||||||
$(sort \
|
# signing tools can substitute them for the test key embedded by
|
||||||
$(OTA_PUBLIC_KEYS) \
|
# default.
|
||||||
$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_OTA_PUBLIC_KEYS) \
|
PRODUCT_OTA_PUBLIC_KEYS := $(sort \
|
||||||
)
|
$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_OTA_PUBLIC_KEYS))
|
||||||
|
|
||||||
# HACK: Not all products define OTA keys yet, and the -user build
|
|
||||||
# will fail if no keys are defined.
|
|
||||||
# TODO: Let a product opt out of needing OTA keys, and stop defaulting to
|
|
||||||
# the test key as soon as possible.
|
|
||||||
ifeq (,$(strip $(OTA_PUBLIC_KEYS)))
|
|
||||||
ifeq (,$(CALLED_FROM_SETUP))
|
|
||||||
$(warning WARNING: adding test OTA key)
|
|
||||||
endif
|
|
||||||
OTA_PUBLIC_KEYS := $(SRC_TARGET_DIR)/product/security/testkey.x509.pem
|
|
||||||
endif
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------
|
# ---------------------------------------------------------------
|
||||||
# Force the simulator to be the simulator, and make BUILD_TYPE
|
# Simulator overrides
|
||||||
# default to debug.
|
|
||||||
ifeq ($(TARGET_PRODUCT),sim)
|
ifeq ($(TARGET_PRODUCT),sim)
|
||||||
|
# Tell the build system to turn on some special cases
|
||||||
|
# to deal with the simulator product.
|
||||||
TARGET_SIMULATOR := true
|
TARGET_SIMULATOR := true
|
||||||
ifeq (,$(strip $(TARGET_BUILD_TYPE)))
|
|
||||||
TARGET_BUILD_TYPE := debug
|
|
||||||
endif
|
|
||||||
# dexpreopt doesn't work when building the simulator
|
# dexpreopt doesn't work when building the simulator
|
||||||
DISABLE_DEXPREOPT := true
|
DISABLE_DEXPREOPT := true
|
||||||
endif
|
endif
|
||||||
|
@@ -25,5 +25,6 @@ $(all_objects) : TARGET_GLOBAL_CFLAGS :=
|
|||||||
$(all_objects) : TARGET_GLOBAL_CPPFLAGS :=
|
$(all_objects) : TARGET_GLOBAL_CPPFLAGS :=
|
||||||
endif
|
endif
|
||||||
|
|
||||||
|
$(LOCAL_BUILT_MODULE): $(built_whole_libraries)
|
||||||
$(LOCAL_BUILT_MODULE): $(all_objects)
|
$(LOCAL_BUILT_MODULE): $(all_objects)
|
||||||
$(transform-o-to-static-lib)
|
$(transform-o-to-static-lib)
|
||||||
|
@@ -17,21 +17,22 @@ cts_tools_src_dir := cts/tools
|
|||||||
|
|
||||||
cts_name := android-cts
|
cts_name := android-cts
|
||||||
|
|
||||||
CTS_EXECUTABLE := cts
|
CTS_EXECUTABLE := startcts
|
||||||
ifeq ($(HOST_OS),windows)
|
ifeq ($(HOST_OS),windows)
|
||||||
CTS_EXECUTABLE_PATH := $(cts_tools_src_dir)/host/etc/cts.bat
|
CTS_EXECUTABLE_PATH := $(cts_tools_src_dir)/host/etc/cts.bat
|
||||||
else
|
else
|
||||||
CTS_EXECUTABLE_PATH := $(HOST_OUT_EXECUTABLES)/$(CTS_EXECUTABLE)
|
CTS_EXECUTABLE_PATH := $(cts_tools_src_dir)/utils/$(CTS_EXECUTABLE)
|
||||||
endif
|
endif
|
||||||
CTS_HOST_JAR := $(HOST_OUT_JAVA_LIBRARIES)/cts.jar
|
CTS_HOST_JAR := $(HOST_OUT_JAVA_LIBRARIES)/cts.jar
|
||||||
|
|
||||||
|
junit_host_jar := $(HOST_OUT_JAVA_LIBRARIES)/junit.jar
|
||||||
|
|
||||||
CTS_CORE_CASE_LIST := android.core.tests.annotation \
|
CTS_CORE_CASE_LIST := android.core.tests.annotation \
|
||||||
android.core.tests.archive \
|
android.core.tests.archive \
|
||||||
android.core.tests.concurrent \
|
android.core.tests.concurrent \
|
||||||
android.core.tests.crypto \
|
android.core.tests.crypto \
|
||||||
android.core.tests.dom \
|
android.core.tests.dom \
|
||||||
android.core.tests.logging \
|
android.core.tests.logging \
|
||||||
android.core.tests.luni \
|
|
||||||
android.core.tests.luni.io \
|
android.core.tests.luni.io \
|
||||||
android.core.tests.luni.lang \
|
android.core.tests.luni.lang \
|
||||||
android.core.tests.luni.net \
|
android.core.tests.luni.net \
|
||||||
@@ -45,7 +46,8 @@ CTS_CORE_CASE_LIST := android.core.tests.annotation \
|
|||||||
android.core.tests.sql \
|
android.core.tests.sql \
|
||||||
android.core.tests.text \
|
android.core.tests.text \
|
||||||
android.core.tests.xml \
|
android.core.tests.xml \
|
||||||
android.core.tests.xnet
|
android.core.tests.xnet \
|
||||||
|
android.core.tests.runner
|
||||||
|
|
||||||
CTS_CASE_LIST := \
|
CTS_CASE_LIST := \
|
||||||
DeviceInfoCollector \
|
DeviceInfoCollector \
|
||||||
@@ -65,11 +67,20 @@ CTS_CASE_LIST := \
|
|||||||
CtsWidgetTestCases \
|
CtsWidgetTestCases \
|
||||||
CtsNetTestCases \
|
CtsNetTestCases \
|
||||||
SignatureTest \
|
SignatureTest \
|
||||||
|
CtsPerformanceTestCases \
|
||||||
|
CtsPerformance2TestCases \
|
||||||
|
CtsPerformance3TestCases \
|
||||||
|
CtsPerformance4TestCases \
|
||||||
|
CtsPerformance5TestCases \
|
||||||
|
ApiDemos \
|
||||||
|
ApiDemosReferenceTest \
|
||||||
$(CTS_CORE_CASE_LIST)
|
$(CTS_CORE_CASE_LIST)
|
||||||
|
|
||||||
DEFAULT_TEST_PLAN := $(PRIVATE_DIR)/resource/plans
|
DEFAULT_TEST_PLAN := $(PRIVATE_DIR)/resource/plans
|
||||||
|
|
||||||
$(cts_dir)/all_cts_files_stamp: $(CTS_CASE_LIST) | $(ACP)
|
$(cts_dir)/all_cts_files_stamp: PRIVATE_JUNIT_HOST_JAR := $(junit_host_jar)
|
||||||
|
|
||||||
|
$(cts_dir)/all_cts_files_stamp: $(CTS_CASE_LIST) $(junit_host_jar) $(ACP)
|
||||||
# Make necessary directory for CTS
|
# Make necessary directory for CTS
|
||||||
@rm -rf $(PRIVATE_CTS_DIR)
|
@rm -rf $(PRIVATE_CTS_DIR)
|
||||||
@mkdir -p $(TMP_DIR)
|
@mkdir -p $(TMP_DIR)
|
||||||
@@ -80,13 +91,14 @@ $(cts_dir)/all_cts_files_stamp: $(CTS_CASE_LIST) | $(ACP)
|
|||||||
# Copy executable to CTS directory
|
# Copy executable to CTS directory
|
||||||
$(hide) $(ACP) -fp $(CTS_HOST_JAR) $(PRIVATE_DIR)/tools
|
$(hide) $(ACP) -fp $(CTS_HOST_JAR) $(PRIVATE_DIR)/tools
|
||||||
$(hide) $(ACP) -fp $(CTS_EXECUTABLE_PATH) $(PRIVATE_DIR)/tools
|
$(hide) $(ACP) -fp $(CTS_EXECUTABLE_PATH) $(PRIVATE_DIR)/tools
|
||||||
|
# Copy junit jar
|
||||||
|
$(hide) $(ACP) -fp $(PRIVATE_JUNIT_HOST_JAR) $(PRIVATE_DIR)/tools
|
||||||
# Change mode of the executables
|
# Change mode of the executables
|
||||||
$(hide) chmod ug+rwX $(PRIVATE_DIR)/tools/$(notdir $(CTS_EXECUTABLE_PATH))
|
$(hide) chmod ug+rwX $(PRIVATE_DIR)/tools/$(notdir $(CTS_EXECUTABLE_PATH))
|
||||||
$(foreach apk,$(CTS_CASE_LIST), \
|
$(foreach apk,$(CTS_CASE_LIST), \
|
||||||
$(call copy-testcase-apk,$(apk)))
|
$(call copy-testcase-apk,$(apk)))
|
||||||
# Copy CTS host config and start script to CTS directory
|
# Copy CTS host config to CTS directory
|
||||||
$(hide) $(ACP) -fp $(cts_tools_src_dir)/utils/host_config.xml $(PRIVATE_DIR)/repository/
|
$(hide) $(ACP) -fp $(cts_tools_src_dir)/utils/host_config.xml $(PRIVATE_DIR)/repository/
|
||||||
$(hide) $(ACP) -fp $(cts_tools_src_dir)/utils/startcts $(PRIVATE_DIR)/tools/
|
|
||||||
$(hide) touch $@
|
$(hide) touch $@
|
||||||
|
|
||||||
# Generate the test descriptions for the core-tests
|
# Generate the test descriptions for the core-tests
|
||||||
@@ -112,7 +124,7 @@ $(cts_dir)/all_cts_core_files_stamp: PRIVATE_PARAMS+=-Dcts.useEnhancedJunit=true
|
|||||||
# build system requires that dependencies use javalib.jar. If
|
# build system requires that dependencies use javalib.jar. If
|
||||||
# javalib.jar is up-to-date, then classes.jar is as well. Depending
|
# javalib.jar is up-to-date, then classes.jar is as well. Depending
|
||||||
# on classes.jar will build the files incorrectly.
|
# on classes.jar will build the files incorrectly.
|
||||||
$(cts_dir)/all_cts_core_files_stamp: $(CTS_CORE_CASE_LIST) $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar $(CORE_INTERMEDIATES)/javalib.jar $(TESTS_INTERMEDIATES)/javalib.jar | $(ACP)
|
$(cts_dir)/all_cts_core_files_stamp: $(CTS_CORE_CASE_LIST) $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar $(CORE_INTERMEDIATES)/javalib.jar $(TESTS_INTERMEDIATES)/javalib.jar $(cts_dir)/all_cts_files_stamp | $(ACP)
|
||||||
$(call generate-core-test-description,$(cts_dir)/$(cts_name)/repository/testcases/android.core.tests.annotation,\
|
$(call generate-core-test-description,$(cts_dir)/$(cts_name)/repository/testcases/android.core.tests.annotation,\
|
||||||
cts/tests/core/annotation/AndroidManifest.xml,\
|
cts/tests/core/annotation/AndroidManifest.xml,\
|
||||||
tests.annotation.AllTests)
|
tests.annotation.AllTests)
|
||||||
@@ -131,9 +143,6 @@ $(cts_dir)/all_cts_core_files_stamp: $(CTS_CORE_CASE_LIST) $(HOST_OUT_JAVA_LIBRA
|
|||||||
$(call generate-core-test-description,$(cts_dir)/$(cts_name)/repository/testcases/android.core.tests.logging,\
|
$(call generate-core-test-description,$(cts_dir)/$(cts_name)/repository/testcases/android.core.tests.logging,\
|
||||||
cts/tests/core/logging/AndroidManifest.xml,\
|
cts/tests/core/logging/AndroidManifest.xml,\
|
||||||
tests.logging.AllTests)
|
tests.logging.AllTests)
|
||||||
$(call generate-core-test-description,$(cts_dir)/$(cts_name)/repository/testcases/android.core.tests.luni,\
|
|
||||||
cts/tests/core/luni/AndroidManifest.xml,\
|
|
||||||
tests.luni.AllTests)
|
|
||||||
$(call generate-core-test-description,$(cts_dir)/$(cts_name)/repository/testcases/android.core.tests.luni.io,\
|
$(call generate-core-test-description,$(cts_dir)/$(cts_name)/repository/testcases/android.core.tests.luni.io,\
|
||||||
cts/tests/core/luni-io/AndroidManifest.xml,\
|
cts/tests/core/luni-io/AndroidManifest.xml,\
|
||||||
tests.luni.AllTestsIo)
|
tests.luni.AllTestsIo)
|
||||||
|
@@ -1,47 +0,0 @@
|
|||||||
# Copyright (C) 2008 The Android Open Source Project
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
#
|
|
||||||
# Rules for building the xlb files for export for translation.
|
|
||||||
#
|
|
||||||
|
|
||||||
# Gather all of the resource files for the default locale -- that is,
|
|
||||||
# all resources in directories called values or values-something, where
|
|
||||||
# one of the - separated segments is not two characters long -- those are the
|
|
||||||
# language directories, and we don't want those.
|
|
||||||
all_resource_files := $(foreach pkg, \
|
|
||||||
$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_PACKAGES), \
|
|
||||||
$(PACKAGES.$(pkg).RESOURCE_FILES))
|
|
||||||
values_resource_files := $(shell echo $(all_resource_files) | \
|
|
||||||
tr -s / | \
|
|
||||||
tr " " "\n" | \
|
|
||||||
grep -E "\/values[^/]*/(strings.xml|arrays.xml)$$" | \
|
|
||||||
grep -v -E -e "-[a-zA-Z]{2}[/\-]")
|
|
||||||
|
|
||||||
xlb_target := $(PRODUCT_OUT)/strings.xlb
|
|
||||||
|
|
||||||
$(xlb_target): $(values_resource_files) | $(LOCALIZE)
|
|
||||||
@echo XLB: $@
|
|
||||||
$(hide) mkdir -p $(dir $@)
|
|
||||||
$(hide) rm -f $@
|
|
||||||
$(hide) $(LOCALIZE) xlb $@ $^
|
|
||||||
|
|
||||||
# Add a phony target so typing make xlb is convenient
|
|
||||||
.PHONY: xlb
|
|
||||||
xlb: $(xlb_target)
|
|
||||||
|
|
||||||
# We want this on the build-server builds, but no reason to inflict it on
|
|
||||||
# everyone
|
|
||||||
$(call dist-for-goals, droid, $(xlb_target))
|
|
||||||
|
|
@@ -20,6 +20,8 @@
|
|||||||
# Guarantees that the following are defined:
|
# Guarantees that the following are defined:
|
||||||
# PLATFORM_VERSION
|
# PLATFORM_VERSION
|
||||||
# PLATFORM_SDK_VERSION
|
# PLATFORM_SDK_VERSION
|
||||||
|
# PLATFORM_VERSION_CODENAME
|
||||||
|
# DEFAULT_APP_TARGET_SDK
|
||||||
# BUILD_ID
|
# BUILD_ID
|
||||||
# BUILD_NUMBER
|
# BUILD_NUMBER
|
||||||
#
|
#
|
||||||
@@ -39,17 +41,40 @@ ifeq "" "$(PLATFORM_VERSION)"
|
|||||||
# which is the version that we reveal to the end user.
|
# which is the version that we reveal to the end user.
|
||||||
# Update this value when the platform version changes (rather
|
# Update this value when the platform version changes (rather
|
||||||
# than overriding it somewhere else). Can be an arbitrary string.
|
# than overriding it somewhere else). Can be an arbitrary string.
|
||||||
PLATFORM_VERSION := 1.5
|
PLATFORM_VERSION := Donut
|
||||||
endif
|
endif
|
||||||
|
|
||||||
ifeq "" "$(PLATFORM_SDK_VERSION)"
|
ifeq "" "$(PLATFORM_SDK_VERSION)"
|
||||||
# This is the canonical definition of the SDK version, which defines
|
# This is the canonical definition of the SDK version, which defines
|
||||||
# the set of APIs and functionality available in the platform. This is
|
# the set of APIs and functionality available in the platform. It
|
||||||
# a single integer, that increases monotonically as updates to the SDK
|
# is a single integer that increases monotonically as updates to
|
||||||
# are released.
|
# the SDK are released. It should only be incremented when the APIs for
|
||||||
|
# the new release are frozen (so that developers don't write apps against
|
||||||
|
# intermediate builds). During development, this number remains at the
|
||||||
|
# SDK version the branch is based on and PLATFORM_VERSION_CODENAME holds
|
||||||
|
# the code-name of the new development work.
|
||||||
PLATFORM_SDK_VERSION := 3
|
PLATFORM_SDK_VERSION := 3
|
||||||
endif
|
endif
|
||||||
|
|
||||||
|
ifeq "" "$(PLATFORM_VERSION_CODENAME)"
|
||||||
|
# If the build is not a final release build, then this is the current
|
||||||
|
# development code-name. If this is a final release build, it is simply "REL".
|
||||||
|
PLATFORM_VERSION_CODENAME := Donut
|
||||||
|
endif
|
||||||
|
|
||||||
|
ifeq "" "$(DEFAULT_APP_TARGET_SDK)"
|
||||||
|
# This is the default minSdkVersion and targetSdkVersion to use for
|
||||||
|
# all .apks created by the build system. It can be overridden by explicitly
|
||||||
|
# setting these in the .apk's AndroidManifest.xml. It is either the code
|
||||||
|
# name of the development build or, if this is a release build, the official
|
||||||
|
# SDK version of this release.
|
||||||
|
ifeq "REL" "$(PLATFORM_VERSION_CODENAME)"
|
||||||
|
DEFAULT_APP_TARGET_SDK := $(PLATFORM_SDK_VERSION)
|
||||||
|
else
|
||||||
|
DEFAULT_APP_TARGET_SDK := $(PLATFORM_VERSION_CODENAME)
|
||||||
|
endif
|
||||||
|
endif
|
||||||
|
|
||||||
ifeq "" "$(BUILD_ID)"
|
ifeq "" "$(BUILD_ID)"
|
||||||
# Used to signify special builds. E.g., branches and/or releases,
|
# Used to signify special builds. E.g., branches and/or releases,
|
||||||
# like "M5-RC7". Can be an arbitrary string, but must be a single
|
# like "M5-RC7". Can be an arbitrary string, but must be a single
|
||||||
|
48
envsetup.sh
48
envsetup.sh
@@ -289,30 +289,6 @@ function choosetype()
|
|||||||
#
|
#
|
||||||
function chooseproduct()
|
function chooseproduct()
|
||||||
{
|
{
|
||||||
# Find the makefiles that must exist for a product.
|
|
||||||
# Send stderr to /dev/null in case partner isn't present.
|
|
||||||
local -a choices
|
|
||||||
choices=(`/bin/ls build/target/board/*/BoardConfig.mk vendor/*/*/BoardConfig.mk 2> /dev/null`)
|
|
||||||
|
|
||||||
local choice
|
|
||||||
local -a prodlist
|
|
||||||
for choice in ${choices[@]}
|
|
||||||
do
|
|
||||||
# The product name is the name of the directory containing
|
|
||||||
# the makefile we found, above.
|
|
||||||
prodlist=(${prodlist[@]} `dirname ${choice} | xargs basename`)
|
|
||||||
done
|
|
||||||
|
|
||||||
local index=1
|
|
||||||
local p
|
|
||||||
echo "Product choices are:"
|
|
||||||
for p in ${prodlist[@]}
|
|
||||||
do
|
|
||||||
echo " $index. $p"
|
|
||||||
let "index = $index + 1"
|
|
||||||
done
|
|
||||||
|
|
||||||
|
|
||||||
if [ "x$TARGET_PRODUCT" != x ] ; then
|
if [ "x$TARGET_PRODUCT" != x ] ; then
|
||||||
default_value=$TARGET_PRODUCT
|
default_value=$TARGET_PRODUCT
|
||||||
else
|
else
|
||||||
@@ -327,8 +303,7 @@ function chooseproduct()
|
|||||||
local ANSWER
|
local ANSWER
|
||||||
while [ -z "$TARGET_PRODUCT" ]
|
while [ -z "$TARGET_PRODUCT" ]
|
||||||
do
|
do
|
||||||
echo "You can also type the name of a product if you know it."
|
echo -n "Which product would you like? [$default_value] "
|
||||||
echo -n "Which would you like? [$default_value] "
|
|
||||||
if [ -z "$1" ] ; then
|
if [ -z "$1" ] ; then
|
||||||
read ANSWER
|
read ANSWER
|
||||||
else
|
else
|
||||||
@@ -338,13 +313,6 @@ function chooseproduct()
|
|||||||
|
|
||||||
if [ -z "$ANSWER" ] ; then
|
if [ -z "$ANSWER" ] ; then
|
||||||
export TARGET_PRODUCT=$default_value
|
export TARGET_PRODUCT=$default_value
|
||||||
elif (echo -n $ANSWER | grep -q -e "^[0-9][0-9]*$") ; then
|
|
||||||
local poo=`echo -n $ANSWER`
|
|
||||||
if [ $poo -le ${#prodlist[@]} ] ; then
|
|
||||||
export TARGET_PRODUCT=${prodlist[$(($ANSWER-$_arrayoffset))]}
|
|
||||||
else
|
|
||||||
echo "** Bad product selection: $ANSWER"
|
|
||||||
fi
|
|
||||||
else
|
else
|
||||||
if check_product $ANSWER
|
if check_product $ANSWER
|
||||||
then
|
then
|
||||||
@@ -976,18 +944,14 @@ function runtest()
|
|||||||
echo "Couldn't locate the top of the tree. Try setting TOP." >&2
|
echo "Couldn't locate the top of the tree. Try setting TOP." >&2
|
||||||
return
|
return
|
||||||
fi
|
fi
|
||||||
(cd "$T" && development/tools/runtest $@)
|
(cd "$T" && development/testrunner/runtest.py $@)
|
||||||
}
|
}
|
||||||
|
|
||||||
# simple shortcut to the runtest.py command
|
# TODO: Remove this some time after 1 June 2009
|
||||||
function runtest_py()
|
function runtest_py()
|
||||||
{
|
{
|
||||||
T=$(gettop)
|
echo "runtest_py is obsolete; use runtest instead" >&2
|
||||||
if [ ! "$T" ]; then
|
return 1
|
||||||
echo "Couldn't locate the top of the tree. Try setting TOP." >&2
|
|
||||||
return
|
|
||||||
fi
|
|
||||||
(cd "$T" && development/testrunner/runtest.py $@)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function godir () {
|
function godir () {
|
||||||
@@ -1045,7 +1009,7 @@ fi
|
|||||||
unset _xarray
|
unset _xarray
|
||||||
|
|
||||||
# Execute the contents of any vendorsetup.sh files we can find.
|
# Execute the contents of any vendorsetup.sh files we can find.
|
||||||
for f in `/bin/ls vendor/*/vendorsetup.sh 2> /dev/null`
|
for f in `/bin/ls vendor/*/vendorsetup.sh vendor/*/build/vendorsetup.sh 2> /dev/null`
|
||||||
do
|
do
|
||||||
echo "including $f"
|
echo "including $f"
|
||||||
. $f
|
. $f
|
||||||
|
@@ -3,7 +3,6 @@ include $(CLEAR_VARS)
|
|||||||
|
|
||||||
LOCAL_SRC_FILES:= \
|
LOCAL_SRC_FILES:= \
|
||||||
CopyFile.c \
|
CopyFile.c \
|
||||||
Directories.cpp \
|
|
||||||
pseudolocalize.cpp
|
pseudolocalize.cpp
|
||||||
|
|
||||||
ifeq ($(HOST_OS),cygwin)
|
ifeq ($(HOST_OS),cygwin)
|
||||||
|
@@ -1,42 +0,0 @@
|
|||||||
#include <host/Directories.h>
|
|
||||||
#include <utils/String8.h>
|
|
||||||
#include <sys/types.h>
|
|
||||||
#include <sys/stat.h>
|
|
||||||
|
|
||||||
#ifdef HAVE_MS_C_RUNTIME
|
|
||||||
#include <direct.h>
|
|
||||||
#endif
|
|
||||||
|
|
||||||
using namespace android;
|
|
||||||
using namespace std;
|
|
||||||
|
|
||||||
string
|
|
||||||
parent_dir(const string& path)
|
|
||||||
{
|
|
||||||
return string(String8(path.c_str()).getPathDir().string());
|
|
||||||
}
|
|
||||||
|
|
||||||
int
|
|
||||||
mkdirs(const char* last)
|
|
||||||
{
|
|
||||||
String8 dest;
|
|
||||||
const char* s = last-1;
|
|
||||||
int err;
|
|
||||||
do {
|
|
||||||
s++;
|
|
||||||
if (s > last && (*s == '.' || *s == 0)) {
|
|
||||||
String8 part(last, s-last);
|
|
||||||
dest.appendPath(part);
|
|
||||||
#ifdef HAVE_MS_C_RUNTIME
|
|
||||||
err = _mkdir(dest.string());
|
|
||||||
#else
|
|
||||||
err = mkdir(dest.string(), S_IRUSR|S_IWUSR|S_IXUSR|S_IRGRP|S_IXGRP);
|
|
||||||
#endif
|
|
||||||
if (err != 0) {
|
|
||||||
return err;
|
|
||||||
}
|
|
||||||
last = s+1;
|
|
||||||
}
|
|
||||||
} while (*s);
|
|
||||||
return 0;
|
|
||||||
}
|
|
@@ -7,5 +7,6 @@
|
|||||||
TARGET_NO_BOOTLOADER := true
|
TARGET_NO_BOOTLOADER := true
|
||||||
TARGET_NO_KERNEL := true
|
TARGET_NO_KERNEL := true
|
||||||
TARGET_NO_RADIOIMAGE := true
|
TARGET_NO_RADIOIMAGE := true
|
||||||
|
TARGET_CPU_ABI := armeabi
|
||||||
HAVE_HTC_AUDIO_DRIVER := true
|
HAVE_HTC_AUDIO_DRIVER := true
|
||||||
BOARD_USES_GENERIC_AUDIO := true
|
BOARD_USES_GENERIC_AUDIO := true
|
||||||
|
@@ -17,6 +17,9 @@ TARGET_NO_BOOTLOADER := true
|
|||||||
# Don't bother with a kernel
|
# Don't bother with a kernel
|
||||||
TARGET_NO_KERNEL := true
|
TARGET_NO_KERNEL := true
|
||||||
|
|
||||||
|
# The simulator does not support native code at all
|
||||||
|
TARGET_CPU_ABI := none
|
||||||
|
|
||||||
#the simulator partially emulates the original HTC /dev/eac audio interface
|
#the simulator partially emulates the original HTC /dev/eac audio interface
|
||||||
HAVE_HTC_AUDIO_DRIVER := true
|
HAVE_HTC_AUDIO_DRIVER := true
|
||||||
BOARD_USES_GENERIC_AUDIO := true
|
BOARD_USES_GENERIC_AUDIO := true
|
||||||
|
@@ -12,13 +12,18 @@ PRODUCT_PACKAGES := \
|
|||||||
Launcher \
|
Launcher \
|
||||||
HTMLViewer \
|
HTMLViewer \
|
||||||
Phone \
|
Phone \
|
||||||
|
ApplicationsProvider \
|
||||||
ContactsProvider \
|
ContactsProvider \
|
||||||
DownloadProvider \
|
DownloadProvider \
|
||||||
GoogleSearch \
|
GoogleSearch \
|
||||||
MediaProvider \
|
MediaProvider \
|
||||||
|
PicoTts \
|
||||||
SettingsProvider \
|
SettingsProvider \
|
||||||
TelephonyProvider \
|
TelephonyProvider \
|
||||||
|
TtsService \
|
||||||
|
VpnServices \
|
||||||
UserDictionaryProvider \
|
UserDictionaryProvider \
|
||||||
PackageInstaller \
|
PackageInstaller \
|
||||||
|
WebSearchProvider \
|
||||||
Bugreport
|
Bugreport
|
||||||
|
|
||||||
|
@@ -12,6 +12,7 @@ PRODUCT_PACKAGES := \
|
|||||||
MediaProvider \
|
MediaProvider \
|
||||||
SettingsProvider \
|
SettingsProvider \
|
||||||
PackageInstaller \
|
PackageInstaller \
|
||||||
|
WebSearchProvider \
|
||||||
Bugreport \
|
Bugreport \
|
||||||
Launcher \
|
Launcher \
|
||||||
Settings \
|
Settings \
|
||||||
|
@@ -3,6 +3,7 @@ PRODUCT_PROPERTY_OVERRIDES :=
|
|||||||
PRODUCT_PACKAGES := \
|
PRODUCT_PACKAGES := \
|
||||||
AlarmClock \
|
AlarmClock \
|
||||||
Camera \
|
Camera \
|
||||||
|
Calculator \
|
||||||
Development \
|
Development \
|
||||||
DrmProvider \
|
DrmProvider \
|
||||||
Email \
|
Email \
|
||||||
@@ -18,6 +19,11 @@ PRODUCT_PACKAGES := \
|
|||||||
sqlite3 \
|
sqlite3 \
|
||||||
LatinIME \
|
LatinIME \
|
||||||
PinyinIME \
|
PinyinIME \
|
||||||
|
OpenWnn \
|
||||||
|
libWnnEngDic \
|
||||||
|
libWnnJpnDic \
|
||||||
|
libWnnZHCNDic \
|
||||||
|
libwnndict \
|
||||||
ApiDemos \
|
ApiDemos \
|
||||||
SoftKeyboard
|
SoftKeyboard
|
||||||
|
|
||||||
|
@@ -12,18 +12,40 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
ifneq ($(TARGET_SIMULATOR),true)
|
||||||
|
|
||||||
LOCAL_PATH := $(call my-dir)
|
LOCAL_PATH := $(call my-dir)
|
||||||
include $(CLEAR_VARS)
|
include $(CLEAR_VARS)
|
||||||
|
|
||||||
ifneq ($(TARGET_SIMULATOR),true)
|
LOCAL_SRC_FILES := applypatch.c bsdiff.c freecache.c imgpatch.c
|
||||||
|
LOCAL_MODULE := libapplypatch
|
||||||
|
LOCAL_MODULE_TAGS := eng
|
||||||
|
LOCAL_C_INCLUDES += external/bzip2 external/zlib bootable/recovery
|
||||||
|
LOCAL_STATIC_LIBRARIES += libmtdutils libmincrypt libbz libz
|
||||||
|
|
||||||
LOCAL_SRC_FILES := applypatch.c bsdiff.c freecache.c
|
include $(BUILD_STATIC_LIBRARY)
|
||||||
|
|
||||||
|
include $(CLEAR_VARS)
|
||||||
|
|
||||||
|
LOCAL_SRC_FILES := main.c
|
||||||
LOCAL_MODULE := applypatch
|
LOCAL_MODULE := applypatch
|
||||||
LOCAL_FORCE_STATIC_EXECUTABLE := true
|
LOCAL_FORCE_STATIC_EXECUTABLE := true
|
||||||
LOCAL_MODULE_TAGS := eng
|
LOCAL_MODULE_TAGS := eng
|
||||||
LOCAL_C_INCLUDES += external/bzip2
|
LOCAL_STATIC_LIBRARIES += libapplypatch
|
||||||
LOCAL_STATIC_LIBRARIES += libmincrypt libbz libc
|
LOCAL_STATIC_LIBRARIES += libmtdutils libmincrypt libbz libz
|
||||||
|
LOCAL_STATIC_LIBRARIES += libcutils libstdc++ libc
|
||||||
|
|
||||||
include $(BUILD_EXECUTABLE)
|
include $(BUILD_EXECUTABLE)
|
||||||
|
|
||||||
|
include $(CLEAR_VARS)
|
||||||
|
|
||||||
|
LOCAL_SRC_FILES := imgdiff.c
|
||||||
|
LOCAL_MODULE := imgdiff
|
||||||
|
LOCAL_FORCE_STATIC_EXECUTABLE := true
|
||||||
|
LOCAL_MODULE_TAGS := eng
|
||||||
|
LOCAL_C_INCLUDES += external/zlib
|
||||||
|
LOCAL_STATIC_LIBRARIES += libz
|
||||||
|
|
||||||
|
include $(BUILD_HOST_EXECUTABLE)
|
||||||
|
|
||||||
endif # !TARGET_SIMULATOR
|
endif # !TARGET_SIMULATOR
|
||||||
|
@@ -25,12 +25,25 @@
|
|||||||
|
|
||||||
#include "mincrypt/sha.h"
|
#include "mincrypt/sha.h"
|
||||||
#include "applypatch.h"
|
#include "applypatch.h"
|
||||||
|
#include "mtdutils/mtdutils.h"
|
||||||
|
|
||||||
|
int SaveFileContents(const char* filename, FileContents file);
|
||||||
|
int LoadMTDContents(const char* filename, FileContents* file);
|
||||||
|
int ParseSha1(const char* str, uint8_t* digest);
|
||||||
|
|
||||||
|
static int mtd_partitions_scanned = 0;
|
||||||
|
|
||||||
// Read a file into memory; store it and its associated metadata in
|
// Read a file into memory; store it and its associated metadata in
|
||||||
// *file. Return 0 on success.
|
// *file. Return 0 on success.
|
||||||
int LoadFileContents(const char* filename, FileContents* file) {
|
int LoadFileContents(const char* filename, FileContents* file) {
|
||||||
file->data = NULL;
|
file->data = NULL;
|
||||||
|
|
||||||
|
// A special 'filename' beginning with "MTD:" means to load the
|
||||||
|
// contents of an MTD partition.
|
||||||
|
if (strncmp(filename, "MTD:", 4) == 0) {
|
||||||
|
return LoadMTDContents(filename, file);
|
||||||
|
}
|
||||||
|
|
||||||
if (stat(filename, &file->st) != 0) {
|
if (stat(filename, &file->st) != 0) {
|
||||||
fprintf(stderr, "failed to stat \"%s\": %s\n", filename, strerror(errno));
|
fprintf(stderr, "failed to stat \"%s\": %s\n", filename, strerror(errno));
|
||||||
return -1;
|
return -1;
|
||||||
@@ -43,6 +56,7 @@ int LoadFileContents(const char* filename, FileContents* file) {
|
|||||||
if (f == NULL) {
|
if (f == NULL) {
|
||||||
fprintf(stderr, "failed to open \"%s\": %s\n", filename, strerror(errno));
|
fprintf(stderr, "failed to open \"%s\": %s\n", filename, strerror(errno));
|
||||||
free(file->data);
|
free(file->data);
|
||||||
|
file->data = NULL;
|
||||||
return -1;
|
return -1;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -51,6 +65,7 @@ int LoadFileContents(const char* filename, FileContents* file) {
|
|||||||
fprintf(stderr, "short read of \"%s\" (%d bytes of %d)\n",
|
fprintf(stderr, "short read of \"%s\" (%d bytes of %d)\n",
|
||||||
filename, bytes_read, file->size);
|
filename, bytes_read, file->size);
|
||||||
free(file->data);
|
free(file->data);
|
||||||
|
file->data = NULL;
|
||||||
return -1;
|
return -1;
|
||||||
}
|
}
|
||||||
fclose(f);
|
fclose(f);
|
||||||
@@ -59,6 +74,182 @@ int LoadFileContents(const char* filename, FileContents* file) {
|
|||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
static size_t* size_array;
|
||||||
|
// comparison function for qsort()ing an int array of indexes into
|
||||||
|
// size_array[].
|
||||||
|
static int compare_size_indices(const void* a, const void* b) {
|
||||||
|
int aa = *(int*)a;
|
||||||
|
int bb = *(int*)b;
|
||||||
|
if (size_array[aa] < size_array[bb]) {
|
||||||
|
return -1;
|
||||||
|
} else if (size_array[aa] > size_array[bb]) {
|
||||||
|
return 1;
|
||||||
|
} else {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load the contents of an MTD partition into the provided
|
||||||
|
// FileContents. filename should be a string of the form
|
||||||
|
// "MTD:<partition_name>:<size_1>:<sha1_1>:<size_2>:<sha1_2>:...".
|
||||||
|
// The smallest size_n bytes for which that prefix of the mtd contents
|
||||||
|
// has the corresponding sha1 hash will be loaded. It is acceptable
|
||||||
|
// for a size value to be repeated with different sha1s. Will return
|
||||||
|
// 0 on success.
|
||||||
|
//
|
||||||
|
// This complexity is needed because if an OTA installation is
|
||||||
|
// interrupted, the partition might contain either the source or the
|
||||||
|
// target data, which might be of different lengths. We need to know
|
||||||
|
// the length in order to read from MTD (there is no "end-of-file"
|
||||||
|
// marker), so the caller must specify the possible lengths and the
|
||||||
|
// hash of the data, and we'll do the load expecting to find one of
|
||||||
|
// those hashes.
|
||||||
|
int LoadMTDContents(const char* filename, FileContents* file) {
|
||||||
|
char* copy = strdup(filename);
|
||||||
|
const char* magic = strtok(copy, ":");
|
||||||
|
if (strcmp(magic, "MTD") != 0) {
|
||||||
|
fprintf(stderr, "LoadMTDContents called with bad filename (%s)\n",
|
||||||
|
filename);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
const char* partition = strtok(NULL, ":");
|
||||||
|
|
||||||
|
int i;
|
||||||
|
int colons = 0;
|
||||||
|
for (i = 0; filename[i] != '\0'; ++i) {
|
||||||
|
if (filename[i] == ':') {
|
||||||
|
++colons;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (colons < 3 || colons%2 == 0) {
|
||||||
|
fprintf(stderr, "LoadMTDContents called with bad filename (%s)\n",
|
||||||
|
filename);
|
||||||
|
}
|
||||||
|
|
||||||
|
int pairs = (colons-1)/2; // # of (size,sha1) pairs in filename
|
||||||
|
int* index = malloc(pairs * sizeof(int));
|
||||||
|
size_t* size = malloc(pairs * sizeof(size_t));
|
||||||
|
char** sha1sum = malloc(pairs * sizeof(char*));
|
||||||
|
|
||||||
|
for (i = 0; i < pairs; ++i) {
|
||||||
|
const char* size_str = strtok(NULL, ":");
|
||||||
|
size[i] = strtol(size_str, NULL, 10);
|
||||||
|
if (size[i] == 0) {
|
||||||
|
fprintf(stderr, "LoadMTDContents called with bad size (%s)\n", filename);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
sha1sum[i] = strtok(NULL, ":");
|
||||||
|
index[i] = i;
|
||||||
|
}
|
||||||
|
|
||||||
|
// sort the index[] array so it indexes the pairs in order of
|
||||||
|
// increasing size.
|
||||||
|
size_array = size;
|
||||||
|
qsort(index, pairs, sizeof(int), compare_size_indices);
|
||||||
|
|
||||||
|
if (!mtd_partitions_scanned) {
|
||||||
|
mtd_scan_partitions();
|
||||||
|
mtd_partitions_scanned = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
const MtdPartition* mtd = mtd_find_partition_by_name(partition);
|
||||||
|
if (mtd == NULL) {
|
||||||
|
fprintf(stderr, "mtd partition \"%s\" not found (loading %s)\n",
|
||||||
|
partition, filename);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
MtdReadContext* ctx = mtd_read_partition(mtd);
|
||||||
|
if (ctx == NULL) {
|
||||||
|
fprintf(stderr, "failed to initialize read of mtd partition \"%s\"\n",
|
||||||
|
partition);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
SHA_CTX sha_ctx;
|
||||||
|
SHA_init(&sha_ctx);
|
||||||
|
uint8_t parsed_sha[SHA_DIGEST_SIZE];
|
||||||
|
|
||||||
|
// allocate enough memory to hold the largest size.
|
||||||
|
file->data = malloc(size[index[pairs-1]]);
|
||||||
|
char* p = (char*)file->data;
|
||||||
|
file->size = 0; // # bytes read so far
|
||||||
|
|
||||||
|
for (i = 0; i < pairs; ++i) {
|
||||||
|
// Read enough additional bytes to get us up to the next size
|
||||||
|
// (again, we're trying the possibilities in order of increasing
|
||||||
|
// size).
|
||||||
|
size_t next = size[index[i]] - file->size;
|
||||||
|
size_t read = 0;
|
||||||
|
if (next > 0) {
|
||||||
|
read = mtd_read_data(ctx, p, next);
|
||||||
|
if (next != read) {
|
||||||
|
fprintf(stderr, "short read (%d bytes of %d) for partition \"%s\"\n",
|
||||||
|
read, next, partition);
|
||||||
|
free(file->data);
|
||||||
|
file->data = NULL;
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
SHA_update(&sha_ctx, p, read);
|
||||||
|
file->size += read;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Duplicate the SHA context and finalize the duplicate so we can
|
||||||
|
// check it against this pair's expected hash.
|
||||||
|
SHA_CTX temp_ctx;
|
||||||
|
memcpy(&temp_ctx, &sha_ctx, sizeof(SHA_CTX));
|
||||||
|
const uint8_t* sha_so_far = SHA_final(&temp_ctx);
|
||||||
|
|
||||||
|
if (ParseSha1(sha1sum[index[i]], parsed_sha) != 0) {
|
||||||
|
fprintf(stderr, "failed to parse sha1 %s in %s\n",
|
||||||
|
sha1sum[index[i]], filename);
|
||||||
|
free(file->data);
|
||||||
|
file->data = NULL;
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (memcmp(sha_so_far, parsed_sha, SHA_DIGEST_SIZE) == 0) {
|
||||||
|
// we have a match. stop reading the partition; we'll return
|
||||||
|
// the data we've read so far.
|
||||||
|
printf("mtd read matched size %d sha %s\n",
|
||||||
|
size[index[i]], sha1sum[index[i]]);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
p += read;
|
||||||
|
}
|
||||||
|
|
||||||
|
mtd_read_close(ctx);
|
||||||
|
|
||||||
|
if (i == pairs) {
|
||||||
|
// Ran off the end of the list of (size,sha1) pairs without
|
||||||
|
// finding a match.
|
||||||
|
fprintf(stderr, "contents of MTD partition \"%s\" didn't match %s\n",
|
||||||
|
partition, filename);
|
||||||
|
free(file->data);
|
||||||
|
file->data = NULL;
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
const uint8_t* sha_final = SHA_final(&sha_ctx);
|
||||||
|
for (i = 0; i < SHA_DIGEST_SIZE; ++i) {
|
||||||
|
file->sha1[i] = sha_final[i];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fake some stat() info.
|
||||||
|
file->st.st_mode = 0644;
|
||||||
|
file->st.st_uid = 0;
|
||||||
|
file->st.st_gid = 0;
|
||||||
|
|
||||||
|
free(copy);
|
||||||
|
free(index);
|
||||||
|
free(size);
|
||||||
|
free(sha1sum);
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
// Save the contents of the given FileContents object under the given
|
// Save the contents of the given FileContents object under the given
|
||||||
// filename. Return 0 on success.
|
// filename. Return 0 on success.
|
||||||
int SaveFileContents(const char* filename, FileContents file) {
|
int SaveFileContents(const char* filename, FileContents file) {
|
||||||
@@ -91,6 +282,76 @@ int SaveFileContents(const char* filename, FileContents file) {
|
|||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Copy the contents of source_file to target_mtd partition, a string
|
||||||
|
// of the form "MTD:<partition>[:...]". Return 0 on success.
|
||||||
|
int CopyToMTDPartition(const char* source_file, const char* target_mtd) {
|
||||||
|
char* partition = strchr(target_mtd, ':');
|
||||||
|
if (partition == NULL) {
|
||||||
|
fprintf(stderr, "bad MTD target name \"%s\"\n", target_mtd);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
++partition;
|
||||||
|
// Trim off anything after a colon, eg "MTD:boot:blah:blah:blah...".
|
||||||
|
// We want just the partition name "boot".
|
||||||
|
partition = strdup(partition);
|
||||||
|
char* end = strchr(partition, ':');
|
||||||
|
if (end != NULL)
|
||||||
|
*end = '\0';
|
||||||
|
|
||||||
|
FILE* f = fopen(source_file, "rb");
|
||||||
|
if (f == NULL) {
|
||||||
|
fprintf(stderr, "failed to open %s for reading: %s\n",
|
||||||
|
source_file, strerror(errno));
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!mtd_partitions_scanned) {
|
||||||
|
mtd_scan_partitions();
|
||||||
|
mtd_partitions_scanned = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
const MtdPartition* mtd = mtd_find_partition_by_name(partition);
|
||||||
|
if (mtd == NULL) {
|
||||||
|
fprintf(stderr, "mtd partition \"%s\" not found for writing\n", partition);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
MtdWriteContext* ctx = mtd_write_partition(mtd);
|
||||||
|
if (ctx == NULL) {
|
||||||
|
fprintf(stderr, "failed to init mtd partition \"%s\" for writing\n",
|
||||||
|
partition);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
const int buffer_size = 4096;
|
||||||
|
char buffer[buffer_size];
|
||||||
|
size_t read;
|
||||||
|
while ((read = fread(buffer, 1, buffer_size, f)) > 0) {
|
||||||
|
size_t written = mtd_write_data(ctx, buffer, read);
|
||||||
|
if (written != read) {
|
||||||
|
fprintf(stderr, "only wrote %d of %d bytes to MTD %s\n",
|
||||||
|
written, read, partition);
|
||||||
|
mtd_write_close(ctx);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fclose(f);
|
||||||
|
if (mtd_erase_blocks(ctx, -1) < 0) {
|
||||||
|
fprintf(stderr, "error finishing mtd write of %s\n", partition);
|
||||||
|
mtd_write_close(ctx);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (mtd_write_close(ctx)) {
|
||||||
|
fprintf(stderr, "error closing mtd write of %s\n", partition);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
free(partition);
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
// Take a string 'str' of 40 hex digits and parse it into the 20
|
// Take a string 'str' of 40 hex digits and parse it into the 20
|
||||||
// byte array 'digest'. 'str' may contain only the digest or be of
|
// byte array 'digest'. 'str' may contain only the digest or be of
|
||||||
@@ -176,8 +437,13 @@ int CheckMode(int argc, char** argv) {
|
|||||||
FileContents file;
|
FileContents file;
|
||||||
file.data = NULL;
|
file.data = NULL;
|
||||||
|
|
||||||
|
// It's okay to specify no sha1s; the check will pass if the
|
||||||
|
// LoadFileContents is successful. (Useful for reading MTD
|
||||||
|
// partitions, where the filename encodes the sha1s; no need to
|
||||||
|
// check them twice.)
|
||||||
if (LoadFileContents(argv[2], &file) != 0 ||
|
if (LoadFileContents(argv[2], &file) != 0 ||
|
||||||
FindMatchingPatch(file.sha1, patches, num_patches) == NULL) {
|
(num_patches > 0 &&
|
||||||
|
FindMatchingPatch(file.sha1, patches, num_patches) == NULL)) {
|
||||||
fprintf(stderr, "file \"%s\" doesn't have any of expected "
|
fprintf(stderr, "file \"%s\" doesn't have any of expected "
|
||||||
"sha1 sums; checking cache\n", argv[2]);
|
"sha1 sums; checking cache\n", argv[2]);
|
||||||
|
|
||||||
@@ -226,27 +492,57 @@ size_t FreeSpaceForFile(const char* filename) {
|
|||||||
// replacement for it) and idempotent (it's okay to run this program
|
// replacement for it) and idempotent (it's okay to run this program
|
||||||
// multiple times).
|
// multiple times).
|
||||||
//
|
//
|
||||||
// - if the sha1 hash of <file> is <tgt-sha1>, does nothing and exits
|
// - if the sha1 hash of <tgt-file> is <tgt-sha1>, does nothing and exits
|
||||||
// successfully.
|
// successfully.
|
||||||
//
|
//
|
||||||
// - otherwise, if the sha1 hash of <file> is <src-sha1>, applies the
|
// - otherwise, if the sha1 hash of <src-file> is <src-sha1>, applies the
|
||||||
// bsdiff <patch> to <file> to produce a new file (the type of patch
|
// bsdiff <patch> to <src-file> to produce a new file (the type of patch
|
||||||
// is automatically detected from the file header). If that new
|
// is automatically detected from the file header). If that new
|
||||||
// file has sha1 hash <tgt-sha1>, moves it to replace <file>, and
|
// file has sha1 hash <tgt-sha1>, moves it to replace <tgt-file>, and
|
||||||
// exits successfully.
|
// exits successfully. Note that if <src-file> and <tgt-file> are
|
||||||
|
// not the same, <src-file> is NOT deleted on success. <tgt-file>
|
||||||
|
// may be the string "-" to mean "the same as src-file".
|
||||||
//
|
//
|
||||||
// - otherwise, or if any error is encountered, exits with non-zero
|
// - otherwise, or if any error is encountered, exits with non-zero
|
||||||
// status.
|
// status.
|
||||||
|
//
|
||||||
|
// <src-file> (or <file> in check mode) may refer to an MTD partition
|
||||||
|
// to read the source data. See the comments for the
|
||||||
|
// LoadMTDContents() function above for the format of such a filename.
|
||||||
|
//
|
||||||
|
//
|
||||||
|
// As you might guess from the arguments, this function used to be
|
||||||
|
// main(); it was split out this way so applypatch could be built as a
|
||||||
|
// static library and linked into other executables as well. In the
|
||||||
|
// future only the library form will exist; we will not need to build
|
||||||
|
// this as a standalone executable.
|
||||||
|
//
|
||||||
|
// The arguments to this function are just the command-line of the
|
||||||
|
// standalone executable:
|
||||||
|
//
|
||||||
|
// <src-file> <tgt-file> <tgt-sha1> <tgt-size> [<src-sha1>:<patch> ...]
|
||||||
|
// to apply a patch. Returns 0 on success, 1 on failure.
|
||||||
|
//
|
||||||
|
// "-c" <file> [<sha1> ...]
|
||||||
|
// to check a file's contents against zero or more sha1s. Returns
|
||||||
|
// 0 if it matches any of them, 1 if it doesn't.
|
||||||
|
//
|
||||||
|
// "-s" <bytes>
|
||||||
|
// returns 0 if enough free space is available on /cache; 1 if it
|
||||||
|
// does not.
|
||||||
|
//
|
||||||
|
// "-l"
|
||||||
|
// shows open-source license information and returns 0.
|
||||||
|
//
|
||||||
|
// This function returns 2 if the arguments are not understood (in the
|
||||||
|
// standalone executable, this causes the usage message to be
|
||||||
|
// printed).
|
||||||
|
//
|
||||||
|
// TODO: make the interface more sensible for use as a library.
|
||||||
|
|
||||||
int main(int argc, char** argv) {
|
int applypatch(int argc, char** argv) {
|
||||||
if (argc < 2) {
|
if (argc < 2) {
|
||||||
usage:
|
return 2;
|
||||||
fprintf(stderr, "usage: %s <file> <tgt-sha1> <tgt-size> [<src-sha1>:<patch> ...]\n"
|
|
||||||
" or %s -c <file> [<sha1> ...]\n"
|
|
||||||
" or %s -s <bytes>\n"
|
|
||||||
" or %s -l\n",
|
|
||||||
argv[0], argv[0], argv[0], argv[0]);
|
|
||||||
return 1;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (strncmp(argv[1], "-l", 3) == 0) {
|
if (strncmp(argv[1], "-l", 3) == 0) {
|
||||||
@@ -259,7 +555,7 @@ int main(int argc, char** argv) {
|
|||||||
|
|
||||||
if (strncmp(argv[1], "-s", 3) == 0) {
|
if (strncmp(argv[1], "-s", 3) == 0) {
|
||||||
if (argc != 3) {
|
if (argc != 3) {
|
||||||
goto usage;
|
return 2;
|
||||||
}
|
}
|
||||||
size_t bytes = strtol(argv[2], NULL, 10);
|
size_t bytes = strtol(argv[2], NULL, 10);
|
||||||
if (MakeFreeSpaceOnCache(bytes) < 0) {
|
if (MakeFreeSpaceOnCache(bytes) < 0) {
|
||||||
@@ -273,26 +569,22 @@ int main(int argc, char** argv) {
|
|||||||
uint8_t target_sha1[SHA_DIGEST_SIZE];
|
uint8_t target_sha1[SHA_DIGEST_SIZE];
|
||||||
|
|
||||||
const char* source_filename = argv[1];
|
const char* source_filename = argv[1];
|
||||||
|
const char* target_filename = argv[2];
|
||||||
// assume that source_filename (eg "/system/app/Foo.apk") is located
|
if (target_filename[0] == '-' &&
|
||||||
// on the same filesystem as its top-level directory ("/system").
|
target_filename[1] == '\0') {
|
||||||
// We need something that exists for calling statfs().
|
target_filename = source_filename;
|
||||||
char* source_fs = strdup(argv[1]);
|
|
||||||
char* slash = strchr(source_fs+1, '/');
|
|
||||||
if (slash != NULL) {
|
|
||||||
*slash = '\0';
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ParseSha1(argv[2], target_sha1) != 0) {
|
if (ParseSha1(argv[3], target_sha1) != 0) {
|
||||||
fprintf(stderr, "failed to parse tgt-sha1 \"%s\"\n", argv[2]);
|
fprintf(stderr, "failed to parse tgt-sha1 \"%s\"\n", argv[3]);
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
unsigned long target_size = strtoul(argv[3], NULL, 0);
|
unsigned long target_size = strtoul(argv[4], NULL, 0);
|
||||||
|
|
||||||
int num_patches;
|
int num_patches;
|
||||||
Patch* patches;
|
Patch* patches;
|
||||||
if (ParseShaArgs(argc-4, argv+4, &patches, &num_patches) < 0) { return 1; }
|
if (ParseShaArgs(argc-5, argv+5, &patches, &num_patches) < 0) { return 1; }
|
||||||
|
|
||||||
FileContents copy_file;
|
FileContents copy_file;
|
||||||
FileContents source_file;
|
FileContents source_file;
|
||||||
@@ -300,15 +592,27 @@ int main(int argc, char** argv) {
|
|||||||
const char* copy_patch_filename = NULL;
|
const char* copy_patch_filename = NULL;
|
||||||
int made_copy = 0;
|
int made_copy = 0;
|
||||||
|
|
||||||
if (LoadFileContents(source_filename, &source_file) == 0) {
|
// We try to load the target file into the source_file object.
|
||||||
|
if (LoadFileContents(target_filename, &source_file) == 0) {
|
||||||
if (memcmp(source_file.sha1, target_sha1, SHA_DIGEST_SIZE) == 0) {
|
if (memcmp(source_file.sha1, target_sha1, SHA_DIGEST_SIZE) == 0) {
|
||||||
// The early-exit case: the patch was already applied, this file
|
// The early-exit case: the patch was already applied, this file
|
||||||
// has the desired hash, nothing for us to do.
|
// has the desired hash, nothing for us to do.
|
||||||
fprintf(stderr, "\"%s\" is already target; no patch needed\n",
|
fprintf(stderr, "\"%s\" is already target; no patch needed\n",
|
||||||
source_filename);
|
target_filename);
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (source_file.data == NULL ||
|
||||||
|
(target_filename != source_filename &&
|
||||||
|
strcmp(target_filename, source_filename) != 0)) {
|
||||||
|
// Need to load the source file: either we failed to load the
|
||||||
|
// target file, or we did but it's different from the source file.
|
||||||
|
free(source_file.data);
|
||||||
|
LoadFileContents(source_filename, &source_file);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (source_file.data != NULL) {
|
||||||
const Patch* to_use =
|
const Patch* to_use =
|
||||||
FindMatchingPatch(source_file.sha1, patches, num_patches);
|
FindMatchingPatch(source_file.sha1, patches, num_patches);
|
||||||
if (to_use != NULL) {
|
if (to_use != NULL) {
|
||||||
@@ -339,30 +643,70 @@ int main(int argc, char** argv) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Is there enough room in the target filesystem to hold the patched file?
|
// Is there enough room in the target filesystem to hold the patched
|
||||||
size_t free_space = FreeSpaceForFile(source_fs);
|
// file?
|
||||||
int enough_space = free_space > (target_size * 3 / 2); // 50% margin of error
|
|
||||||
printf("target %ld bytes; free space %ld bytes; enough %d\n",
|
|
||||||
(long)target_size, (long)free_space, enough_space);
|
|
||||||
|
|
||||||
if (!enough_space && source_patch_filename != NULL) {
|
if (strncmp(target_filename, "MTD:", 4) == 0) {
|
||||||
// Using the original source, but not enough free space. First
|
// If the target is an MTD partition, we're actually going to
|
||||||
// copy the source file to cache, then delete it from the original
|
// write the output to /tmp and then copy it to the partition.
|
||||||
// location.
|
// statfs() always returns 0 blocks free for /tmp, so instead
|
||||||
|
// we'll just assume that /tmp has enough space to hold the file.
|
||||||
|
|
||||||
|
// We still write the original source to cache, in case the MTD
|
||||||
|
// write is interrupted.
|
||||||
if (MakeFreeSpaceOnCache(source_file.size) < 0) {
|
if (MakeFreeSpaceOnCache(source_file.size) < 0) {
|
||||||
fprintf(stderr, "not enough free space on /cache\n");
|
fprintf(stderr, "not enough free space on /cache\n");
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (SaveFileContents(CACHE_TEMP_SOURCE, source_file) < 0) {
|
if (SaveFileContents(CACHE_TEMP_SOURCE, source_file) < 0) {
|
||||||
fprintf(stderr, "failed to back up source file\n");
|
fprintf(stderr, "failed to back up source file\n");
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
made_copy = 1;
|
made_copy = 1;
|
||||||
unlink(source_filename);
|
} else {
|
||||||
|
// assume that target_filename (eg "/system/app/Foo.apk") is located
|
||||||
|
// on the same filesystem as its top-level directory ("/system").
|
||||||
|
// We need something that exists for calling statfs().
|
||||||
|
char* target_fs = strdup(target_filename);
|
||||||
|
char* slash = strchr(target_fs+1, '/');
|
||||||
|
if (slash != NULL) {
|
||||||
|
*slash = '\0';
|
||||||
|
}
|
||||||
|
|
||||||
size_t free_space = FreeSpaceForFile(source_fs);
|
size_t free_space = FreeSpaceForFile(target_fs);
|
||||||
printf("(now %ld bytes free for source)\n", (long)free_space);
|
int enough_space =
|
||||||
|
free_space > (target_size * 3 / 2); // 50% margin of error
|
||||||
|
printf("target %ld bytes; free space %ld bytes; enough %d\n",
|
||||||
|
(long)target_size, (long)free_space, enough_space);
|
||||||
|
|
||||||
|
if (!enough_space && source_patch_filename != NULL) {
|
||||||
|
// Using the original source, but not enough free space. First
|
||||||
|
// copy the source file to cache, then delete it from the original
|
||||||
|
// location.
|
||||||
|
|
||||||
|
if (strncmp(source_filename, "MTD:", 4) == 0) {
|
||||||
|
// It's impossible to free space on the target filesystem by
|
||||||
|
// deleting the source if the source is an MTD partition. If
|
||||||
|
// we're ever in a state where we need to do this, fail.
|
||||||
|
fprintf(stderr, "not enough free space for target but source is MTD\n");
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (MakeFreeSpaceOnCache(source_file.size) < 0) {
|
||||||
|
fprintf(stderr, "not enough free space on /cache\n");
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (SaveFileContents(CACHE_TEMP_SOURCE, source_file) < 0) {
|
||||||
|
fprintf(stderr, "failed to back up source file\n");
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
made_copy = 1;
|
||||||
|
unlink(source_filename);
|
||||||
|
|
||||||
|
size_t free_space = FreeSpaceForFile(target_fs);
|
||||||
|
printf("(now %ld bytes free for target)\n", (long)free_space);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
FileContents* source_to_use;
|
FileContents* source_to_use;
|
||||||
@@ -375,14 +719,19 @@ int main(int argc, char** argv) {
|
|||||||
patch_filename = copy_patch_filename;
|
patch_filename = copy_patch_filename;
|
||||||
}
|
}
|
||||||
|
|
||||||
// We write the decoded output to "<file>.patch".
|
char* outname = NULL;
|
||||||
char* outname = (char*)malloc(strlen(source_filename) + 10);
|
if (strncmp(target_filename, "MTD:", 4) == 0) {
|
||||||
strcpy(outname, source_filename);
|
outname = MTD_TARGET_TEMP_FILE;
|
||||||
strcat(outname, ".patch");
|
} else {
|
||||||
|
// We write the decoded output to "<tgt-file>.patch".
|
||||||
|
outname = (char*)malloc(strlen(target_filename) + 10);
|
||||||
|
strcpy(outname, target_filename);
|
||||||
|
strcat(outname, ".patch");
|
||||||
|
}
|
||||||
FILE* output = fopen(outname, "wb");
|
FILE* output = fopen(outname, "wb");
|
||||||
if (output == NULL) {
|
if (output == NULL) {
|
||||||
fprintf(stderr, "failed to patch file %s: %s\n",
|
fprintf(stderr, "failed to open output file %s: %s\n",
|
||||||
source_filename, strerror(errno));
|
outname, strerror(errno));
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -410,11 +759,19 @@ int main(int argc, char** argv) {
|
|||||||
} else if (header_bytes_read >= 8 &&
|
} else if (header_bytes_read >= 8 &&
|
||||||
memcmp(header, "BSDIFF40", 8) == 0) {
|
memcmp(header, "BSDIFF40", 8) == 0) {
|
||||||
int result = ApplyBSDiffPatch(source_to_use->data, source_to_use->size,
|
int result = ApplyBSDiffPatch(source_to_use->data, source_to_use->size,
|
||||||
patch_filename, output, &ctx);
|
patch_filename, 0, output, &ctx);
|
||||||
if (result != 0) {
|
if (result != 0) {
|
||||||
fprintf(stderr, "ApplyBSDiffPatch failed\n");
|
fprintf(stderr, "ApplyBSDiffPatch failed\n");
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
} else if (header_bytes_read >= 8 &&
|
||||||
|
memcmp(header, "IMGDIFF1", 8) == 0) {
|
||||||
|
int result = ApplyImagePatch(source_to_use->data, source_to_use->size,
|
||||||
|
patch_filename, output, &ctx);
|
||||||
|
if (result != 0) {
|
||||||
|
fprintf(stderr, "ApplyImagePatch failed\n");
|
||||||
|
return result;
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
fprintf(stderr, "Unknown patch file format");
|
fprintf(stderr, "Unknown patch file format");
|
||||||
return 1;
|
return 1;
|
||||||
@@ -430,22 +787,32 @@ int main(int argc, char** argv) {
|
|||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Give the .patch file the same owner, group, and mode of the
|
if (strcmp(outname, MTD_TARGET_TEMP_FILE) == 0) {
|
||||||
// original source file.
|
// Copy the temp file to the MTD partition.
|
||||||
if (chmod(outname, source_to_use->st.st_mode) != 0) {
|
if (CopyToMTDPartition(outname, target_filename) != 0) {
|
||||||
fprintf(stderr, "chmod of \"%s\" failed: %s\n", outname, strerror(errno));
|
fprintf(stderr, "copy of %s to %s failed\n", outname, target_filename);
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
if (chown(outname, source_to_use->st.st_uid, source_to_use->st.st_gid) != 0) {
|
unlink(outname);
|
||||||
fprintf(stderr, "chown of \"%s\" failed: %s\n", outname, strerror(errno));
|
} else {
|
||||||
return 1;
|
// Give the .patch file the same owner, group, and mode of the
|
||||||
}
|
// original source file.
|
||||||
|
if (chmod(outname, source_to_use->st.st_mode) != 0) {
|
||||||
|
fprintf(stderr, "chmod of \"%s\" failed: %s\n", outname, strerror(errno));
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
if (chown(outname, source_to_use->st.st_uid,
|
||||||
|
source_to_use->st.st_gid) != 0) {
|
||||||
|
fprintf(stderr, "chown of \"%s\" failed: %s\n", outname, strerror(errno));
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
|
||||||
// Finally, rename the .patch file to replace the original source file.
|
// Finally, rename the .patch file to replace the target file.
|
||||||
if (rename(outname, source_filename) != 0) {
|
if (rename(outname, target_filename) != 0) {
|
||||||
fprintf(stderr, "rename of .patch to \"%s\" failed: %s\n",
|
fprintf(stderr, "rename of .patch to \"%s\" failed: %s\n",
|
||||||
source_filename, strerror(errno));
|
target_filename, strerror(errno));
|
||||||
return 1;
|
return 1;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// If this run of applypatch created the copy, and we're here, we
|
// If this run of applypatch created the copy, and we're here, we
|
||||||
|
@@ -17,6 +17,7 @@
|
|||||||
#ifndef _APPLYPATCH_H
|
#ifndef _APPLYPATCH_H
|
||||||
#define _APPLYPATCH_H
|
#define _APPLYPATCH_H
|
||||||
|
|
||||||
|
#include <sys/stat.h>
|
||||||
#include "mincrypt/sha.h"
|
#include "mincrypt/sha.h"
|
||||||
|
|
||||||
typedef struct _Patch {
|
typedef struct _Patch {
|
||||||
@@ -38,12 +39,26 @@ typedef struct _FileContents {
|
|||||||
// and use it as the source instead.
|
// and use it as the source instead.
|
||||||
#define CACHE_TEMP_SOURCE "/cache/saved.file"
|
#define CACHE_TEMP_SOURCE "/cache/saved.file"
|
||||||
|
|
||||||
|
// When writing to an MTD partition, we first put the output in this
|
||||||
|
// temp file, then copy it to the partition once the patching is
|
||||||
|
// finished (and the target sha1 verified).
|
||||||
|
#define MTD_TARGET_TEMP_FILE "/tmp/mtd-temp"
|
||||||
|
|
||||||
// applypatch.c
|
// applypatch.c
|
||||||
size_t FreeSpaceForFile(const char* filename);
|
size_t FreeSpaceForFile(const char* filename);
|
||||||
|
int applypatch(int argc, char** argv);
|
||||||
|
|
||||||
// bsdiff.c
|
// bsdiff.c
|
||||||
void ShowBSDiffLicense();
|
void ShowBSDiffLicense();
|
||||||
int ApplyBSDiffPatch(const unsigned char* old_data, ssize_t old_size,
|
int ApplyBSDiffPatch(const unsigned char* old_data, ssize_t old_size,
|
||||||
|
const char* patch_filename, ssize_t offset,
|
||||||
|
FILE* output, SHA_CTX* ctx);
|
||||||
|
int ApplyBSDiffPatchMem(const unsigned char* old_data, ssize_t old_size,
|
||||||
|
const char* patch_filename, ssize_t patch_offset,
|
||||||
|
unsigned char** new_data, ssize_t* new_size);
|
||||||
|
|
||||||
|
// imgpatch.c
|
||||||
|
int ApplyImagePatch(const unsigned char* old_data, ssize_t old_size,
|
||||||
const char* patch_filename,
|
const char* patch_filename,
|
||||||
FILE* output, SHA_CTX* ctx);
|
FILE* output, SHA_CTX* ctx);
|
||||||
|
|
||||||
|
@@ -24,16 +24,22 @@ WORK_DIR=/system
|
|||||||
# partition that WORK_DIR is located on, without the leading slash
|
# partition that WORK_DIR is located on, without the leading slash
|
||||||
WORK_FS=system
|
WORK_FS=system
|
||||||
|
|
||||||
|
# set to 0 to use a device instead
|
||||||
|
USE_EMULATOR=1
|
||||||
|
|
||||||
# ------------------------
|
# ------------------------
|
||||||
|
|
||||||
tmpdir=$(mktemp -d)
|
tmpdir=$(mktemp -d)
|
||||||
|
|
||||||
emulator -wipe-data -noaudio -no-window -port $EMULATOR_PORT &
|
if [ "$USE_EMULATOR" == 1 ]; then
|
||||||
pid_emulator=$!
|
emulator -wipe-data -noaudio -no-window -port $EMULATOR_PORT &
|
||||||
|
pid_emulator=$!
|
||||||
|
ADB="adb -s emulator-$EMULATOR_PORT "
|
||||||
|
else
|
||||||
|
ADB="adb -d "
|
||||||
|
fi
|
||||||
|
|
||||||
ADB="adb -s emulator-$EMULATOR_PORT "
|
echo "waiting to connect to device"
|
||||||
|
|
||||||
echo "emulator is $pid_emulator; waiting for startup"
|
|
||||||
$ADB wait-for-device
|
$ADB wait-for-device
|
||||||
echo "device is available"
|
echo "device is available"
|
||||||
$ADB remount
|
$ADB remount
|
||||||
@@ -56,7 +62,8 @@ fail() {
|
|||||||
echo
|
echo
|
||||||
echo FAIL: $testname
|
echo FAIL: $testname
|
||||||
echo
|
echo
|
||||||
kill $pid_emulator
|
[ "$open_pid" == "" ] || kill $open_pid
|
||||||
|
[ "$pid_emulator" == "" ] || kill $pid_emulator
|
||||||
exit 1
|
exit 1
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -68,6 +75,23 @@ free_space() {
|
|||||||
run_command df | awk "/$1/ {print gensub(/K/, \"\", \"g\", \$6)}"
|
run_command df | awk "/$1/ {print gensub(/K/, \"\", \"g\", \$6)}"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
cleanup() {
|
||||||
|
# not necessary if we're about to kill the emulator, but nice for
|
||||||
|
# running on real devices or already-running emulators.
|
||||||
|
testname "removing test files"
|
||||||
|
run_command rm $WORK_DIR/bloat.dat
|
||||||
|
run_command rm $WORK_DIR/old.file
|
||||||
|
run_command rm $WORK_DIR/patch.bsdiff
|
||||||
|
run_command rm $WORK_DIR/applypatch
|
||||||
|
run_command rm $CACHE_TEMP_SOURCE
|
||||||
|
run_command rm /cache/bloat*.dat
|
||||||
|
|
||||||
|
[ "$pid_emulator" == "" ] || kill $pid_emulator
|
||||||
|
|
||||||
|
rm -rf $tmpdir
|
||||||
|
}
|
||||||
|
|
||||||
|
cleanup
|
||||||
|
|
||||||
$ADB push $ANDROID_PRODUCT_OUT/system/bin/applypatch $WORK_DIR/applypatch
|
$ADB push $ANDROID_PRODUCT_OUT/system/bin/applypatch $WORK_DIR/applypatch
|
||||||
|
|
||||||
@@ -146,16 +170,71 @@ if (( free_kb * 1024 < NEW_SIZE * 3 / 2 )); then
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
testname "apply bsdiff patch"
|
testname "apply bsdiff patch"
|
||||||
run_command $WORK_DIR/applypatch $WORK_DIR/old.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
|
run_command $WORK_DIR/applypatch $WORK_DIR/old.file - $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
|
||||||
$ADB pull $WORK_DIR/old.file $tmpdir/patched
|
$ADB pull $WORK_DIR/old.file $tmpdir/patched
|
||||||
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
|
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
|
||||||
|
|
||||||
testname "reapply bsdiff patch"
|
testname "reapply bsdiff patch"
|
||||||
run_command $WORK_DIR/applypatch $WORK_DIR/old.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
|
run_command $WORK_DIR/applypatch $WORK_DIR/old.file - $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
|
||||||
$ADB pull $WORK_DIR/old.file $tmpdir/patched
|
$ADB pull $WORK_DIR/old.file $tmpdir/patched
|
||||||
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
|
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
|
||||||
|
|
||||||
|
|
||||||
|
# --------------- apply patch in new location ----------------------
|
||||||
|
|
||||||
|
$ADB push $DATA_DIR/old.file $WORK_DIR
|
||||||
|
$ADB push $DATA_DIR/patch.bsdiff $WORK_DIR
|
||||||
|
|
||||||
|
# Check that the partition has enough space to apply the patch without
|
||||||
|
# copying. If it doesn't, we'll be testing the low-space condition
|
||||||
|
# when we intend to test the not-low-space condition.
|
||||||
|
testname "apply patch to new location (with enough space)"
|
||||||
|
free_kb=$(free_space $WORK_FS)
|
||||||
|
echo "${free_kb}kb free on /$WORK_FS."
|
||||||
|
if (( free_kb * 1024 < NEW_SIZE * 3 / 2 )); then
|
||||||
|
echo "Not enough space on /$WORK_FS to patch test file."
|
||||||
|
echo
|
||||||
|
echo "This doesn't mean that applypatch is necessarily broken;"
|
||||||
|
echo "just that /$WORK_FS doesn't have enough free space to"
|
||||||
|
echo "properly run this test."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
run_command rm $WORK_DIR/new.file
|
||||||
|
run_command rm $CACHE_TEMP_SOURCE
|
||||||
|
|
||||||
|
testname "apply bsdiff patch to new location"
|
||||||
|
run_command $WORK_DIR/applypatch $WORK_DIR/old.file $WORK_DIR/new.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
|
||||||
|
$ADB pull $WORK_DIR/new.file $tmpdir/patched
|
||||||
|
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
|
||||||
|
|
||||||
|
testname "reapply bsdiff patch to new location"
|
||||||
|
run_command $WORK_DIR/applypatch $WORK_DIR/old.file $WORK_DIR/new.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
|
||||||
|
$ADB pull $WORK_DIR/new.file $tmpdir/patched
|
||||||
|
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
|
||||||
|
|
||||||
|
$ADB push $DATA_DIR/old.file $CACHE_TEMP_SOURCE
|
||||||
|
# put some junk in the old file
|
||||||
|
run_command dd if=/dev/urandom of=$WORK_DIR/old.file count=100 bs=1024 || fail
|
||||||
|
|
||||||
|
testname "apply bsdiff patch to new location with corrupted source"
|
||||||
|
run_command $WORK_DIR/applypatch $WORK_DIR/old.file $WORK_DIR/new.file $NEW_SHA1 $NEW_SIZE $OLD_SHA1:$WORK_DIR/patch.bsdiff $BAD1_SHA1:$WORK_DIR/foo || fail
|
||||||
|
$ADB pull $WORK_DIR/new.file $tmpdir/patched
|
||||||
|
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
|
||||||
|
|
||||||
|
# put some junk in the cache copy, too
|
||||||
|
run_command dd if=/dev/urandom of=$CACHE_TEMP_SOURCE count=100 bs=1024 || fail
|
||||||
|
|
||||||
|
run_command rm $WORK_DIR/new.file
|
||||||
|
testname "apply bsdiff patch to new location with corrupted source and copy (no new file)"
|
||||||
|
run_command $WORK_DIR/applypatch $WORK_DIR/old.file $WORK_DIR/new.file $NEW_SHA1 $NEW_SIZE $OLD_SHA1:$WORK_DIR/patch.bsdiff $BAD1_SHA1:$WORK_DIR/foo && fail
|
||||||
|
|
||||||
|
# put some junk in the new file
|
||||||
|
run_command dd if=/dev/urandom of=$WORK_DIR/new.file count=100 bs=1024 || fail
|
||||||
|
|
||||||
|
testname "apply bsdiff patch to new location with corrupted source and copy (bad new file)"
|
||||||
|
run_command $WORK_DIR/applypatch $WORK_DIR/old.file $WORK_DIR/new.file $NEW_SHA1 $NEW_SIZE $OLD_SHA1:$WORK_DIR/patch.bsdiff $BAD1_SHA1:$WORK_DIR/foo && fail
|
||||||
|
|
||||||
# --------------- apply patch with low space on /system ----------------------
|
# --------------- apply patch with low space on /system ----------------------
|
||||||
|
|
||||||
$ADB push $DATA_DIR/old.file $WORK_DIR
|
$ADB push $DATA_DIR/old.file $WORK_DIR
|
||||||
@@ -169,12 +248,12 @@ free_kb=$(free_space $WORK_FS)
|
|||||||
echo "${free_kb}kb free on /$WORK_FS now."
|
echo "${free_kb}kb free on /$WORK_FS now."
|
||||||
|
|
||||||
testname "apply bsdiff patch with low space"
|
testname "apply bsdiff patch with low space"
|
||||||
run_command $WORK_DIR/applypatch $WORK_DIR/old.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
|
run_command $WORK_DIR/applypatch $WORK_DIR/old.file - $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
|
||||||
$ADB pull $WORK_DIR/old.file $tmpdir/patched
|
$ADB pull $WORK_DIR/old.file $tmpdir/patched
|
||||||
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
|
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
|
||||||
|
|
||||||
testname "reapply bsdiff patch with low space"
|
testname "reapply bsdiff patch with low space"
|
||||||
run_command $WORK_DIR/applypatch $WORK_DIR/old.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
|
run_command $WORK_DIR/applypatch $WORK_DIR/old.file - $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
|
||||||
$ADB pull $WORK_DIR/old.file $tmpdir/patched
|
$ADB pull $WORK_DIR/old.file $tmpdir/patched
|
||||||
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
|
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
|
||||||
|
|
||||||
@@ -213,7 +292,7 @@ run_command ls /cache/subdir/a.file || fail # wasn't deleted because
|
|||||||
run_command ls $CACHE_TEMP_SOURCE || fail # wasn't deleted because it's the source file copy
|
run_command ls $CACHE_TEMP_SOURCE || fail # wasn't deleted because it's the source file copy
|
||||||
|
|
||||||
# should fail; not enough files can be deleted
|
# should fail; not enough files can be deleted
|
||||||
run_command $WORK_DIR/applypatch $WORK_DIR/old.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff && fail
|
run_command $WORK_DIR/applypatch $WORK_DIR/old.file - $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff && fail
|
||||||
run_command ls /cache/bloat_large.dat || fail # wasn't deleted because it was open
|
run_command ls /cache/bloat_large.dat || fail # wasn't deleted because it was open
|
||||||
run_command ls /cache/subdir/a.file || fail # wasn't deleted because it's in a subdir
|
run_command ls /cache/subdir/a.file || fail # wasn't deleted because it's in a subdir
|
||||||
run_command ls $CACHE_TEMP_SOURCE || fail # wasn't deleted because it's the source file copy
|
run_command ls $CACHE_TEMP_SOURCE || fail # wasn't deleted because it's the source file copy
|
||||||
@@ -229,7 +308,7 @@ run_command ls /cache/subdir/a.file || fail # still wasn't deleted because i
|
|||||||
run_command ls $CACHE_TEMP_SOURCE || fail # wasn't deleted because it's the source file copy
|
run_command ls $CACHE_TEMP_SOURCE || fail # wasn't deleted because it's the source file copy
|
||||||
|
|
||||||
# should succeed
|
# should succeed
|
||||||
run_command $WORK_DIR/applypatch $WORK_DIR/old.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
|
run_command $WORK_DIR/applypatch $WORK_DIR/old.file - $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
|
||||||
$ADB pull $WORK_DIR/old.file $tmpdir/patched
|
$ADB pull $WORK_DIR/old.file $tmpdir/patched
|
||||||
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
|
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
|
||||||
run_command ls /cache/subdir/a.file || fail # still wasn't deleted because it's in a subdir
|
run_command ls /cache/subdir/a.file || fail # still wasn't deleted because it's in a subdir
|
||||||
@@ -242,7 +321,7 @@ $ADB push $DATA_DIR/old.file $CACHE_TEMP_SOURCE
|
|||||||
run_command dd if=/dev/urandom of=$WORK_DIR/old.file count=100 bs=1024 || fail
|
run_command dd if=/dev/urandom of=$WORK_DIR/old.file count=100 bs=1024 || fail
|
||||||
|
|
||||||
testname "apply bsdiff patch from cache (corrupted source) with low space"
|
testname "apply bsdiff patch from cache (corrupted source) with low space"
|
||||||
run_command $WORK_DIR/applypatch $WORK_DIR/old.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
|
run_command $WORK_DIR/applypatch $WORK_DIR/old.file - $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
|
||||||
$ADB pull $WORK_DIR/old.file $tmpdir/patched
|
$ADB pull $WORK_DIR/old.file $tmpdir/patched
|
||||||
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
|
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
|
||||||
|
|
||||||
@@ -251,20 +330,14 @@ $ADB push $DATA_DIR/old.file $CACHE_TEMP_SOURCE
|
|||||||
run_command rm $WORK_DIR/old.file
|
run_command rm $WORK_DIR/old.file
|
||||||
|
|
||||||
testname "apply bsdiff patch from cache (missing source) with low space"
|
testname "apply bsdiff patch from cache (missing source) with low space"
|
||||||
run_command $WORK_DIR/applypatch $WORK_DIR/old.file $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
|
run_command $WORK_DIR/applypatch $WORK_DIR/old.file - $NEW_SHA1 $NEW_SIZE $BAD1_SHA1:$WORK_DIR/foo $OLD_SHA1:$WORK_DIR/patch.bsdiff || fail
|
||||||
$ADB pull $WORK_DIR/old.file $tmpdir/patched
|
$ADB pull $WORK_DIR/old.file $tmpdir/patched
|
||||||
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
|
diff -q $DATA_DIR/new.file $tmpdir/patched || fail
|
||||||
|
|
||||||
|
|
||||||
# --------------- cleanup ----------------------
|
# --------------- cleanup ----------------------
|
||||||
|
|
||||||
# not necessary if we're about to kill the emulator, but nice for
|
cleanup
|
||||||
# running on real devices or already-running emulators.
|
|
||||||
run_command rm /cache/bloat*.dat $WORK_DIR/bloat.dat $CACHE_TEMP_SOURCE $WORK_DIR/old.file $WORK_DIR/patch.xdelta3 $WORK_DIR/patch.bsdiff $WORK_DIR/applypatch
|
|
||||||
|
|
||||||
kill $pid_emulator
|
|
||||||
|
|
||||||
rm -rf $tmpdir
|
|
||||||
|
|
||||||
echo
|
echo
|
||||||
echo PASS
|
echo PASS
|
||||||
|
@@ -29,6 +29,7 @@
|
|||||||
#include <bzlib.h>
|
#include <bzlib.h>
|
||||||
|
|
||||||
#include "mincrypt/sha.h"
|
#include "mincrypt/sha.h"
|
||||||
|
#include "applypatch.h"
|
||||||
|
|
||||||
void ShowBSDiffLicense() {
|
void ShowBSDiffLicense() {
|
||||||
puts("The bsdiff library used herein is:\n"
|
puts("The bsdiff library used herein is:\n"
|
||||||
@@ -80,10 +81,34 @@ static off_t offtin(u_char *buf)
|
|||||||
return y;
|
return y;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
int ApplyBSDiffPatch(const unsigned char* old_data, ssize_t old_size,
|
int ApplyBSDiffPatch(const unsigned char* old_data, ssize_t old_size,
|
||||||
const char* patch_filename,
|
const char* patch_filename, ssize_t patch_offset,
|
||||||
FILE* output, SHA_CTX* ctx) {
|
FILE* output, SHA_CTX* ctx) {
|
||||||
|
|
||||||
|
unsigned char* new_data;
|
||||||
|
ssize_t new_size;
|
||||||
|
if (ApplyBSDiffPatchMem(old_data, old_size, patch_filename, patch_offset,
|
||||||
|
&new_data, &new_size) != 0) {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (fwrite(new_data, 1, new_size, output) < new_size) {
|
||||||
|
fprintf(stderr, "short write of output: %d (%s)\n", errno, strerror(errno));
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
if (ctx) {
|
||||||
|
SHA_update(ctx, new_data, new_size);
|
||||||
|
}
|
||||||
|
free(new_data);
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
int ApplyBSDiffPatchMem(const unsigned char* old_data, ssize_t old_size,
|
||||||
|
const char* patch_filename, ssize_t patch_offset,
|
||||||
|
unsigned char** new_data, ssize_t* new_size) {
|
||||||
|
|
||||||
FILE* f;
|
FILE* f;
|
||||||
if ((f = fopen(patch_filename, "rb")) == NULL) {
|
if ((f = fopen(patch_filename, "rb")) == NULL) {
|
||||||
fprintf(stderr, "failed to open patch file\n");
|
fprintf(stderr, "failed to open patch file\n");
|
||||||
@@ -102,6 +127,8 @@ int ApplyBSDiffPatch(const unsigned char* old_data, ssize_t old_size,
|
|||||||
// from oldfile to x bytes from the diff block; copy y bytes from the
|
// from oldfile to x bytes from the diff block; copy y bytes from the
|
||||||
// extra block; seek forwards in oldfile by z bytes".
|
// extra block; seek forwards in oldfile by z bytes".
|
||||||
|
|
||||||
|
fseek(f, patch_offset, SEEK_SET);
|
||||||
|
|
||||||
unsigned char header[32];
|
unsigned char header[32];
|
||||||
if (fread(header, 1, 32, f) < 32) {
|
if (fread(header, 1, 32, f) < 32) {
|
||||||
fprintf(stderr, "failed to read patch file header\n");
|
fprintf(stderr, "failed to read patch file header\n");
|
||||||
@@ -109,17 +136,16 @@ int ApplyBSDiffPatch(const unsigned char* old_data, ssize_t old_size,
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (memcmp(header, "BSDIFF40", 8) != 0) {
|
if (memcmp(header, "BSDIFF40", 8) != 0) {
|
||||||
fprintf(stderr, "corrupt patch file header (magic number)\n");
|
fprintf(stderr, "corrupt bsdiff patch file header (magic number)\n");
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
ssize_t ctrl_len, data_len;
|
ssize_t ctrl_len, data_len;
|
||||||
ssize_t new_size;
|
|
||||||
ctrl_len = offtin(header+8);
|
ctrl_len = offtin(header+8);
|
||||||
data_len = offtin(header+16);
|
data_len = offtin(header+16);
|
||||||
new_size = offtin(header+24);
|
*new_size = offtin(header+24);
|
||||||
|
|
||||||
if (ctrl_len < 0 || data_len < 0 || new_size < 0) {
|
if (ctrl_len < 0 || data_len < 0 || *new_size < 0) {
|
||||||
fprintf(stderr, "corrupt patch file header (data lengths)\n");
|
fprintf(stderr, "corrupt patch file header (data lengths)\n");
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
@@ -135,7 +161,7 @@ int ApplyBSDiffPatch(const unsigned char* old_data, ssize_t old_size,
|
|||||||
fprintf(stderr, "failed to open patch file\n"); \
|
fprintf(stderr, "failed to open patch file\n"); \
|
||||||
return 1; \
|
return 1; \
|
||||||
} \
|
} \
|
||||||
if (fseeko(f, offset, SEEK_SET)) { \
|
if (fseeko(f, offset+patch_offset, SEEK_SET)) { \
|
||||||
fprintf(stderr, "failed to seek in patch file\n"); \
|
fprintf(stderr, "failed to seek in patch file\n"); \
|
||||||
return 1; \
|
return 1; \
|
||||||
} \
|
} \
|
||||||
@@ -150,9 +176,10 @@ int ApplyBSDiffPatch(const unsigned char* old_data, ssize_t old_size,
|
|||||||
|
|
||||||
#undef OPEN_AT
|
#undef OPEN_AT
|
||||||
|
|
||||||
unsigned char* new_data = malloc(new_size);
|
*new_data = malloc(*new_size);
|
||||||
if (new_data == NULL) {
|
if (*new_data == NULL) {
|
||||||
fprintf(stderr, "failed to allocate memory for output file\n");
|
fprintf(stderr, "failed to allocate %d bytes of memory for output file\n",
|
||||||
|
(int)*new_size);
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -161,7 +188,7 @@ int ApplyBSDiffPatch(const unsigned char* old_data, ssize_t old_size,
|
|||||||
off_t len_read;
|
off_t len_read;
|
||||||
int i;
|
int i;
|
||||||
unsigned char buf[8];
|
unsigned char buf[8];
|
||||||
while (newpos < new_size) {
|
while (newpos < *new_size) {
|
||||||
// Read control data
|
// Read control data
|
||||||
for (i = 0; i < 3; ++i) {
|
for (i = 0; i < 3; ++i) {
|
||||||
len_read = BZ2_bzRead(&bzerr, cpfbz2, buf, 8);
|
len_read = BZ2_bzRead(&bzerr, cpfbz2, buf, 8);
|
||||||
@@ -173,13 +200,13 @@ int ApplyBSDiffPatch(const unsigned char* old_data, ssize_t old_size,
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Sanity check
|
// Sanity check
|
||||||
if (newpos + ctrl[0] > new_size) {
|
if (newpos + ctrl[0] > *new_size) {
|
||||||
fprintf(stderr, "corrupt patch (new file overrun)\n");
|
fprintf(stderr, "corrupt patch (new file overrun)\n");
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Read diff string
|
// Read diff string
|
||||||
len_read = BZ2_bzRead(&bzerr, dpfbz2, new_data + newpos, ctrl[0]);
|
len_read = BZ2_bzRead(&bzerr, dpfbz2, *new_data + newpos, ctrl[0]);
|
||||||
if (len_read < ctrl[0] || !(bzerr == BZ_OK || bzerr == BZ_STREAM_END)) {
|
if (len_read < ctrl[0] || !(bzerr == BZ_OK || bzerr == BZ_STREAM_END)) {
|
||||||
fprintf(stderr, "corrupt patch (read diff)\n");
|
fprintf(stderr, "corrupt patch (read diff)\n");
|
||||||
return 1;
|
return 1;
|
||||||
@@ -188,7 +215,7 @@ int ApplyBSDiffPatch(const unsigned char* old_data, ssize_t old_size,
|
|||||||
// Add old data to diff string
|
// Add old data to diff string
|
||||||
for (i = 0; i < ctrl[0]; ++i) {
|
for (i = 0; i < ctrl[0]; ++i) {
|
||||||
if ((oldpos+i >= 0) && (oldpos+i < old_size)) {
|
if ((oldpos+i >= 0) && (oldpos+i < old_size)) {
|
||||||
new_data[newpos+i] += old_data[oldpos+i];
|
(*new_data)[newpos+i] += old_data[oldpos+i];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -197,13 +224,13 @@ int ApplyBSDiffPatch(const unsigned char* old_data, ssize_t old_size,
|
|||||||
oldpos += ctrl[0];
|
oldpos += ctrl[0];
|
||||||
|
|
||||||
// Sanity check
|
// Sanity check
|
||||||
if (newpos + ctrl[1] > new_size) {
|
if (newpos + ctrl[1] > *new_size) {
|
||||||
fprintf(stderr, "corrupt patch (new file overrun)\n");
|
fprintf(stderr, "corrupt patch (new file overrun)\n");
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Read extra string
|
// Read extra string
|
||||||
len_read = BZ2_bzRead(&bzerr, epfbz2, new_data + newpos, ctrl[1]);
|
len_read = BZ2_bzRead(&bzerr, epfbz2, *new_data + newpos, ctrl[1]);
|
||||||
if (len_read < ctrl[1] || !(bzerr == BZ_OK || bzerr == BZ_STREAM_END)) {
|
if (len_read < ctrl[1] || !(bzerr == BZ_OK || bzerr == BZ_STREAM_END)) {
|
||||||
fprintf(stderr, "corrupt patch (read extra)\n");
|
fprintf(stderr, "corrupt patch (read extra)\n");
|
||||||
return 1;
|
return 1;
|
||||||
@@ -221,12 +248,5 @@ int ApplyBSDiffPatch(const unsigned char* old_data, ssize_t old_size,
|
|||||||
fclose(dpf);
|
fclose(dpf);
|
||||||
fclose(epf);
|
fclose(epf);
|
||||||
|
|
||||||
if (fwrite(new_data, 1, new_size, output) < new_size) {
|
|
||||||
fprintf(stderr, "short write of output: %d (%s)\n", errno, strerror(errno));
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
SHA_update(ctx, new_data, new_size);
|
|
||||||
free(new_data);
|
|
||||||
|
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
560
tools/applypatch/imgdiff.c
Normal file
560
tools/applypatch/imgdiff.c
Normal file
@@ -0,0 +1,560 @@
|
|||||||
|
/*
|
||||||
|
* Copyright (C) 2009 The Android Open Source Project
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
/*
|
||||||
|
* This program constructs binary patches for images -- such as boot.img
|
||||||
|
* and recovery.img -- that consist primarily of large chunks of gzipped
|
||||||
|
* data interspersed with uncompressed data. Doing a naive bsdiff of
|
||||||
|
* these files is not useful because small changes in the data lead to
|
||||||
|
* large changes in the compressed bitstream; bsdiff patches of gzipped
|
||||||
|
* data are typically as large as the data itself.
|
||||||
|
*
|
||||||
|
* To patch these usefully, we break the source and target images up into
|
||||||
|
* chunks of two types: "normal" and "gzip". Normal chunks are simply
|
||||||
|
* patched using a plain bsdiff. Gzip chunks are first expanded, then a
|
||||||
|
* bsdiff is applied to the uncompressed data, then the patched data is
|
||||||
|
* gzipped using the same encoder parameters. Patched chunks are
|
||||||
|
* concatenated together to create the output file; the output image
|
||||||
|
* should be *exactly* the same series of bytes as the target image used
|
||||||
|
* originally to generate the patch.
|
||||||
|
*
|
||||||
|
* To work well with this tool, the gzipped sections of the target
|
||||||
|
* image must have been generated using the same deflate encoder that
|
||||||
|
* is available in applypatch, namely, the one in the zlib library.
|
||||||
|
* In practice this means that images should be compressed using the
|
||||||
|
* "minigzip" tool included in the zlib distribution, not the GNU gzip
|
||||||
|
* program.
|
||||||
|
*
|
||||||
|
* An "imgdiff" patch consists of a header describing the chunk structure
|
||||||
|
* of the file and any encoding parameters needed for the gzipped
|
||||||
|
* chunks, followed by N bsdiff patches, one per chunk.
|
||||||
|
*
|
||||||
|
* For a diff to be generated, the source and target images must have the
|
||||||
|
* same "chunk" structure: that is, the same number of gzipped and normal
|
||||||
|
* chunks in the same order. Android boot and recovery images currently
|
||||||
|
* consist of five chunks: a small normal header, a gzipped kernel, a
|
||||||
|
* small normal section, a gzipped ramdisk, and finally a small normal
|
||||||
|
* footer.
|
||||||
|
*
|
||||||
|
* Caveats: we locate gzipped sections within the source and target
|
||||||
|
* images by searching for the byte sequence 1f8b0800: 1f8b is the gzip
|
||||||
|
* magic number; 08 specifies the "deflate" encoding [the only encoding
|
||||||
|
* supported by the gzip standard]; and 00 is the flags byte. We do not
|
||||||
|
* currently support any extra header fields (which would be indicated by
|
||||||
|
* a nonzero flags byte). We also don't handle the case when that byte
|
||||||
|
* sequence appears spuriously in the file. (Note that it would have to
|
||||||
|
* occur spuriously within a normal chunk to be a problem.)
|
||||||
|
*
|
||||||
|
*
|
||||||
|
* The imgdiff patch header looks like this:
|
||||||
|
*
|
||||||
|
* "IMGDIFF1" (8) [magic number and version]
|
||||||
|
* chunk count (4)
|
||||||
|
* for each chunk:
|
||||||
|
* chunk type (4) [CHUNK_NORMAL or CHUNK_GZIP]
|
||||||
|
* source start (8)
|
||||||
|
* source len (8)
|
||||||
|
* bsdiff patch offset (8) [from start of patch file]
|
||||||
|
* if chunk type == CHUNK_GZIP:
|
||||||
|
* source expanded len (8) [size of uncompressed source]
|
||||||
|
* target expected len (8) [size of uncompressed target]
|
||||||
|
* gzip level (4)
|
||||||
|
* method (4)
|
||||||
|
* windowBits (4)
|
||||||
|
* memLevel (4)
|
||||||
|
* strategy (4)
|
||||||
|
* gzip header len (4)
|
||||||
|
* gzip header (gzip header len)
|
||||||
|
* gzip footer (8)
|
||||||
|
*
|
||||||
|
* All integers are little-endian. "source start" and "source len"
|
||||||
|
* specify the section of the input image that comprises this chunk,
|
||||||
|
* including the gzip header and footer for gzip chunks. "source
|
||||||
|
* expanded len" is the size of the uncompressed source data. "target
|
||||||
|
* expected len" is the size of the uncompressed data after applying
|
||||||
|
* the bsdiff patch. The next five parameters specify the zlib
|
||||||
|
* parameters to be used when compressing the patched data, and the
|
||||||
|
* next three specify the header and footer to be wrapped around the
|
||||||
|
* compressed data to create the output chunk (so that header contents
|
||||||
|
* like the timestamp are recreated exactly).
|
||||||
|
*
|
||||||
|
* After the header there are 'chunk count' bsdiff patches; the offset
|
||||||
|
* of each from the beginning of the file is specified in the header.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include <errno.h>
|
||||||
|
#include <stdio.h>
|
||||||
|
#include <stdlib.h>
|
||||||
|
#include <string.h>
|
||||||
|
#include <sys/stat.h>
|
||||||
|
#include <unistd.h>
|
||||||
|
|
||||||
|
#include "zlib.h"
|
||||||
|
#include "imgdiff.h"
|
||||||
|
|
||||||
|
typedef struct {
|
||||||
|
int type; // CHUNK_NORMAL or CHUNK_GZIP
|
||||||
|
size_t start; // offset of chunk in original image file
|
||||||
|
|
||||||
|
size_t len;
|
||||||
|
unsigned char* data; // data to be patched (ie, uncompressed, for
|
||||||
|
// gzip chunks)
|
||||||
|
|
||||||
|
// everything else is for CHUNK_GZIP chunks only:
|
||||||
|
|
||||||
|
size_t gzip_header_len;
|
||||||
|
unsigned char* gzip_header;
|
||||||
|
unsigned char* gzip_footer;
|
||||||
|
|
||||||
|
// original (compressed) gzip data, including header and footer
|
||||||
|
size_t gzip_len;
|
||||||
|
unsigned char* gzip_data;
|
||||||
|
|
||||||
|
// deflate encoder parameters
|
||||||
|
int level, method, windowBits, memLevel, strategy;
|
||||||
|
} ImageChunk;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Read the given file and break it up into chunks, putting the number
|
||||||
|
* of chunks and their info in *num_chunks and **chunks,
|
||||||
|
* respectively. Returns a malloc'd block of memory containing the
|
||||||
|
* contents of the file; various pointers in the output chunk array
|
||||||
|
* will point into this block of memory. The caller should free the
|
||||||
|
* return value when done with all the chunks. Returns NULL on
|
||||||
|
* failure.
|
||||||
|
*/
|
||||||
|
unsigned char* ReadImage(const char* filename,
|
||||||
|
int* num_chunks, ImageChunk** chunks) {
|
||||||
|
struct stat st;
|
||||||
|
if (stat(filename, &st) != 0) {
|
||||||
|
fprintf(stderr, "failed to stat \"%s\": %s\n", filename, strerror(errno));
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsigned char* img = malloc(st.st_size + 4);
|
||||||
|
FILE* f = fopen(filename, "rb");
|
||||||
|
if (fread(img, 1, st.st_size, f) != st.st_size) {
|
||||||
|
fprintf(stderr, "failed to read \"%s\" %s\n", filename, strerror(errno));
|
||||||
|
fclose(f);
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
fclose(f);
|
||||||
|
|
||||||
|
// append 4 zero bytes to the data so we can always search for the
|
||||||
|
// four-byte string 1f8b0800 starting at any point in the actual
|
||||||
|
// file data, without special-casing the end of the data.
|
||||||
|
memset(img+st.st_size, 0, 4);
|
||||||
|
|
||||||
|
size_t pos = 0;
|
||||||
|
|
||||||
|
*num_chunks = 0;
|
||||||
|
*chunks = NULL;
|
||||||
|
|
||||||
|
while (pos < st.st_size) {
|
||||||
|
unsigned char* p = img+pos;
|
||||||
|
|
||||||
|
// Reallocate the list for every chunk; we expect the number of
|
||||||
|
// chunks to be small (5 for typical boot and recovery images).
|
||||||
|
++*num_chunks;
|
||||||
|
*chunks = realloc(*chunks, *num_chunks * sizeof(ImageChunk));
|
||||||
|
ImageChunk* curr = *chunks + (*num_chunks-1);
|
||||||
|
curr->start = pos;
|
||||||
|
|
||||||
|
if (st.st_size - pos >= 4 &&
|
||||||
|
p[0] == 0x1f && p[1] == 0x8b &&
|
||||||
|
p[2] == 0x08 && // deflate compression
|
||||||
|
p[3] == 0x00) { // no header flags
|
||||||
|
// 'pos' is the offset of the start of a gzip chunk.
|
||||||
|
|
||||||
|
curr->type = CHUNK_GZIP;
|
||||||
|
curr->gzip_header_len = GZIP_HEADER_LEN;
|
||||||
|
curr->gzip_header = p;
|
||||||
|
|
||||||
|
// We must decompress this chunk in order to discover where it
|
||||||
|
// ends, and so we can put the uncompressed data and its length
|
||||||
|
// into curr->data and curr->len;
|
||||||
|
|
||||||
|
size_t allocated = 32768;
|
||||||
|
curr->len = 0;
|
||||||
|
curr->data = malloc(allocated);
|
||||||
|
curr->gzip_data = p;
|
||||||
|
|
||||||
|
z_stream strm;
|
||||||
|
strm.zalloc = Z_NULL;
|
||||||
|
strm.zfree = Z_NULL;
|
||||||
|
strm.opaque = Z_NULL;
|
||||||
|
strm.avail_in = st.st_size - (pos + curr->gzip_header_len);
|
||||||
|
strm.next_in = p + GZIP_HEADER_LEN;
|
||||||
|
|
||||||
|
// -15 means we are decoding a 'raw' deflate stream; zlib will
|
||||||
|
// not expect zlib headers.
|
||||||
|
int ret = inflateInit2(&strm, -15);
|
||||||
|
|
||||||
|
do {
|
||||||
|
strm.avail_out = allocated - curr->len;
|
||||||
|
strm.next_out = curr->data + curr->len;
|
||||||
|
ret = inflate(&strm, Z_NO_FLUSH);
|
||||||
|
curr->len = allocated - strm.avail_out;
|
||||||
|
if (strm.avail_out == 0) {
|
||||||
|
allocated *= 2;
|
||||||
|
curr->data = realloc(curr->data, allocated);
|
||||||
|
}
|
||||||
|
} while (ret != Z_STREAM_END);
|
||||||
|
|
||||||
|
curr->gzip_len = st.st_size - strm.avail_in - pos + GZIP_FOOTER_LEN;
|
||||||
|
pos = st.st_size - strm.avail_in;
|
||||||
|
inflateEnd(&strm);
|
||||||
|
|
||||||
|
// consume the gzip footer.
|
||||||
|
curr->gzip_footer = img+pos;
|
||||||
|
pos += GZIP_FOOTER_LEN;
|
||||||
|
p = img+pos;
|
||||||
|
|
||||||
|
// The footer (that we just skipped over) contains the size of
|
||||||
|
// the uncompressed data. Double-check to make sure that it
|
||||||
|
// matches the size of the data we got when we actually did
|
||||||
|
// the decompression.
|
||||||
|
size_t footer_size = p[-4] + (p[-3] << 8) + (p[-2] << 16) + (p[-1] << 24);
|
||||||
|
if (footer_size != curr->len) {
|
||||||
|
fprintf(stderr, "Error: footer size %d != decompressed size %d\n",
|
||||||
|
footer_size, curr->len);
|
||||||
|
free(img);
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// 'pos' is not the offset of the start of a gzip chunk, so scan
|
||||||
|
// forward until we find a gzip header.
|
||||||
|
curr->type = CHUNK_NORMAL;
|
||||||
|
curr->data = p;
|
||||||
|
|
||||||
|
for (curr->len = 0; curr->len < (st.st_size - pos); ++curr->len) {
|
||||||
|
if (p[curr->len] == 0x1f &&
|
||||||
|
p[curr->len+1] == 0x8b &&
|
||||||
|
p[curr->len+2] == 0x08 &&
|
||||||
|
p[curr->len+3] == 0x00) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pos += curr->len;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return img;
|
||||||
|
}
|
||||||
|
|
||||||
|
#define BUFFER_SIZE 32768
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Takes the uncompressed data stored in the chunk, compresses it
|
||||||
|
* using the zlib parameters stored in the chunk, and checks that it
|
||||||
|
* matches exactly the compressed data we started with (also stored in
|
||||||
|
* the chunk). Return 0 on success.
|
||||||
|
*/
|
||||||
|
int TryReconstruction(ImageChunk* chunk, unsigned char* out) {
|
||||||
|
size_t p = chunk->gzip_header_len;
|
||||||
|
|
||||||
|
z_stream strm;
|
||||||
|
strm.zalloc = Z_NULL;
|
||||||
|
strm.zfree = Z_NULL;
|
||||||
|
strm.opaque = Z_NULL;
|
||||||
|
strm.avail_in = chunk->len;
|
||||||
|
strm.next_in = chunk->data;
|
||||||
|
int ret;
|
||||||
|
ret = deflateInit2(&strm, chunk->level, chunk->method, chunk->windowBits,
|
||||||
|
chunk->memLevel, chunk->strategy);
|
||||||
|
do {
|
||||||
|
strm.avail_out = BUFFER_SIZE;
|
||||||
|
strm.next_out = out;
|
||||||
|
ret = deflate(&strm, Z_FINISH);
|
||||||
|
size_t have = BUFFER_SIZE - strm.avail_out;
|
||||||
|
|
||||||
|
if (memcmp(out, chunk->gzip_data+p, have) != 0) {
|
||||||
|
// mismatch; data isn't the same.
|
||||||
|
deflateEnd(&strm);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
p += have;
|
||||||
|
} while (ret != Z_STREAM_END);
|
||||||
|
deflateEnd(&strm);
|
||||||
|
if (p + GZIP_FOOTER_LEN != chunk->gzip_len) {
|
||||||
|
// mismatch; ran out of data before we should have.
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Verify that we can reproduce exactly the same compressed data that
|
||||||
|
* we started with. Sets the level, method, windowBits, memLevel, and
|
||||||
|
* strategy fields in the chunk to the encoding parameters needed to
|
||||||
|
* produce the right output. Returns 0 on success.
|
||||||
|
*/
|
||||||
|
int ReconstructGzipChunk(ImageChunk* chunk) {
|
||||||
|
if (chunk->type != CHUNK_GZIP) {
|
||||||
|
fprintf(stderr, "attempt to reconstruct non-gzip chunk\n");
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
size_t p = 0;
|
||||||
|
unsigned char* out = malloc(BUFFER_SIZE);
|
||||||
|
|
||||||
|
// We only check two combinations of encoder parameters: level 6
|
||||||
|
// (the default) and level 9 (the maximum).
|
||||||
|
for (chunk->level = 6; chunk->level <= 9; chunk->level += 3) {
|
||||||
|
chunk->windowBits = -15; // 32kb window; negative to indicate a raw stream.
|
||||||
|
chunk->memLevel = 8; // the default value.
|
||||||
|
chunk->method = Z_DEFLATED;
|
||||||
|
chunk->strategy = Z_DEFAULT_STRATEGY;
|
||||||
|
|
||||||
|
if (TryReconstruction(chunk, out) == 0) {
|
||||||
|
free(out);
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
free(out);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Write a 4-byte value to f in little-endian order. */
void Write4(int value, FILE* f) {
  int shift;
  // Emit the least-significant byte first.
  for (shift = 0; shift < 32; shift += 8) {
    fputc((value >> shift) & 0xff, f);
  }
}
|
||||||
|
|
||||||
|
/** Write an 8-byte value to f in little-endian order. */
void Write8(long long value, FILE* f) {
  int shift;
  // Emit the least-significant byte first.
  for (shift = 0; shift < 64; shift += 8) {
    fputc((value >> shift) & 0xff, f);
  }
}
|
||||||
|
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Given source and target chunks, compute a bsdiff patch between them
|
||||||
|
* by running bsdiff in a subprocess. Return the patch data, placing
|
||||||
|
* its length in *size. Return NULL on failure. We expect the bsdiff
|
||||||
|
* program to be in the path.
|
||||||
|
*/
|
||||||
|
unsigned char* MakePatch(ImageChunk* src, ImageChunk* tgt, size_t* size) {
|
||||||
|
char stemp[] = "/tmp/imgdiff-src-XXXXXX";
|
||||||
|
char ttemp[] = "/tmp/imgdiff-tgt-XXXXXX";
|
||||||
|
char ptemp[] = "/tmp/imgdiff-patch-XXXXXX";
|
||||||
|
mkstemp(stemp);
|
||||||
|
mkstemp(ttemp);
|
||||||
|
mkstemp(ptemp);
|
||||||
|
|
||||||
|
FILE* f = fopen(stemp, "wb");
|
||||||
|
if (f == NULL) {
|
||||||
|
fprintf(stderr, "failed to open src chunk %s: %s\n",
|
||||||
|
stemp, strerror(errno));
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
if (fwrite(src->data, 1, src->len, f) != src->len) {
|
||||||
|
fprintf(stderr, "failed to write src chunk to %s: %s\n",
|
||||||
|
stemp, strerror(errno));
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
fclose(f);
|
||||||
|
|
||||||
|
f = fopen(ttemp, "wb");
|
||||||
|
if (f == NULL) {
|
||||||
|
fprintf(stderr, "failed to open tgt chunk %s: %s\n",
|
||||||
|
ttemp, strerror(errno));
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
if (fwrite(tgt->data, 1, tgt->len, f) != tgt->len) {
|
||||||
|
fprintf(stderr, "failed to write tgt chunk to %s: %s\n",
|
||||||
|
ttemp, strerror(errno));
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
fclose(f);
|
||||||
|
|
||||||
|
char cmd[200];
|
||||||
|
sprintf(cmd, "bsdiff %s %s %s", stemp, ttemp, ptemp);
|
||||||
|
if (system(cmd) != 0) {
|
||||||
|
fprintf(stderr, "failed to run bsdiff: %s\n", strerror(errno));
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
|
||||||
|
struct stat st;
|
||||||
|
if (stat(ptemp, &st) != 0) {
|
||||||
|
fprintf(stderr, "failed to stat patch file %s: %s\n",
|
||||||
|
ptemp, strerror(errno));
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
|
||||||
|
unsigned char* data = malloc(st.st_size);
|
||||||
|
*size = st.st_size;
|
||||||
|
|
||||||
|
f = fopen(ptemp, "rb");
|
||||||
|
if (f == NULL) {
|
||||||
|
fprintf(stderr, "failed to open patch %s: %s\n", ptemp, strerror(errno));
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
if (fread(data, 1, st.st_size, f) != st.st_size) {
|
||||||
|
fprintf(stderr, "failed to read patch %s: %s\n", ptemp, strerror(errno));
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
fclose(f);
|
||||||
|
|
||||||
|
unlink(stemp);
|
||||||
|
unlink(ttemp);
|
||||||
|
unlink(ptemp);
|
||||||
|
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Cause a gzip chunk to be treated as a normal chunk (ie, as a blob
|
||||||
|
* of uninterpreted data). The resulting patch will likely be about
|
||||||
|
* as big as the target file, but it lets us handle the case of images
|
||||||
|
* where some gzip chunks are reconstructible but others aren't (by
|
||||||
|
* treating the ones that aren't as normal chunks).
|
||||||
|
*/
|
||||||
|
void ChangeGzipChunkToNormal(ImageChunk* ch) {
|
||||||
|
ch->type = CHUNK_NORMAL;
|
||||||
|
free(ch->data);
|
||||||
|
ch->data = ch->gzip_data;
|
||||||
|
ch->len = ch->gzip_len;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Entry point for the imgdiff tool.
//   usage: imgdiff <src-img> <tgt-img> <patch-file>
// Reads both images, splits each into gzip/normal chunks, verifies the
// two chunk structures match, computes a bsdiff patch per chunk, and
// writes an "IMGDIFF1" patch file.  Returns 2 on usage error, 1 on any
// processing failure, 0 on success.
int main(int argc, char** argv) {
  if (argc != 4) {
    fprintf(stderr, "usage: %s <src-img> <tgt-img> <patch-file>\n", argv[0]);
    return 2;
  }

  // Break the source image into chunks (gzip regions + raw regions).
  int num_src_chunks;
  ImageChunk* src_chunks;
  if (ReadImage(argv[1], &num_src_chunks, &src_chunks) == NULL) {
    fprintf(stderr, "failed to break apart source image\n");
    return 1;
  }

  // Same for the target image.
  int num_tgt_chunks;
  ImageChunk* tgt_chunks;
  if (ReadImage(argv[2], &num_tgt_chunks, &tgt_chunks) == NULL) {
    fprintf(stderr, "failed to break apart target image\n");
    return 1;
  }

  // Verify that the source and target images have the same chunk
  // structure (ie, the same sequence of gzip and normal chunks).

  if (num_src_chunks != num_tgt_chunks) {
    fprintf(stderr, "source and target don't have same number of chunks!\n");
    return 1;
  }
  int i;
  for (i = 0; i < num_src_chunks; ++i) {
    if (src_chunks[i].type != tgt_chunks[i].type) {
      fprintf(stderr, "source and target don't have same chunk "
              "structure! (chunk %d)\n", i);
      return 1;
    }
  }

  // Confirm that given the uncompressed chunk data in the target, we
  // can recompress it and get exactly the same bits as are in the
  // input target image.  If this fails, treat the chunk as a normal
  // non-gzipped chunk.

  for (i = 0; i < num_tgt_chunks; ++i) {
    if (tgt_chunks[i].type == CHUNK_GZIP) {
      if (ReconstructGzipChunk(tgt_chunks+i) < 0) {
        printf("failed to reconstruct target gzip chunk %d; "
               "treating as normal chunk\n", i);
        // Demote BOTH sides so the pair stays structurally identical.
        ChangeGzipChunkToNormal(tgt_chunks+i);
        ChangeGzipChunkToNormal(src_chunks+i);
      } else {
        printf("reconstructed target gzip chunk %d\n", i);
      }
    }
  }

  // Compute bsdiff patches for each chunk's data (the uncompressed
  // data, in the case of gzip chunks).

  // NOTE(review): these mallocs and MakePatch's return value are not
  // NULL-checked — confirm and harden separately.
  unsigned char** patch_data = malloc(num_src_chunks * sizeof(unsigned char*));
  size_t* patch_size = malloc(num_src_chunks * sizeof(size_t));
  for (i = 0; i < num_src_chunks; ++i) {
    patch_data[i] = MakePatch(src_chunks+i, tgt_chunks+i, patch_size+i);
    // NOTE(review): "%d" is used here for size_t values — should be %zu.
    printf("patch %d is %d bytes (of %d)\n", i, patch_size[i],
           tgt_chunks[i].type == CHUNK_NORMAL ? tgt_chunks[i].len : tgt_chunks[i].gzip_len);

  }

  // Figure out how big the imgdiff file header is going to be, so
  // that we can correctly compute the offset of each bsdiff patch
  // within the file.

  size_t total_header_size = 12;      // 8-byte "IMGDIFF1" magic + 4-byte count
  for (i = 0; i < num_src_chunks; ++i) {
    total_header_size += 4 + 8*3;     // per chunk: type + start/len/offset
    if (src_chunks[i].type == CHUNK_GZIP) {
      // gzip chunks carry extra fields: two 8-byte lengths, six 4-byte
      // deflate parameters, the literal gzip header, and an 8-byte footer.
      total_header_size += 8*2 + 4*6 + tgt_chunks[i].gzip_header_len + 8;
    }
  }

  // Patches are laid out back-to-back immediately after the header.
  size_t offset = total_header_size;

  // NOTE(review): fopen result is unchecked — a failed open would crash
  // in fwrite below; confirm and fix separately.
  FILE* f = fopen(argv[3], "wb");

  // Write out the headers.

  fwrite("IMGDIFF1", 1, 8, f);
  Write4(num_src_chunks, f);
  for (i = 0; i < num_tgt_chunks; ++i) {
    Write4(tgt_chunks[i].type, f);
    Write8(src_chunks[i].start, f);
    // NOTE(review): ReadImage computes gzip_len to already span
    // header+data+footer, so adding gzip_header_len + 8 here looks like
    // a double count — verify against the patch-format consumer.
    Write8(src_chunks[i].type == CHUNK_NORMAL ? src_chunks[i].len :
           (src_chunks[i].gzip_len + src_chunks[i].gzip_header_len + 8), f);
    Write8(offset, f);

    if (tgt_chunks[i].type == CHUNK_GZIP) {
      // Extra gzip-chunk fields: lengths, deflate parameters, and the
      // literal header/footer bytes needed to rebuild the target stream.
      Write8(src_chunks[i].len, f);
      Write8(tgt_chunks[i].len, f);
      Write4(tgt_chunks[i].level, f);
      Write4(tgt_chunks[i].method, f);
      Write4(tgt_chunks[i].windowBits, f);
      Write4(tgt_chunks[i].memLevel, f);
      Write4(tgt_chunks[i].strategy, f);
      Write4(tgt_chunks[i].gzip_header_len, f);
      fwrite(tgt_chunks[i].gzip_header, 1, tgt_chunks[i].gzip_header_len, f);
      fwrite(tgt_chunks[i].gzip_footer, 1, GZIP_FOOTER_LEN, f);
    }

    offset += patch_size[i];
  }

  // Append each chunk's bsdiff patch, in order.

  for (i = 0; i < num_tgt_chunks; ++i) {
    fwrite(patch_data[i], 1, patch_size[i], f);
  }

  fclose(f);

  return 0;
}
|
28
tools/applypatch/imgdiff.h
Normal file
28
tools/applypatch/imgdiff.h
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
/*
|
||||||
|
* Copyright (C) 2009 The Android Open Source Project
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
// Image patch chunk types (the 4-byte type field of each chunk record
// in an "IMGDIFF1" patch file).
#define CHUNK_NORMAL 0
#define CHUNK_GZIP 1

// The gzip header size is actually variable, but we currently don't
// support gzipped data with any of the optional fields, so for now it
// will always be ten bytes. See RFC 1952 for the definition of the
// gzip format.
#define GZIP_HEADER_LEN 10

// The gzip footer size really is fixed: a 4-byte CRC32 followed by the
// 4-byte uncompressed size (ISIZE), per RFC 1952.
#define GZIP_FOOTER_LEN 8
|
228
tools/applypatch/imgpatch.c
Normal file
228
tools/applypatch/imgpatch.c
Normal file
@@ -0,0 +1,228 @@
|
|||||||
|
/*
|
||||||
|
* Copyright (C) 2009 The Android Open Source Project
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
// See imgdiff.c in this directory for a description of the patch file
|
||||||
|
// format.
|
||||||
|
|
||||||
|
#include <stdio.h>
|
||||||
|
#include <sys/stat.h>
|
||||||
|
#include <errno.h>
|
||||||
|
#include <unistd.h>
|
||||||
|
#include <string.h>
|
||||||
|
|
||||||
|
#include "zlib.h"
|
||||||
|
#include "mincrypt/sha.h"
|
||||||
|
#include "applypatch.h"
|
||||||
|
#include "imgdiff.h"
|
||||||
|
|
||||||
|
/* Decode a 4-byte little-endian integer stored at p. */
int Read4(unsigned char* p) {
  unsigned int v = 0;
  int i;
  // Fold bytes in from most- to least-significant.
  for (i = 3; i >= 0; --i) {
    v = (v << 8) | (unsigned int)p[i];
  }
  return (int)v;
}
|
||||||
|
|
||||||
|
/* Decode an 8-byte little-endian integer stored at p. */
long long Read8(unsigned char* p) {
  unsigned long long v = 0;
  int i;
  // Fold bytes in from most- to least-significant.
  for (i = 7; i >= 0; --i) {
    v = (v << 8) | (unsigned long long)p[i];
  }
  return (long long)v;
}
|
||||||
|
|
||||||
|
/*
 * Apply the patch given in 'patch_filename' to the source data given
 * by (old_data, old_size). Write the patched output to the 'output'
 * file, and update the SHA context with the output data as well.
 * Return 0 on success.
 */
int ApplyImagePatch(const unsigned char* old_data, ssize_t old_size,
                    const char* patch_filename,
                    FILE* output, SHA_CTX* ctx) {
  // NOTE(review): many early-return error paths below leak 'f', 'gzip',
  // and 'expanded_source' — acceptable if callers exit on failure, but
  // confirm and consider goto-style cleanup.
  FILE* f;
  if ((f = fopen(patch_filename, "rb")) == NULL) {
    fprintf(stderr, "failed to open patch file\n");
    return -1;
  }

  // Validate the fixed 12-byte file header: 8-byte magic + chunk count.
  unsigned char header[12];
  if (fread(header, 1, 12, f) != 12) {
    fprintf(stderr, "failed to read patch file header\n");
    return -1;
  }

  if (memcmp(header, "IMGDIFF1", 8) != 0) {
    fprintf(stderr, "corrupt patch file header (magic number)\n");
    return -1;
  }

  int num_chunks = Read4(header+8);

  int i;
  for (i = 0; i < num_chunks; ++i) {
    // each chunk's header record starts with 28 bytes (4 + 8*3).
    unsigned char chunk[28];
    if (fread(chunk, 1, 28, f) != 28) {
      fprintf(stderr, "failed to read chunk %d record\n", i);
      return -1;
    }

    // Fixed part of the record: type, source start/length, and the
    // offset of this chunk's bsdiff data within the patch file.
    int type = Read4(chunk);
    size_t src_start = Read8(chunk+4);
    size_t src_len = Read8(chunk+12);
    size_t patch_offset = Read8(chunk+20);

    if (type == CHUNK_NORMAL) {
      // NOTE(review): "%d" with a size_t argument — should be %zu.
      fprintf(stderr, "CHUNK %d: normal patch offset %d\n", i, patch_offset);

      // Normal chunk: bsdiff the raw source bytes straight to output.
      ApplyBSDiffPatch(old_data + src_start, src_len,
                       patch_filename, patch_offset,
                       output, ctx);
    } else if (type == CHUNK_GZIP) {
      fprintf(stderr, "CHUNK %d: gzip patch offset %d\n", i, patch_offset);

      // gzip chunks have an additional 40 + gzip_header_len + 8 bytes
      // in their chunk header.
      // NOTE(review): malloc/realloc results below are unchecked, and
      // gzip_header_len comes straight from the patch file — confirm
      // whether hardening against corrupt patches is required here.
      unsigned char* gzip = malloc(40);
      if (fread(gzip, 1, 40, f) != 40) {
        fprintf(stderr, "failed to read chunk %d initial gzip data\n", i);
        return -1;
      }
      size_t gzip_header_len = Read4(gzip+36);
      gzip = realloc(gzip, 40 + gzip_header_len + 8);
      if (fread(gzip+40, 1, gzip_header_len+8, f) != gzip_header_len+8) {
        fprintf(stderr, "failed to read chunk %d remaining gzip data\n", i);
        return -1;
      }

      // Decode the gzip-specific fields of the chunk record.
      size_t expanded_len = Read8(gzip);
      // NOTE(review): target_len reads the SAME offset as expanded_len;
      // given the parameter fields start at offset 16, this likely
      // should be Read8(gzip+8).  target_len is also unused below —
      // confirm against the imgdiff writer.
      size_t target_len = Read8(gzip);
      int gz_level = Read4(gzip+16);
      int gz_method = Read4(gzip+20);
      int gz_windowBits = Read4(gzip+24);
      int gz_memLevel = Read4(gzip+28);
      int gz_strategy = Read4(gzip+32);

      // Decompress the source data; the chunk header tells us exactly
      // how big we expect it to be when decompressed.

      unsigned char* expanded_source = malloc(expanded_len);
      if (expanded_source == NULL) {
        fprintf(stderr, "failed to allocate %d bytes for expanded_source\n",
                expanded_len);
        return -1;
      }

      z_stream strm;
      strm.zalloc = Z_NULL;
      strm.zfree = Z_NULL;
      strm.opaque = Z_NULL;
      // Skip the gzip header; exclude the 8-byte footer from the input.
      strm.avail_in = src_len - (gzip_header_len + 8);
      strm.next_in = (unsigned char*)(old_data + src_start + gzip_header_len);
      strm.avail_out = expanded_len;
      strm.next_out = expanded_source;

      int ret;
      // -15: raw deflate stream (no zlib wrapper).
      ret = inflateInit2(&strm, -15);
      if (ret != Z_OK) {
        fprintf(stderr, "failed to init source inflation: %d\n", ret);
        return -1;
      }

      // Because we've provided enough room to accommodate the output
      // data, we expect one call to inflate() to suffice.
      ret = inflate(&strm, Z_SYNC_FLUSH);
      if (ret != Z_STREAM_END) {
        fprintf(stderr, "source inflation returned %d\n", ret);
        return -1;
      }
      // We should have filled the output buffer exactly.
      if (strm.avail_out != 0) {
        fprintf(stderr, "source inflation short by %d bytes\n", strm.avail_out);
        return -1;
      }
      inflateEnd(&strm);

      // Next, apply the bsdiff patch (in memory) to the uncompressed
      // data.
      unsigned char* uncompressed_target_data;
      ssize_t uncompressed_target_size;
      if (ApplyBSDiffPatchMem(expanded_source, expanded_len,
                              patch_filename, patch_offset,
                              &uncompressed_target_data,
                              &uncompressed_target_size) != 0) {
        return -1;
      }

      // Now compress the target data and append it to the output.

      // start with the gzip header.
      fwrite(gzip+40, 1, gzip_header_len, output);
      SHA_update(ctx, gzip+40, gzip_header_len);

      // we're done with the expanded_source data buffer, so we'll
      // reuse that memory to receive the output of deflate.
      unsigned char* temp_data = expanded_source;
      ssize_t temp_size = expanded_len;
      if (temp_size < 32768) {
        // ... unless the buffer is too small, in which case we'll
        // allocate a fresh one.
        free(temp_data);
        temp_data = malloc(32768);
        temp_size = 32768;
      }

      // now the deflate stream
      strm.zalloc = Z_NULL;
      strm.zfree = Z_NULL;
      strm.opaque = Z_NULL;
      strm.avail_in = uncompressed_target_size;
      strm.next_in = uncompressed_target_data;
      // Use the exact encoder parameters recorded by imgdiff so the
      // recompressed bytes match the original target image.
      ret = deflateInit2(&strm, gz_level, gz_method, gz_windowBits,
                         gz_memLevel, gz_strategy);
      do {
        strm.avail_out = temp_size;
        strm.next_out = temp_data;
        ret = deflate(&strm, Z_FINISH);
        size_t have = temp_size - strm.avail_out;

        if (fwrite(temp_data, 1, have, output) != have) {
          fprintf(stderr, "failed to write %d compressed bytes to output\n",
                  have);
          return -1;
        }
        SHA_update(ctx, temp_data, have);
      } while (ret != Z_STREAM_END);
      deflateEnd(&strm);

      // lastly, the gzip footer.
      fwrite(gzip+40+gzip_header_len, 1, 8, output);
      SHA_update(ctx, gzip+40+gzip_header_len, 8);

      free(temp_data);
      free(uncompressed_target_data);
      free(gzip);
    } else {
      fprintf(stderr, "patch chunk %d is unknown type %d\n", i, type);
      return -1;
    }
  }

  return 0;
}
|
60
tools/applypatch/main.c
Normal file
60
tools/applypatch/main.c
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
/*
|
||||||
|
* Copyright (C) 2009 The Android Open Source Project
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include <stdio.h>
|
||||||
|
|
||||||
|
extern int applypatch(int argc, char** argv);
|
||||||
|
|
||||||
|
// This program applies binary patches to files in a way that is safe
|
||||||
|
// (the original file is not touched until we have the desired
|
||||||
|
// replacement for it) and idempotent (it's okay to run this program
|
||||||
|
// multiple times).
|
||||||
|
//
|
||||||
|
// - if the sha1 hash of <tgt-file> is <tgt-sha1>, does nothing and exits
|
||||||
|
// successfully.
|
||||||
|
//
|
||||||
|
// - otherwise, if the sha1 hash of <src-file> is <src-sha1>, applies the
|
||||||
|
// bsdiff <patch> to <src-file> to produce a new file (the type of patch
|
||||||
|
// is automatically detected from the file header). If that new
|
||||||
|
// file has sha1 hash <tgt-sha1>, moves it to replace <tgt-file>, and
|
||||||
|
// exits successfully. Note that if <src-file> and <tgt-file> are
|
||||||
|
// not the same, <src-file> is NOT deleted on success. <tgt-file>
|
||||||
|
// may be the string "-" to mean "the same as src-file".
|
||||||
|
//
|
||||||
|
// - otherwise, or if any error is encountered, exits with non-zero
|
||||||
|
// status.
|
||||||
|
//
|
||||||
|
// <src-file> (or <file> in check mode) may refer to an MTD partition
|
||||||
|
// to read the source data. See the comments for the
|
||||||
|
// LoadMTDContents() function above for the format of such a filename.
|
||||||
|
|
||||||
|
// Entry point: all real work is delegated to applypatch(); this
// wrapper only prints a usage summary when applypatch signals a
// usage error with exit code 2, then propagates the result.
int main(int argc, char** argv) {
  int rc = applypatch(argc, argv);
  if (rc != 2) {
    return rc;
  }
  fprintf(stderr,
          "usage: %s <src-file> <tgt-file> <tgt-sha1> <tgt-size> "
          "[<src-sha1>:<patch> ...]\n"
          " or %s -c <file> [<sha1> ...]\n"
          " or %s -s <bytes>\n"
          " or %s -l\n"
          "\n"
          "Filenames may be of the form\n"
          " MTD:<partition>:<len_1>:<sha1_1>:<len_2>:<sha1_2>:...\n"
          "to specify reading from or writing to an MTD partition.\n\n",
          argv[0], argv[0], argv[0], argv[0]);
  return rc;
}
|
@@ -7,6 +7,7 @@ echo "ro.build.id=$BUILD_ID"
|
|||||||
echo "ro.build.display.id=$BUILD_DISPLAY_ID"
|
echo "ro.build.display.id=$BUILD_DISPLAY_ID"
|
||||||
echo "ro.build.version.incremental=$BUILD_NUMBER"
|
echo "ro.build.version.incremental=$BUILD_NUMBER"
|
||||||
echo "ro.build.version.sdk=$PLATFORM_SDK_VERSION"
|
echo "ro.build.version.sdk=$PLATFORM_SDK_VERSION"
|
||||||
|
echo "ro.build.version.codename=$PLATFORM_VERSION_CODENAME"
|
||||||
echo "ro.build.version.release=$PLATFORM_VERSION"
|
echo "ro.build.version.release=$PLATFORM_VERSION"
|
||||||
echo "ro.build.date=`date`"
|
echo "ro.build.date=`date`"
|
||||||
echo "ro.build.date.utc=`date +%s`"
|
echo "ro.build.date.utc=`date +%s`"
|
||||||
@@ -19,6 +20,7 @@ echo "ro.product.brand=$PRODUCT_BRAND"
|
|||||||
echo "ro.product.name=$PRODUCT_NAME"
|
echo "ro.product.name=$PRODUCT_NAME"
|
||||||
echo "ro.product.device=$TARGET_DEVICE"
|
echo "ro.product.device=$TARGET_DEVICE"
|
||||||
echo "ro.product.board=$TARGET_BOOTLOADER_BOARD_NAME"
|
echo "ro.product.board=$TARGET_BOOTLOADER_BOARD_NAME"
|
||||||
|
echo "ro.product.cpu.abi=$TARGET_CPU_ABI"
|
||||||
echo "ro.product.manufacturer=$PRODUCT_MANUFACTURER"
|
echo "ro.product.manufacturer=$PRODUCT_MANUFACTURER"
|
||||||
echo "ro.product.locale.language=$PRODUCT_DEFAULT_LANGUAGE"
|
echo "ro.product.locale.language=$PRODUCT_DEFAULT_LANGUAGE"
|
||||||
echo "ro.product.locale.region=$PRODUCT_DEFAULT_REGION"
|
echo "ro.product.locale.region=$PRODUCT_DEFAULT_REGION"
|
||||||
|
@@ -24,6 +24,6 @@ LOCAL_C_INCLUDES := external/zlib
|
|||||||
LOCAL_SHARED_LIBRARIES := libz
|
LOCAL_SHARED_LIBRARIES := libz
|
||||||
|
|
||||||
LOCAL_MODULE := afar
|
LOCAL_MODULE := afar
|
||||||
LOCAL_MODULE_TAGS := tests
|
LOCAL_MODULE_TAGS := optional
|
||||||
|
|
||||||
include $(BUILD_EXECUTABLE)
|
include $(BUILD_EXECUTABLE)
|
||||||
|
@@ -31,6 +31,6 @@ LOCAL_SHARED_LIBRARIES := \
|
|||||||
|
|
||||||
LOCAL_MODULE := dexopt-wrapper
|
LOCAL_MODULE := dexopt-wrapper
|
||||||
|
|
||||||
LOCAL_MODULE_TAGS := tests
|
LOCAL_MODULE_TAGS := optional
|
||||||
|
|
||||||
include $(BUILD_EXECUTABLE)
|
include $(BUILD_EXECUTABLE)
|
||||||
|
@@ -47,6 +47,7 @@ public class Comment
|
|||||||
"@deprecated",
|
"@deprecated",
|
||||||
"@undeprecate",
|
"@undeprecate",
|
||||||
"@docRoot",
|
"@docRoot",
|
||||||
|
"@sdkCurrent",
|
||||||
"@inheritDoc",
|
"@inheritDoc",
|
||||||
"@more",
|
"@more",
|
||||||
"@code",
|
"@code",
|
||||||
|
@@ -115,23 +115,30 @@ public class DocFile
|
|||||||
TagInfo.makeHDF(hdf, "root.descr", tags);
|
TagInfo.makeHDF(hdf, "root.descr", tags);
|
||||||
|
|
||||||
hdf.setValue("commentText", commentText);
|
hdf.setValue("commentText", commentText);
|
||||||
|
|
||||||
if (outfile.indexOf("sdk/") != -1) {
|
// write the page using the appropriate root template, based on the
|
||||||
hdf.setValue("sdk", "true");
|
// whichdoc value supplied by build
|
||||||
if (outfile.indexOf("index.html") != -1) {
|
String fromWhichmodule = hdf.getValue("android.whichmodule", "");
|
||||||
ClearPage.write(hdf, "sdkpage.cs", outfile);
|
if (fromWhichmodule.equals("online-pdk")) {
|
||||||
} else {
|
//leaving this in just for temporary compatibility with pdk doc
|
||||||
ClearPage.write(hdf, "docpage.cs", outfile);
|
hdf.setValue("online-pdk", "true");
|
||||||
}
|
// add any conditional login for root template here (such as
|
||||||
} else if (outfile.indexOf("guide/") != -1){
|
// for custom left nav based on tab etc.
|
||||||
hdf.setValue("guide", "true");
|
|
||||||
ClearPage.write(hdf, "docpage.cs", outfile);
|
|
||||||
} else if (outfile.indexOf("publish/") != -1){
|
|
||||||
hdf.setValue("publish", "true");
|
|
||||||
ClearPage.write(hdf, "docpage.cs", outfile);
|
ClearPage.write(hdf, "docpage.cs", outfile);
|
||||||
} else {
|
} else {
|
||||||
ClearPage.write(hdf, "nosidenavpage.cs", outfile);
|
if (outfile.indexOf("sdk/") != -1) {
|
||||||
|
hdf.setValue("sdk", "true");
|
||||||
|
if (outfile.indexOf("index.html") != -1) {
|
||||||
|
ClearPage.write(hdf, "sdkpage.cs", outfile);
|
||||||
|
} else {
|
||||||
|
ClearPage.write(hdf, "docpage.cs", outfile);
|
||||||
|
}
|
||||||
|
} else if (outfile.indexOf("guide/") != -1) {
|
||||||
|
hdf.setValue("guide", "true");
|
||||||
|
ClearPage.write(hdf, "docpage.cs", outfile);
|
||||||
|
} else {
|
||||||
|
ClearPage.write(hdf, "nosidenavpage.cs", outfile);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
} //writePage
|
||||||
|
|
||||||
}
|
}
|
||||||
|
@@ -5,8 +5,8 @@ left nav (toc) that gets placed on all pages. ?>
|
|||||||
def:custom_masthead() ?>
|
def:custom_masthead() ?>
|
||||||
<div id="header">
|
<div id="header">
|
||||||
<div id="headerLeft">
|
<div id="headerLeft">
|
||||||
<a href="<?cs var:toroot ?>index.html" tabindex="-1"><img
|
<a href="<?cs var:toroot ?>guide/index.html" tabindex="-1"><img
|
||||||
src="<?cs var:toroot ?>assets/images/bg_logo.png" alt="Android Porting Development Kit" /></a>
|
src="<?cs var:toroot ?>assets/images/open_source.png" alt="Open Source Project: Platform Development Kit" /></a>
|
||||||
<ul class="<?cs
|
<ul class="<?cs
|
||||||
if:reference ?> <?cs
|
if:reference ?> <?cs
|
||||||
elif:guide ?> <?cs
|
elif:guide ?> <?cs
|
||||||
@@ -15,17 +15,10 @@ def:custom_masthead() ?>
|
|||||||
elif:community ?> <?cs
|
elif:community ?> <?cs
|
||||||
elif:publish ?> <?cs
|
elif:publish ?> <?cs
|
||||||
elif:about ?> <?cs /if ?>">
|
elif:about ?> <?cs /if ?>">
|
||||||
<li id="home-link"><a href="<?cs var:toroot ?><?cs
|
<!--<li id="guide-link"><a href="<?cs var:toroot ?>guide/index.html"
|
||||||
if:android.whichdoc != "online-pdk" ?>offline.html<?cs
|
onClick="return loadLast('guide)'"><span>Dev Guide</span></a></li>
|
||||||
else ?>index.html<?cs /if ?>">
|
<li id="opensource-link"><a href="http://source.android.com/"
|
||||||
<span>Home</span></a></li>
|
onClick="return loadLast('open')"><span>Open Source</span></a></li>-->
|
||||||
<!--<li id="sdk-link"><a href="<?cs var:toroot ?>index.html"><span>SDK</span></a></li>-->
|
|
||||||
<li id="guide-link"><a href="<?cs var:toroot ?>guide/index.html"
|
|
||||||
onClick="return loadLast('guide')"><span>Porting Guide</span></a></li>
|
|
||||||
<!--<li id="reference-link"><a href="<?cs var:toroot ?>reference/packages.html"
|
|
||||||
onClick="return loadLast('reference')"><span>Reference</span></a></li>
|
|
||||||
<li><a href="http://android-developers.blogspot.com"><span>Blog</span></a></li>
|
|
||||||
<li id="community-link"><a href="<?cs var:toroot ?>community/index.html"><span>Community</span></a></li>-->
|
|
||||||
</ul>
|
</ul>
|
||||||
</div>
|
</div>
|
||||||
<div id="headerRight">
|
<div id="headerRight">
|
||||||
@@ -35,8 +28,7 @@ def:custom_masthead() ?>
|
|||||||
<!-- <a href="#">English</a> | -->
|
<!-- <a href="#">English</a> | -->
|
||||||
<a href="http://www.android.com">Android.com</a>
|
<a href="http://www.android.com">Android.com</a>
|
||||||
</span>
|
</span>
|
||||||
</div><?cs
|
</div>
|
||||||
call:default_search_box() ?>
|
|
||||||
</div><!-- headerRight -->
|
</div><!-- headerRight -->
|
||||||
</div><!-- header --><?cs
|
</div><!-- header --><?cs
|
||||||
/def ?><?cs # custom_masthead ?>
|
/def ?><?cs # custom_masthead ?>
|
||||||
@@ -47,7 +39,7 @@ def:guide_nav() ?>
|
|||||||
<div class="g-section g-tpl-240" id="body-content">
|
<div class="g-section g-tpl-240" id="body-content">
|
||||||
<div class="g-unit g-first side-nav-resizable" id="side-nav">
|
<div class="g-unit g-first side-nav-resizable" id="side-nav">
|
||||||
<div id="devdoc-nav"><?cs
|
<div id="devdoc-nav"><?cs
|
||||||
include:"../../../../development/pdk/docs/html/guide/guide_toc.cs" ?>
|
include:"../../../../development/pdk/docs/guide/pdk_toc.cs" ?>
|
||||||
</div>
|
</div>
|
||||||
</div> <!-- end side-nav -->
|
</div> <!-- end side-nav -->
|
||||||
<script>
|
<script>
|
||||||
@@ -59,11 +51,7 @@ def:guide_nav() ?>
|
|||||||
|
|
||||||
<?cs
|
<?cs
|
||||||
def:custom_left_nav() ?><?cs
|
def:custom_left_nav() ?><?cs
|
||||||
if:guide ?><?cs
|
|
||||||
call:guide_nav() ?><?cs
|
call:guide_nav() ?><?cs
|
||||||
else ?><?cs
|
|
||||||
call:default_left_nav() ?><?cs
|
|
||||||
/if ?><?cs
|
|
||||||
/def ?>
|
/def ?>
|
||||||
|
|
||||||
<?cs # appears at the bottom of every page ?><?cs
|
<?cs # appears at the bottom of every page ?><?cs
|
||||||
|
@@ -7,7 +7,7 @@
|
|||||||
if:sdk.version ?> (<?cs
|
if:sdk.version ?> (<?cs
|
||||||
var:sdk.version ?>)<?cs
|
var:sdk.version ?>)<?cs
|
||||||
/if ?> | <?cs
|
/if ?> | <?cs
|
||||||
/if ?>Android Developers</title>
|
/if ?>Android Open Source</title>
|
||||||
<link href="<?cs var:toroot ?>assets/android-developer-docs-devguide.css" rel="stylesheet" type="text/css" />
|
<link href="<?cs var:toroot ?>assets/android-developer-docs-devguide.css" rel="stylesheet" type="text/css" />
|
||||||
<link href="<?cs var:toroot ?>assets-pdk/pdk-local.css" rel="stylesheet" type="text/css" />
|
<link href="<?cs var:toroot ?>assets-pdk/pdk-local.css" rel="stylesheet" type="text/css" />
|
||||||
<script src="<?cs var:toroot ?>assets/search_autocomplete.js" type="text/javascript"></script>
|
<script src="<?cs var:toroot ?>assets/search_autocomplete.js" type="text/javascript"></script>
|
||||||
|
@@ -1,45 +1,74 @@
|
|||||||
<?cs # This default template file is meant to be replaced. ?>
|
<?cs # This default template file is meant to be replaced. ?>
|
||||||
<?cs # Use the -tempatedir arg to javadoc to set your own directory with a replacement for this file in it. ?>
|
<?cs # Use the -tempatedir arg to javadoc to set your own directory with a replacement for this file in it. ?>
|
||||||
|
|
||||||
|
|
||||||
|
<?cs # The default search box that goes in the header ?><?cs
|
||||||
|
def:default_search_box() ?>
|
||||||
|
<div id="search" >
|
||||||
|
<div id="searchForm">
|
||||||
|
<form accept-charset="utf-8" class="gsc-search-box"
|
||||||
|
onsubmit="return submit_search()">
|
||||||
|
<table class="gsc-search-box" cellpadding="0" cellspacing="0"><tbody>
|
||||||
|
<tr>
|
||||||
|
<td class="gsc-input">
|
||||||
|
<input id="search_autocomplete" class="gsc-input" type="text" size="33" autocomplete="off"
|
||||||
|
title="search developer docs" name="q"
|
||||||
|
value="search developer docs"
|
||||||
|
onFocus="search_focus_changed(this, true)"
|
||||||
|
onBlur="search_focus_changed(this, false)"
|
||||||
|
onkeydown="return search_changed(event, true, '<?cs var:toroot?>')"
|
||||||
|
onkeyup="return search_changed(event, false, '<?cs var:toroot?>')" />
|
||||||
|
<div id="search_filtered_div" class="no-display">
|
||||||
|
<table id="search_filtered" cellspacing=0>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</td>
|
||||||
|
<td class="gsc-search-button">
|
||||||
|
<input type="submit" value="Search" title="search" id="search-button" class="gsc-search-button" />
|
||||||
|
</td>
|
||||||
|
<td class="gsc-clear-button">
|
||||||
|
<div title="clear results" class="gsc-clear-button"> </div>
|
||||||
|
</td>
|
||||||
|
</tr></tbody>
|
||||||
|
</table>
|
||||||
|
</form>
|
||||||
|
</div><!-- searchForm -->
|
||||||
|
</div><!-- search --><?cs
|
||||||
|
/def ?>
|
||||||
|
|
||||||
<?cs
|
<?cs
|
||||||
def:custom_masthead() ?>
|
def:custom_masthead() ?>
|
||||||
<div id="header">
|
<div id="header">
|
||||||
<div id="headerLeft">
|
<div id="headerLeft">
|
||||||
<a href="<?cs var:toroot ?>index.html" tabindex="-1"><img
|
<a href="<?cs var:toroot ?>index.html" tabindex="-1"><img
|
||||||
src="<?cs var:toroot ?>assets/images/bg_logo.png" alt="Android Developers" /></a>
|
src="<?cs var:toroot ?>assets/images/bg_logo.png" alt="Android Developers" /></a>
|
||||||
<ul class="<?cs
|
<?cs include:"header_tabs.cs" ?> <?cs # The links are extracted so we can better manage localization ?>
|
||||||
if:reference ?>reference<?cs
|
|
||||||
elif:guide ?>guide<?cs
|
|
||||||
elif:sdk ?>sdk<?cs
|
|
||||||
elif:home ?>home<?cs
|
|
||||||
elif:community ?>community<?cs
|
|
||||||
elif:publish ?>publish<?cs
|
|
||||||
elif:about ?>about<?cs /if ?>">
|
|
||||||
<li id="home-link"><a href="<?cs var:toroot ?><?cs
|
|
||||||
if:android.whichdoc != "online" ?>offline.html<?cs
|
|
||||||
else ?>index.html<?cs /if ?>">
|
|
||||||
<span>Home</span></a></li>
|
|
||||||
<li id="sdk-link"><a href="<?cs var:toroot ?>sdk/1.1_r1/index.html"><span>SDK</span></a></li>
|
|
||||||
<li id="guide-link"><a href="<?cs var:toroot ?>guide/index.html"
|
|
||||||
onClick="return loadLast('guide')"><span>Dev Guide</span></a></li>
|
|
||||||
<li id="reference-link"><a href="<?cs var:toroot ?>reference/packages.html"
|
|
||||||
onClick="return loadLast('reference')"><span>Reference</span></a></li>
|
|
||||||
<li><a href="http://android-developers.blogspot.com"><span>Blog</span></a></li>
|
|
||||||
<li id="community-link"><a href="<?cs var:toroot ?>community/index.html"><span>Community</span></a></li>
|
|
||||||
</ul>
|
|
||||||
</div>
|
</div>
|
||||||
<div id="headerRight">
|
<div id="headerRight">
|
||||||
<div id="headerLinks">
|
<div id="headerLinks">
|
||||||
<!-- <img src="<?cs var:toroot ?>assets/images/icon_world.jpg" alt="" /> -->
|
<!-- <img src="<?cs var:toroot ?>assets/images/icon_world.jpg" alt="" /> -->
|
||||||
<span class="text">
|
<span id="language">
|
||||||
<!-- <a href="#">English</a> | -->
|
<select name="language" onChange="changeLangPref(this.value)">
|
||||||
<a href="http://www.android.com">Android.com</a>
|
<option value="en">English</option>
|
||||||
</span>
|
<!-- <option value="ja"></option> -->
|
||||||
|
</select>
|
||||||
|
<script type="text/javascript">
|
||||||
|
<!--
|
||||||
|
loadLangPref();
|
||||||
|
//-->
|
||||||
|
</script>
|
||||||
|
</span>
|
||||||
|
<a href="http://www.android.com">Android.com</a>
|
||||||
</div><?cs
|
</div><?cs
|
||||||
call:default_search_box() ?>
|
call:default_search_box() ?>
|
||||||
</div><!-- headerRight -->
|
</div><!-- headerRight -->
|
||||||
|
<script type="text/javascript">
|
||||||
|
<!--
|
||||||
|
changeTabLang(getLangPref());
|
||||||
|
//-->
|
||||||
|
</script>
|
||||||
</div><!-- header --><?cs
|
</div><!-- header --><?cs
|
||||||
/def ?><?cs # custom_masthead ?>
|
/def ?>
|
||||||
|
|
||||||
<?cs
|
<?cs
|
||||||
def:sdk_nav() ?>
|
def:sdk_nav() ?>
|
||||||
@@ -66,22 +95,80 @@ def:guide_nav() ?>
|
|||||||
</script>
|
</script>
|
||||||
<?cs /def ?>
|
<?cs /def ?>
|
||||||
|
|
||||||
<?cs
|
<?cs # The default side navigation for the reference docs ?><?cs
|
||||||
def:publish_nav() ?>
|
def:default_left_nav() ?>
|
||||||
<div class="g-section g-tpl-180" id="body-content">
|
<div class="g-section g-tpl-240" id="body-content">
|
||||||
<div class="g-unit g-first" id="side-nav">
|
<div class="g-unit g-first side-nav-resizable" id="side-nav">
|
||||||
<div id="devdoc-nav"><?cs
|
<div id="swapper">
|
||||||
include:"../../../../frameworks/base/docs/html/publish/publish_toc.cs" ?>
|
<div id="nav-panels">
|
||||||
</div>
|
<div id="resize-packages-nav">
|
||||||
|
<div id="packages-nav">
|
||||||
|
<div id="index-links"><nobr>
|
||||||
|
<a href="<?cs var:toroot ?>reference/packages.html" <?cs if:(page.title == "Package Index") ?>class="selected"<?cs /if ?> >Package Index</a> |
|
||||||
|
<a href="<?cs var:toroot ?>reference/classes.html" <?cs if:(page.title == "Class Index") ?>class="selected"<?cs /if ?>>Class Index</a></nobr>
|
||||||
|
</div>
|
||||||
|
<ul><?cs
|
||||||
|
each:pkg=docs.packages ?>
|
||||||
|
<li <?cs if:(class.package.name == pkg.name) || (package.name == pkg.name)?>class="selected"<?cs /if ?>><?cs call:package_link(pkg) ?></li><?cs
|
||||||
|
/each ?>
|
||||||
|
</ul><br/>
|
||||||
|
</div> <!-- end packages -->
|
||||||
|
</div> <!-- end resize-packages -->
|
||||||
|
<div id="classes-nav"><?cs
|
||||||
|
if:subcount(class.package) ?>
|
||||||
|
<ul>
|
||||||
|
<?cs call:list("Interfaces", class.package.interfaces) ?>
|
||||||
|
<?cs call:list("Classes", class.package.classes) ?>
|
||||||
|
<?cs call:list("Enums", class.package.enums) ?>
|
||||||
|
<?cs call:list("Exceptions", class.package.exceptions) ?>
|
||||||
|
<?cs call:list("Errors", class.package.errors) ?>
|
||||||
|
</ul><?cs
|
||||||
|
elif:subcount(package) ?>
|
||||||
|
<ul>
|
||||||
|
<?cs call:class_link_list("Interfaces", package.interfaces) ?>
|
||||||
|
<?cs call:class_link_list("Classes", package.classes) ?>
|
||||||
|
<?cs call:class_link_list("Enums", package.enums) ?>
|
||||||
|
<?cs call:class_link_list("Exceptions", package.exceptions) ?>
|
||||||
|
<?cs call:class_link_list("Errors", package.errors) ?>
|
||||||
|
</ul><?cs
|
||||||
|
else ?>
|
||||||
|
<script>
|
||||||
|
/*addLoadEvent(maxPackageHeight);*/
|
||||||
|
</script>
|
||||||
|
<p style="padding:10px">Select a package to view its members</p><?cs
|
||||||
|
/if ?><br/>
|
||||||
|
</div><!-- end classes -->
|
||||||
|
</div><!-- end nav-panels -->
|
||||||
|
<div id="nav-tree" style="display:none">
|
||||||
|
<div id="index-links"><nobr>
|
||||||
|
<a href="<?cs var:toroot ?>reference/packages.html" <?cs if:(page.title == "Package Index") ?>class="selected"<?cs /if ?> >Package Index</a> |
|
||||||
|
<a href="<?cs var:toroot ?>reference/classes.html" <?cs if:(page.title == "Class Index") ?>class="selected"<?cs /if ?>>Class Index</a></nobr>
|
||||||
|
</div>
|
||||||
|
</div><!-- end nav-tree -->
|
||||||
|
</div><!-- end swapper -->
|
||||||
</div> <!-- end side-nav -->
|
</div> <!-- end side-nav -->
|
||||||
<?cs /def ?>
|
<script>
|
||||||
|
if (!isMobile) {
|
||||||
|
$("<a href='#' id='nav-swap' onclick='swapNav();return false;' style='font-size:10px;line-height:9px;margin-left:1em;text-decoration:none;'><span id='tree-link'>Use Tree Navigation</span><span id='panel-link' style='display:none'>Use Panel Navigation</span></a>").appendTo("#side-nav");
|
||||||
|
chooseDefaultNav();
|
||||||
|
if ($("#nav-tree").is(':visible')) init_navtree("nav-tree", "<?cs var:toroot ?>", NAVTREE_DATA);
|
||||||
|
else {
|
||||||
|
addLoadEvent(function() {
|
||||||
|
scrollIntoView("packages-nav");
|
||||||
|
scrollIntoView("classes-nav");
|
||||||
|
});
|
||||||
|
}
|
||||||
|
$("#swapper").css({borderBottom:"2px solid #aaa"});
|
||||||
|
} else {
|
||||||
|
swapNav(); // tree view should be used on mobile
|
||||||
|
}
|
||||||
|
</script><?cs
|
||||||
|
/def ?>
|
||||||
|
|
||||||
<?cs
|
<?cs
|
||||||
def:custom_left_nav() ?><?cs
|
def:custom_left_nav() ?><?cs
|
||||||
if:guide ?><?cs
|
if:guide ?><?cs
|
||||||
call:guide_nav() ?><?cs
|
call:guide_nav() ?><?cs
|
||||||
elif:publish ?><?cs
|
|
||||||
call:publish_nav() ?><?cs
|
|
||||||
elif:sdk ?><?cs
|
elif:sdk ?><?cs
|
||||||
call:sdk_nav() ?><?cs
|
call:sdk_nav() ?><?cs
|
||||||
else ?><?cs
|
else ?><?cs
|
||||||
@@ -115,7 +202,7 @@ def:custom_footerlinks() ?>
|
|||||||
</p><?cs
|
</p><?cs
|
||||||
/def ?>
|
/def ?>
|
||||||
|
|
||||||
<?cs # appears on the right side of the blue bar at the bottom of every page ?><?cs
|
<?cs # appears on the right side of the blue bar at the bottom off every page ?><?cs
|
||||||
def:custom_buildinfo() ?>
|
def:custom_buildinfo() ?>
|
||||||
Android 1.1 r1 - <?cs var:page.now ?><?cs
|
Android <?cs var:sdk.version ?> r<?cs var:sdk.rel.id ?> - <?cs var:page.now ?>
|
||||||
/def ?>
|
<?cs /def ?>
|
||||||
|
35
tools/droiddoc/templates-sdk/header_tabs.cs
Normal file
35
tools/droiddoc/templates-sdk/header_tabs.cs
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
<ul id="header-tabs" class="<?cs
|
||||||
|
if:reference ?>reference<?cs
|
||||||
|
elif:guide ?>guide<?cs
|
||||||
|
elif:sdk ?>sdk<?cs
|
||||||
|
elif:home ?>home<?cs
|
||||||
|
elif:community ?>community<?cs
|
||||||
|
elif:publish ?>publish<?cs
|
||||||
|
elif:about ?>about<?cs /if ?>">
|
||||||
|
|
||||||
|
<li id="home-link"><a href="<?cs var:toroot ?><?cs if:android.whichdoc != "online" ?>offline.html<?cs else ?>index.html<?cs /if ?>">
|
||||||
|
<span class="en">Home</span>
|
||||||
|
<span class="ja"></span>
|
||||||
|
</a></li>
|
||||||
|
<li id="sdk-link"><a href="<?cs var:toroot ?>sdk/<?cs var:sdk.current ?>/index.html">
|
||||||
|
<span class="en">SDK</span>
|
||||||
|
<span class="ja"></span>
|
||||||
|
</a></li>
|
||||||
|
<li id="guide-link"><a href="<?cs var:toroot ?>guide/index.html" onClick="return loadLast('guide')">
|
||||||
|
<span class="en">Dev Guide</span>
|
||||||
|
<span class="ja"></span>
|
||||||
|
</a></li>
|
||||||
|
<li id="reference-link"><a href="<?cs var:toroot ?>reference/packages.html" onClick="return loadLast('reference')">
|
||||||
|
<span class="en">Reference</span>
|
||||||
|
<span class="ja"></span>
|
||||||
|
</a></li>
|
||||||
|
<li><a href="http://android-developers.blogspot.com">
|
||||||
|
<span class="en">Blog</span>
|
||||||
|
<span class="ja"></span>
|
||||||
|
</a></li>
|
||||||
|
<li id="community-link"><a href="<?cs var:toroot ?>community/index.html">
|
||||||
|
<span class="en">Community</span>
|
||||||
|
<span class="ja"></span>
|
||||||
|
</a></li>
|
||||||
|
|
||||||
|
</ul>
|
@@ -4,7 +4,7 @@
|
|||||||
<?cs if:sdk.redirect ?>
|
<?cs if:sdk.redirect ?>
|
||||||
<head>
|
<head>
|
||||||
<title>Redirecting...</title>
|
<title>Redirecting...</title>
|
||||||
<meta http-equiv="refresh" content="0;url=<?cs var:toroot ?>sdk/<?cs var:sdk.redirect ?>/index.html">
|
<meta http-equiv="refresh" content="0;url=<?cs var:toroot ?>sdk/<?cs var:sdk.current ?>/index.html">
|
||||||
<link href="<?cs var:toroot ?>assets/android-developer-docs.css" rel="stylesheet" type="text/css" />
|
<link href="<?cs var:toroot ?>assets/android-developer-docs.css" rel="stylesheet" type="text/css" />
|
||||||
</head>
|
</head>
|
||||||
<?cs else ?>
|
<?cs else ?>
|
||||||
@@ -16,32 +16,33 @@
|
|||||||
|
|
||||||
<?cs call:sdk_nav() ?>
|
<?cs call:sdk_nav() ?>
|
||||||
|
|
||||||
|
|
||||||
<div class="g-unit" id="doc-content" >
|
|
||||||
|
|
||||||
<?cs if:sdk.redirect ?>
|
<?cs if:sdk.redirect ?>
|
||||||
Redirecting to
|
<div class="g-unit">
|
||||||
<a href="<?cs var:toroot ?>sdk/<?cs var:sdk.redirect ?>/index.html">
|
<div id="jd-content">
|
||||||
<?cs var:toroot ?>sdk/<?cs var:sdk.redirect ?>/index.html
|
<p>Redirecting to
|
||||||
</a>...
|
<a href="/sdk/<?cs var:sdk.current ?>/index.html">
|
||||||
|
/sdk/<?cs var:sdk.current ?>/index.html
|
||||||
|
</a></p>
|
||||||
<?cs else ?>
|
<?cs else ?>
|
||||||
|
<div class="g-unit" id="doc-content" >
|
||||||
<div id="jd-header" class="guide-header" >
|
<div id="jd-header" class="guide-header" >
|
||||||
<span class="crumb"> </span>
|
<span class="crumb"> </span>
|
||||||
<h1><?cs if:android.whichdoc == "online" ?>Download <?cs /if ?><?cs var:page.title ?></h1>
|
<h1><?cs if:android.whichdoc == "online" ?>Download <?cs /if ?><?cs var:page.title ?></h1>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<div id="jd-content">
|
||||||
<div id="jd-content">
|
<p><em><?cs
|
||||||
|
if:ndk ?><?cs
|
||||||
<p><em>
|
var:ndk.date ?><?cs
|
||||||
<?cs var:sdk.date ?>
|
else ?><?cs
|
||||||
</em></p>
|
var:sdk.date ?><?cs
|
||||||
|
/if ?></em>
|
||||||
|
</p>
|
||||||
|
|
||||||
<?cs if:sdk.not_latest_version ?>
|
<?cs if:sdk.not_latest_version ?>
|
||||||
<div class="special">
|
<div class="special">
|
||||||
<p><strong>This is NOT the current Android SDK release.</strong></p>
|
<p><strong>This is NOT the current Android SDK release.</strong></p>
|
||||||
<p>Use the links under <strong>Current SDK Release</strong>, on the left, to be directed to the current SDK.</p>
|
<p><a href="/sdk/<?cs var:sdk.current ?>/index.html">Download the current Android SDK</a></p>
|
||||||
</div>
|
</div>
|
||||||
<?cs /if ?>
|
<?cs /if ?>
|
||||||
|
|
||||||
@@ -51,8 +52,63 @@
|
|||||||
<p>The sections below provide an overview of the SDK package. </p>
|
<p>The sections below provide an overview of the SDK package. </p>
|
||||||
|
|
||||||
<?cs else ?>
|
<?cs else ?>
|
||||||
|
<?cs if:ndk ?>
|
||||||
|
|
||||||
<p>Before downloading, please read the <a href="<?cs var:toroot ?>sdk/<?cs var:sdk.version ?>/requirements.html">
|
<p>The Android NDK is a companion tool to the Android SDK that lets Android
|
||||||
|
application developers build performance-critical portions of their apps in
|
||||||
|
native code. It is designed for use <em>only</em> in conjunction with the
|
||||||
|
Android SDK, so if you have not already installed the Android 1.5 SDK, please do
|
||||||
|
so before downloading the NDK. Also, please read <a href="">What is the Android
|
||||||
|
NDK?</a> to get an understanding of what the NDK offers and whether it will be
|
||||||
|
useful to you.</p>
|
||||||
|
|
||||||
|
<p>Select the download package that is appropriate for your development
|
||||||
|
computer. Note that separate download packages are provided for 32- and 64-bit
|
||||||
|
Linux platforms.</p>
|
||||||
|
|
||||||
|
<table class="download">
|
||||||
|
<tr>
|
||||||
|
<th>Platform</th>
|
||||||
|
<th>Package</th>
|
||||||
|
<th>Size</th>
|
||||||
|
<th>MD5 Checksum</th>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>Windows</td>
|
||||||
|
<td>
|
||||||
|
<a href="http://dl.google.com/android/<?cs var:ndk.win_download ?>"><?cs var:ndk.win_download ?></a>
|
||||||
|
</td>
|
||||||
|
<td><?cs var:ndk.win_bytes ?> bytes</td>
|
||||||
|
<td><?cs var:ndk.win_checksum ?></td>
|
||||||
|
</tr>
|
||||||
|
<tr class="alt-color">
|
||||||
|
<td>Mac OS X (intel)</td>
|
||||||
|
<td>
|
||||||
|
<a href="http://dl.google.com/android/<?cs var:ndk.mac_download ?>"><?cs var:ndk.mac_download ?></a>
|
||||||
|
</td>
|
||||||
|
<td><?cs var:ndk.mac_bytes ?> bytes</td>
|
||||||
|
<td><?cs var:ndk.mac_checksum ?></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>Linux 32-bit (i386)</td>
|
||||||
|
<td>
|
||||||
|
<a href="http://dl.google.com/android/<?cs var:ndk.linux_download ?>"><?cs var:ndk.linux_download ?></a>
|
||||||
|
</td>
|
||||||
|
<td><?cs var:ndk.linux_bytes ?> bytes</td>
|
||||||
|
<td><?cs var:ndk.linux_checksum ?></td>
|
||||||
|
</tr>
|
||||||
|
<tr class="alt-color">
|
||||||
|
<td>Linux 64-bit (x86_64)</td>
|
||||||
|
<td>
|
||||||
|
<a href="http://dl.google.com/android/<?cs var:ndk.linux_64_download ?>"><?cs var:ndk.linux_64_download ?></a>
|
||||||
|
</td>
|
||||||
|
<td><?cs var:ndk.linux_64_bytes ?> bytes</td>
|
||||||
|
<td><?cs var:ndk.linux_64_checksum ?></td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
|
|
||||||
|
<?cs else ?>
|
||||||
|
<p>Before downloading, please read the <a href="requirements.html">
|
||||||
System Requirements</a> document. As you start the download, you will also need to review and agree to
|
System Requirements</a> document. As you start the download, you will also need to review and agree to
|
||||||
the Terms and Conditions that govern the use of the Android SDK. </p>
|
the Terms and Conditions that govern the use of the Android SDK. </p>
|
||||||
|
|
||||||
@@ -89,6 +145,7 @@ the Terms and Conditions that govern the use of the Android SDK. </p>
|
|||||||
</tr>
|
</tr>
|
||||||
</table>
|
</table>
|
||||||
|
|
||||||
|
<?cs /if ?>
|
||||||
<?cs /if ?>
|
<?cs /if ?>
|
||||||
|
|
||||||
<?cs call:tag_list(root.descr) ?>
|
<?cs call:tag_list(root.descr) ?>
|
||||||
@@ -96,7 +153,10 @@ the Terms and Conditions that govern the use of the Android SDK. </p>
|
|||||||
<?cs /if ?>
|
<?cs /if ?>
|
||||||
</div><!-- end jd-content -->
|
</div><!-- end jd-content -->
|
||||||
|
|
||||||
<?cs include:"footer.cs" ?>
|
<?cs if:!sdk.redirect ?>
|
||||||
|
<?cs include:"footer.cs" ?>
|
||||||
|
<?cs /if ?>
|
||||||
|
|
||||||
</div><!-- end doc-content -->
|
</div><!-- end doc-content -->
|
||||||
|
|
||||||
<?cs include:"trailer.cs" ?>
|
<?cs include:"trailer.cs" ?>
|
||||||
|
@@ -110,11 +110,18 @@ dd {
|
|||||||
padding:0 0 0 2em;
|
padding:0 0 0 2em;
|
||||||
}
|
}
|
||||||
|
|
||||||
li p, dd p {
|
li p {
|
||||||
|
margin:.5em 0 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
dd p {
|
||||||
margin:1em 0 0;
|
margin:1em 0 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
li pre, li table, li img,
|
li pre, li table, li img {
|
||||||
|
margin:.5em 0 0 1em;
|
||||||
|
}
|
||||||
|
|
||||||
dd pre, dd table, dd img {
|
dd pre, dd table, dd img {
|
||||||
margin:1em 0 0 1em;
|
margin:1em 0 0 1em;
|
||||||
}
|
}
|
||||||
@@ -169,7 +176,6 @@ hr.blue {
|
|||||||
margin:0;
|
margin:0;
|
||||||
position:relative;
|
position:relative;
|
||||||
width:100%;
|
width:100%;
|
||||||
background: url('images/preliminary.png');
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#header {
|
#header {
|
||||||
@@ -262,20 +268,16 @@ hr.blue {
|
|||||||
#headerLinks {
|
#headerLinks {
|
||||||
margin:10px 10px 0 0;
|
margin:10px 10px 0 0;
|
||||||
height:13px;
|
height:13px;
|
||||||
}
|
|
||||||
|
|
||||||
#headerLinks .text {
|
|
||||||
text-decoration: none;
|
|
||||||
color: #7FA9B5;
|
|
||||||
font-size: 11px;
|
font-size: 11px;
|
||||||
vertical-align: top;
|
vertical-align: top;
|
||||||
}
|
}
|
||||||
|
|
||||||
#headerLinks a {
|
#headerLinks a {
|
||||||
text-decoration: underline;
|
|
||||||
color: #7FA9B5;
|
color: #7FA9B5;
|
||||||
font-size: 11px;
|
}
|
||||||
vertical-align: top;
|
|
||||||
|
#language {
|
||||||
|
margin:0 10px;
|
||||||
}
|
}
|
||||||
|
|
||||||
#search {
|
#search {
|
||||||
@@ -717,14 +719,16 @@ td.gsc-search-button {
|
|||||||
float: left;
|
float: left;
|
||||||
width: 584px;
|
width: 584px;
|
||||||
height: 580px;
|
height: 580px;
|
||||||
background:url(images/home/bg_home_middle.png) no-repeat 0 0;
|
|
||||||
position:relative;
|
position:relative;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#topAnnouncement {
|
||||||
|
background:url(images/home/bg_home_announcement.png) no-repeat 0 0;
|
||||||
|
}
|
||||||
|
|
||||||
#homeTitle {
|
#homeTitle {
|
||||||
margin:15px 15px 0;
|
padding:15px 15px 0;
|
||||||
height:30px;
|
height:30px;
|
||||||
background:url(images/hr_gray_side.jpg) no-repeat 0 29px;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#homeTitle h2 {
|
#homeTitle h2 {
|
||||||
@@ -732,8 +736,14 @@ td.gsc-search-button {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#announcement-block {
|
#announcement-block {
|
||||||
margin:15px 15px 0;
|
padding:0 15px 0;
|
||||||
height:125px;
|
overflow:hidden;
|
||||||
|
background: url(images/hr_gray_side.jpg) no-repeat 15px 0;
|
||||||
|
zoom:1;
|
||||||
|
}
|
||||||
|
|
||||||
|
#announcement-block>* {
|
||||||
|
padding:15px 0 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
#announcement-block img {
|
#announcement-block img {
|
||||||
@@ -746,6 +756,29 @@ td.gsc-search-button {
|
|||||||
margin:0;
|
margin:0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#carousel {
|
||||||
|
background:url(images/home/bg_home_carousel.png) no-repeat 0 0;
|
||||||
|
position:relative;
|
||||||
|
height:400px;
|
||||||
|
}
|
||||||
|
|
||||||
|
#carouselMain {
|
||||||
|
padding: 25px 21px 0;
|
||||||
|
height:185px;
|
||||||
|
background-position:top;
|
||||||
|
overflow:hidden;
|
||||||
|
position:relative;
|
||||||
|
}
|
||||||
|
|
||||||
|
#carouselMain img {
|
||||||
|
margin:0;
|
||||||
|
}
|
||||||
|
|
||||||
|
#homeMiddle p {
|
||||||
|
margin:0;
|
||||||
|
padding-bottom: 1em;
|
||||||
|
}
|
||||||
|
|
||||||
.clearer { clear:both; }
|
.clearer { clear:both; }
|
||||||
|
|
||||||
#arrow-left, #arrow-right {
|
#arrow-left, #arrow-right {
|
||||||
@@ -824,6 +857,12 @@ div#app-list {
|
|||||||
text-decoration:none;
|
text-decoration:none;
|
||||||
text-align:center;
|
text-align:center;
|
||||||
font-size:11px;
|
font-size:11px;
|
||||||
|
line-height:11px;
|
||||||
|
}
|
||||||
|
|
||||||
|
#app-list a span {
|
||||||
|
position:relative;
|
||||||
|
top:-4px;
|
||||||
}
|
}
|
||||||
|
|
||||||
#app-list img {
|
#app-list img {
|
||||||
@@ -857,18 +896,6 @@ div#app-list {
|
|||||||
padding-bottom:.25em;
|
padding-bottom:.25em;
|
||||||
}
|
}
|
||||||
|
|
||||||
#carouselMain {
|
|
||||||
margin: 25px 21px 0;
|
|
||||||
height:185px;
|
|
||||||
background-position:top;
|
|
||||||
background-repeat:no-repeat;
|
|
||||||
overflow:hidden;
|
|
||||||
}
|
|
||||||
|
|
||||||
#carouselMain img {
|
|
||||||
margin:0;
|
|
||||||
}
|
|
||||||
|
|
||||||
/*carousel bulletin layouts*/
|
/*carousel bulletin layouts*/
|
||||||
/*460px width*/
|
/*460px width*/
|
||||||
/*185px height*/
|
/*185px height*/
|
||||||
@@ -877,24 +904,24 @@ div#app-list {
|
|||||||
width:230px;
|
width:230px;
|
||||||
height:165px;
|
height:165px;
|
||||||
overflow:hidden;
|
overflow:hidden;
|
||||||
margin:8px 0 8px 8px;
|
padding:8px 0 8px 8px;
|
||||||
}
|
}
|
||||||
.desc-right {
|
.desc-right {
|
||||||
float:left;
|
float:left;
|
||||||
width:270px;
|
width:270px;
|
||||||
margin:10px;
|
padding:10px;
|
||||||
}
|
}
|
||||||
.img-right {
|
.img-right {
|
||||||
float:right;
|
float:right;
|
||||||
width:220px;
|
width:220px;
|
||||||
height:165px;
|
height:165px;
|
||||||
overflow:hidden;
|
overflow:hidden;
|
||||||
margin:8px 8px 8px 0;
|
padding:8px 8px 8px 0;
|
||||||
}
|
}
|
||||||
.desc-left {
|
.desc-left {
|
||||||
float:right;
|
float:right;
|
||||||
width:280px;
|
width:280px;
|
||||||
margin:10px;
|
padding:10px;
|
||||||
text-align:right;
|
text-align:right;
|
||||||
}
|
}
|
||||||
.img-top {
|
.img-top {
|
||||||
|
@@ -433,11 +433,11 @@ hr {
|
|||||||
.nolist {
|
.nolist {
|
||||||
list-style:none;
|
list-style:none;
|
||||||
padding:0;
|
padding:0;
|
||||||
margin:0 0 0 1em;
|
margin:0 0 1em 1em;
|
||||||
}
|
}
|
||||||
|
|
||||||
.nolist li {
|
.nolist li {
|
||||||
padding:0;
|
padding:0 0 2px;
|
||||||
margin:0;
|
margin:0;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -570,15 +570,7 @@ div.special ol li {
|
|||||||
margin:0 0 .5em;
|
margin:0 0 .5em;
|
||||||
padding:0;
|
padding:0;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* old p.note, p.caution, p.warning {
|
|
||||||
margin:0 0 1em;
|
|
||||||
padding: 4px 10px;
|
|
||||||
background-color: #efefef;
|
|
||||||
border-top: 1px solid;
|
|
||||||
border-bottom: 1px solid;
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
p.note, p.caution, p.warning {
|
p.note, p.caution, p.warning {
|
||||||
margin: 1em;
|
margin: 1em;
|
||||||
padding: 0 0 0 .5em;
|
padding: 0 0 0 .5em;
|
||||||
@@ -594,21 +586,20 @@ p.special-note {
|
|||||||
p.note {
|
p.note {
|
||||||
border-color: #99aacc;
|
border-color: #99aacc;
|
||||||
}
|
}
|
||||||
|
|
||||||
p.caution {
|
|
||||||
border-color: #ffcc33;
|
|
||||||
}
|
|
||||||
|
|
||||||
p.warning {
|
p.warning {
|
||||||
border-color: #aa0033;
|
border-color: #aa0033;
|
||||||
}
|
}
|
||||||
|
|
||||||
p.warning b, p.warning em, p.warning strong {
|
p.caution {
|
||||||
color: #aa0033;
|
border-color: #ffcf00;
|
||||||
|
}
|
||||||
|
|
||||||
|
p.warning b, p.warning strong {
|
||||||
font-weight: bold;
|
font-weight: bold;
|
||||||
}
|
}
|
||||||
|
|
||||||
li p.note, li p.warning, li p.caution {
|
li p.note, li p.warning {
|
||||||
margin: .5em 0 0 0;
|
margin: .5em 0 0 0;
|
||||||
padding: .2em .5em .2em .9em;
|
padding: .2em .5em .2em .9em;
|
||||||
}
|
}
|
||||||
@@ -681,7 +672,7 @@ pre.classic {
|
|||||||
|
|
||||||
#qv ol ol{
|
#qv ol ol{
|
||||||
list-style:none;
|
list-style:none;
|
||||||
padding: 0 0 3px 12px;
|
padding: 0 0 0 12px;
|
||||||
margin:0;
|
margin:0;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -690,11 +681,14 @@ pre.classic {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#qv li {
|
#qv li {
|
||||||
padding: 0 10px;
|
padding: 0 10px 3px;
|
||||||
margin: 2 0 0;
|
|
||||||
line-height: 1.2em;
|
line-height: 1.2em;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#qv li li {
|
||||||
|
padding: 3px 10px 0;
|
||||||
|
}
|
||||||
|
|
||||||
#qv ul li {
|
#qv ul li {
|
||||||
padding: 0 10px 0 0;
|
padding: 0 10px 0 0;
|
||||||
}
|
}
|
||||||
@@ -810,6 +804,63 @@ padding:0 0 0 0em;
|
|||||||
|
|
||||||
/* End sidebox sidebar element styles */
|
/* End sidebox sidebar element styles */
|
||||||
|
|
||||||
|
/* BEGIN image and caption styles (originally for UI Guidelines docs) */
|
||||||
|
|
||||||
|
table.image-caption {
|
||||||
|
padding:0;
|
||||||
|
margin:.5em 0;
|
||||||
|
border:0;
|
||||||
|
}
|
||||||
|
|
||||||
|
td.image-caption-i {
|
||||||
|
font-size:92%;
|
||||||
|
padding:0;
|
||||||
|
margin:0;
|
||||||
|
border:0;
|
||||||
|
}
|
||||||
|
|
||||||
|
td.image-caption-i img {
|
||||||
|
padding:0 1em;
|
||||||
|
margin:0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.image-list {
|
||||||
|
width:24px;
|
||||||
|
text-align:center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.image-list .caption {
|
||||||
|
margin:0 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
td.image-caption-c {
|
||||||
|
font-size:92%;
|
||||||
|
padding:1em 2px 2px 2px;
|
||||||
|
margin:0;
|
||||||
|
border:0;
|
||||||
|
width:350px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.grad-rule-top {
|
||||||
|
background-image:url(images/grad-rule-qv.png);
|
||||||
|
background-repeat:no-repeat;
|
||||||
|
padding-top:1em;
|
||||||
|
margin-top:0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.image-caption-nested {
|
||||||
|
margin-top:0;
|
||||||
|
padding:0 0 0 1em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.image-caption-nested td {
|
||||||
|
padding:0 4px 2px 0;
|
||||||
|
margin:0;
|
||||||
|
border:0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* END image and caption styles */
|
||||||
|
|
||||||
/* table of contents */
|
/* table of contents */
|
||||||
|
|
||||||
ol.toc {
|
ol.toc {
|
||||||
|
@@ -4,7 +4,7 @@ var devdocNav;
|
|||||||
var sidenav;
|
var sidenav;
|
||||||
var content;
|
var content;
|
||||||
var HEADER_HEIGHT = 117;
|
var HEADER_HEIGHT = 117;
|
||||||
var cookie_style = 'android_developer';
|
var cookie_namespace = 'android_developer';
|
||||||
var NAV_PREF_TREE = "tree";
|
var NAV_PREF_TREE = "tree";
|
||||||
var NAV_PREF_PANELS = "panels";
|
var NAV_PREF_PANELS = "panels";
|
||||||
var nav_pref;
|
var nav_pref;
|
||||||
@@ -70,8 +70,8 @@ function restoreHeight(packageHeight) {
|
|||||||
$("#nav-tree").css({height:swapperHeight + "px"});
|
$("#nav-tree").css({height:swapperHeight + "px"});
|
||||||
}
|
}
|
||||||
|
|
||||||
function getCookie(cookie) {
|
function readCookie(cookie) {
|
||||||
var myCookie = cookie_style+"_"+cookie+"=";
|
var myCookie = cookie_namespace+"_"+cookie+"=";
|
||||||
if (document.cookie) {
|
if (document.cookie) {
|
||||||
var index = document.cookie.indexOf(myCookie);
|
var index = document.cookie.indexOf(myCookie);
|
||||||
if (index != -1) {
|
if (index != -1) {
|
||||||
@@ -87,16 +87,15 @@ function getCookie(cookie) {
|
|||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
function writeCookie(cookie, val, path, expiration) {
|
function writeCookie(cookie, val, section, expiration) {
|
||||||
if (!val) return;
|
if (!val) return;
|
||||||
var date = new Date();
|
section = section == null ? "_" : "_"+section+"_";
|
||||||
date.setTime(date.getTime()+(10*365*24*60*60*1000)); // default expiration is one week
|
if (expiration == null) {
|
||||||
expiration = expiration ? expiration : date.toGMTString();
|
var date = new Date();
|
||||||
if (location.href.indexOf("/reference/") != -1) {
|
date.setTime(date.getTime()+(10*365*24*60*60*1000)); // default expiration is one week
|
||||||
document.cookie = cookie_style+'_reference_'+cookie+'='+val+'; expires='+expiration+'; path='+'/'+path;
|
expiration = date.toGMTString();
|
||||||
} else if (location.href.indexOf("/guide/") != -1) {
|
|
||||||
document.cookie = cookie_style+'_guide_'+cookie+'='+val+'; expires='+expiration+'; path='+'/'+path;
|
|
||||||
}
|
}
|
||||||
|
document.cookie = cookie_namespace+section+cookie+"="+val+"; expires="+expiration+"; path=/";
|
||||||
}
|
}
|
||||||
|
|
||||||
function init() {
|
function init() {
|
||||||
@@ -116,8 +115,8 @@ function init() {
|
|||||||
if (!isMobile) {
|
if (!isMobile) {
|
||||||
$("#resize-packages-nav").resizable({handles: "s", resize: function(e, ui) { resizeHeight(); } });
|
$("#resize-packages-nav").resizable({handles: "s", resize: function(e, ui) { resizeHeight(); } });
|
||||||
$(".side-nav-resizable").resizable({handles: "e", resize: function(e, ui) { resizeWidth(); } });
|
$(".side-nav-resizable").resizable({handles: "e", resize: function(e, ui) { resizeWidth(); } });
|
||||||
var cookieWidth = getCookie(cookiePath+'width');
|
var cookieWidth = readCookie(cookiePath+'width');
|
||||||
var cookieHeight = getCookie(cookiePath+'height');
|
var cookieHeight = readCookie(cookiePath+'height');
|
||||||
if (cookieWidth) {
|
if (cookieWidth) {
|
||||||
restoreWidth(cookieWidth);
|
restoreWidth(cookieWidth);
|
||||||
} else if ($(".side-nav-resizable").length) {
|
} else if ($(".side-nav-resizable").length) {
|
||||||
@@ -175,7 +174,9 @@ function resizeHeight() {
|
|||||||
$("#packages-nav").css({height:parseInt(resizePackagesNav.css("height")) - 6 + "px"}); //move 6px for handle
|
$("#packages-nav").css({height:parseInt(resizePackagesNav.css("height")) - 6 + "px"}); //move 6px for handle
|
||||||
devdocNav.css({height:sidenav.css("height")});
|
devdocNav.css({height:sidenav.css("height")});
|
||||||
$("#nav-tree").css({height:swapperHeight + "px"});
|
$("#nav-tree").css({height:swapperHeight + "px"});
|
||||||
writeCookie("height", resizePackagesNav.css("height"), "", null);
|
|
||||||
|
var section = location.pathname.substring(1,location.pathname.indexOf("/",1));
|
||||||
|
writeCookie("height", resizePackagesNav.css("height"), section, null);
|
||||||
}
|
}
|
||||||
|
|
||||||
function resizeWidth() {
|
function resizeWidth() {
|
||||||
@@ -190,7 +191,9 @@ function resizeWidth() {
|
|||||||
resizePackagesNav.css({width:sidenavWidth});
|
resizePackagesNav.css({width:sidenavWidth});
|
||||||
classesNav.css({width:sidenavWidth});
|
classesNav.css({width:sidenavWidth});
|
||||||
$("#packages-nav").css({width:sidenavWidth});
|
$("#packages-nav").css({width:sidenavWidth});
|
||||||
writeCookie("width", sidenavWidth, "", null);
|
|
||||||
|
var section = location.pathname.substring(1,location.pathname.indexOf("/",1));
|
||||||
|
writeCookie("width", sidenavWidth, section, null);
|
||||||
}
|
}
|
||||||
|
|
||||||
function resizeAll() {
|
function resizeAll() {
|
||||||
@@ -207,7 +210,7 @@ function loadLast(cookiePath) {
|
|||||||
if (location.indexOf("/"+cookiePath+"/") != -1) {
|
if (location.indexOf("/"+cookiePath+"/") != -1) {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
var lastPage = getCookie(cookiePath + "_lastpage");
|
var lastPage = readCookie(cookiePath + "_lastpage");
|
||||||
if (lastPage) {
|
if (lastPage) {
|
||||||
window.location = lastPage;
|
window.location = lastPage;
|
||||||
return false;
|
return false;
|
||||||
@@ -216,11 +219,11 @@ function loadLast(cookiePath) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
$(window).unload(function(){
|
$(window).unload(function(){
|
||||||
var href = location.href;
|
var path = location.pathname;
|
||||||
if (href.indexOf("/reference/") != -1) {
|
if (path.indexOf("/reference/") != -1) {
|
||||||
writeCookie("lastpage", href, "", null);
|
writeCookie("lastpage", path, "reference", null);
|
||||||
} else if (href.indexOf("/guide/") != -1) {
|
} else if (path.indexOf("/guide/") != -1) {
|
||||||
writeCookie("lastpage", href, "", null);
|
writeCookie("lastpage", path, "guide", null);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -257,7 +260,7 @@ function buildToggleLists() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function getNavPref() {
|
function getNavPref() {
|
||||||
var v = getCookie('reference_nav');
|
var v = readCookie('reference_nav');
|
||||||
if (v != NAV_PREF_TREE) {
|
if (v != NAV_PREF_TREE) {
|
||||||
v = NAV_PREF_PANELS;
|
v = NAV_PREF_PANELS;
|
||||||
}
|
}
|
||||||
@@ -283,7 +286,7 @@ function swapNav() {
|
|||||||
}
|
}
|
||||||
var date = new Date();
|
var date = new Date();
|
||||||
date.setTime(date.getTime()+(10*365*24*60*60*1000)); // keep this for 10 years
|
date.setTime(date.getTime()+(10*365*24*60*60*1000)); // keep this for 10 years
|
||||||
writeCookie("nav", nav_pref, "", date.toGMTString());
|
writeCookie("nav", nav_pref, null, date.toGMTString());
|
||||||
|
|
||||||
$("#nav-panels").toggle();
|
$("#nav-panels").toggle();
|
||||||
$("#panel-link").toggle();
|
$("#panel-link").toggle();
|
||||||
@@ -349,3 +352,57 @@ function toggleAllSummaryInherited(linkObj) {
|
|||||||
}
|
}
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function changeTabLang(lang) {
|
||||||
|
var nodes = $("#header-tabs").find("."+lang);
|
||||||
|
for (i=0; i < nodes.length; i++) { // for each node in this language
|
||||||
|
var node = $(nodes[i]);
|
||||||
|
node.siblings().css("display","none"); // hide all siblings
|
||||||
|
if (node.not(":empty").length != 0) { //if this languages node has a translation, show it
|
||||||
|
node.css("display","inline");
|
||||||
|
} else { //otherwise, show English instead
|
||||||
|
node.css("display","none");
|
||||||
|
node.siblings().filter(".en").css("display","inline");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function changeNavLang(lang) {
|
||||||
|
var nodes = $("#side-nav").find("."+lang);
|
||||||
|
for (i=0; i < nodes.length; i++) { // for each node in this language
|
||||||
|
var node = $(nodes[i]);
|
||||||
|
node.siblings().css("display","none"); // hide all siblings
|
||||||
|
if (node.not(":empty").length != 0) { // if this languages node has a translation, show it
|
||||||
|
node.css("display","inline");
|
||||||
|
} else { // otherwise, show English instead
|
||||||
|
node.css("display","none");
|
||||||
|
node.siblings().filter(".en").css("display","inline");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function changeDocLang(lang) {
|
||||||
|
changeTabLang(lang);
|
||||||
|
changeNavLang(lang);
|
||||||
|
}
|
||||||
|
|
||||||
|
function changeLangPref(lang) {
|
||||||
|
var date = new Date();
|
||||||
|
date.setTime(date.getTime()+(50*365*24*60*60*1000)); // keep this for 50 years
|
||||||
|
writeCookie("pref_lang", lang, null, date);
|
||||||
|
|
||||||
|
changeDocLang(lang);
|
||||||
|
}
|
||||||
|
|
||||||
|
function loadLangPref() {
|
||||||
|
var lang = readCookie("pref_lang");
|
||||||
|
if (lang != 0) {
|
||||||
|
$("#language").find("option[value='"+lang+"']").attr("selected",true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function getLangPref() {
|
||||||
|
return $("#language").find(":selected").attr("value");
|
||||||
|
}
|
||||||
|
|
||||||
|
BIN
tools/droiddoc/templates/assets/images/home/bg_home_announcement.png
Executable file
BIN
tools/droiddoc/templates/assets/images/home/bg_home_announcement.png
Executable file
Binary file not shown.
After Width: | Height: | Size: 2.8 KiB |
BIN
tools/droiddoc/templates/assets/images/home/bg_home_carousel.png
Executable file
BIN
tools/droiddoc/templates/assets/images/home/bg_home_carousel.png
Executable file
Binary file not shown.
After Width: | Height: | Size: 3.6 KiB |
Binary file not shown.
Before Width: | Height: | Size: 3.5 KiB |
BIN
tools/droiddoc/templates/assets/images/icon_guidelines_logo.png
Normal file
BIN
tools/droiddoc/templates/assets/images/icon_guidelines_logo.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 43 KiB |
BIN
tools/droiddoc/templates/assets/images/open_source.png
Executable file
BIN
tools/droiddoc/templates/assets/images/open_source.png
Executable file
Binary file not shown.
After Width: | Height: | Size: 6.2 KiB |
BIN
tools/droiddoc/templates/assets/images/uiguidelines1.png
Normal file
BIN
tools/droiddoc/templates/assets/images/uiguidelines1.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 5.1 KiB |
@@ -168,6 +168,6 @@ function search_focus_changed(obj, focused)
|
|||||||
|
|
||||||
function submit_search() {
|
function submit_search() {
|
||||||
var query = document.getElementById('search_autocomplete').value;
|
var query = document.getElementById('search_autocomplete').value;
|
||||||
document.location = '/search.html#q=' + query;
|
document.location = toRoot + 'search.html#q=' + query; // toRoot is initialized in android-developer-docs.js
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
@@ -2,6 +2,10 @@
|
|||||||
<?cs # Use the -templatedir arg to javadoc to set your own directory with a ?>
|
<?cs # Use the -templatedir arg to javadoc to set your own directory with a ?>
|
||||||
<?cs # replacement for this file in it. ?>
|
<?cs # replacement for this file in it. ?>
|
||||||
|
|
||||||
|
|
||||||
|
<?cs def:default_search_box() ?><?cs /def ?>
|
||||||
|
<?cs def:default_left_nav() ?><?cs /def ?>
|
||||||
|
|
||||||
<?cs # appears at the top of every page ?><?cs
|
<?cs # appears at the top of every page ?><?cs
|
||||||
def:custom_masthead() ?>
|
def:custom_masthead() ?>
|
||||||
<div id="header">
|
<div id="header">
|
||||||
@@ -9,7 +13,9 @@ def:custom_masthead() ?>
|
|||||||
<a href="<?cs var:toroot ?>index.html" tabindex="-1"><?cs var:page.title ?></a>
|
<a href="<?cs var:toroot ?>index.html" tabindex="-1"><?cs var:page.title ?></a>
|
||||||
</div>
|
</div>
|
||||||
<div id="headerRight">
|
<div id="headerRight">
|
||||||
<?cs call:default_search_box() ?>
|
<?cs if:!online-pdk ?>
|
||||||
|
<?cs call:default_search_box() ?>
|
||||||
|
<?cs /if ?>
|
||||||
</div><!-- headerRight -->
|
</div><!-- headerRight -->
|
||||||
</div><!-- header --><?cs
|
</div><!-- header --><?cs
|
||||||
/def ?>
|
/def ?>
|
||||||
@@ -21,4 +27,4 @@ def:custom_masthead() ?>
|
|||||||
<?cs def:custom_buildinfo() ?>Build <?cs var:page.build ?> - <?cs var:page.now ?><?cs /def ?>
|
<?cs def:custom_buildinfo() ?>Build <?cs var:page.build ?> - <?cs var:page.now ?><?cs /def ?>
|
||||||
|
|
||||||
<?cs # appears on the side of the page ?>
|
<?cs # appears on the side of the page ?>
|
||||||
<?cs def:custom_left_nav() ?><?cs call:default_left_nav() ?><?cs /def ?>
|
<?cs def:custom_left_nav() ?><?cs call:default_left_nav() ?><?cs /def ?>
|
@@ -3,10 +3,7 @@
|
|||||||
<link rel="shortcut icon" type="image/x-icon" href="<?cs var:toroot ?>favicon.ico" />
|
<link rel="shortcut icon" type="image/x-icon" href="<?cs var:toroot ?>favicon.ico" />
|
||||||
<title><?cs
|
<title><?cs
|
||||||
if:page.title ?><?cs
|
if:page.title ?><?cs
|
||||||
var:page.title ?><?cs
|
var:page.title ?> | <?cs
|
||||||
if:sdk.version ?> (<?cs
|
|
||||||
var:sdk.version ?>)<?cs
|
|
||||||
/if ?> | <?cs
|
|
||||||
/if ?>Android Developers</title><?cs
|
/if ?>Android Developers</title><?cs
|
||||||
if:guide||sdk ?>
|
if:guide||sdk ?>
|
||||||
<link href="<?cs var:toroot ?>assets/android-developer-docs-devguide.css" rel="stylesheet" type="text/css" /><?cs
|
<link href="<?cs var:toroot ?>assets/android-developer-docs-devguide.css" rel="stylesheet" type="text/css" /><?cs
|
||||||
|
@@ -61,6 +61,9 @@ def:tag_list(tags) ?><?cs
|
|||||||
elif:tag.name == "@sample" ?><pre class="Code prettyprint"><?cs var:tag.text ?></pre><?cs
|
elif:tag.name == "@sample" ?><pre class="Code prettyprint"><?cs var:tag.text ?></pre><?cs
|
||||||
elif:tag.name == "@include" ?><?cs var:tag.text ?><?cs
|
elif:tag.name == "@include" ?><?cs var:tag.text ?><?cs
|
||||||
elif:tag.kind == "@docRoot" ?><?cs var:toroot ?><?cs
|
elif:tag.kind == "@docRoot" ?><?cs var:toroot ?><?cs
|
||||||
|
elif:tag.kind == "@sdkCurrent" ?><?cs var:sdk.current ?><?cs
|
||||||
|
elif:tag.kind == "@sdkCurrentVersion" ?><?cs var:sdk.version ?><?cs
|
||||||
|
elif:tag.kind == "@sdkCurrentRelId" ?><?cs var:sdk.rel.id ?><?cs
|
||||||
elif:tag.kind == "@inheritDoc" ?><?cs # This is the case when @inheritDoc is in something
|
elif:tag.kind == "@inheritDoc" ?><?cs # This is the case when @inheritDoc is in something
|
||||||
that doesn't inherit from anything?><?cs
|
that doesn't inherit from anything?><?cs
|
||||||
elif:tag.kind == "@attr" ?><?cs
|
elif:tag.kind == "@attr" ?><?cs
|
||||||
@@ -230,108 +233,5 @@ def:expandable_class_list(id, classes, default) ?>
|
|||||||
</div><?cs
|
</div><?cs
|
||||||
/def ?>
|
/def ?>
|
||||||
|
|
||||||
<?cs # The default side navigation for the reference docs ?><?cs
|
|
||||||
def:default_left_nav() ?>
|
|
||||||
<div class="g-section g-tpl-240" id="body-content">
|
|
||||||
<div class="g-unit g-first side-nav-resizable" id="side-nav">
|
|
||||||
<div id="swapper">
|
|
||||||
<div id="nav-panels">
|
|
||||||
<div id="resize-packages-nav">
|
|
||||||
<div id="packages-nav">
|
|
||||||
<div id="index-links"><nobr>
|
|
||||||
<a href="<?cs var:toroot ?>reference/packages.html" <?cs if:(page.title == "Package Index") ?>class="selected"<?cs /if ?> >Package Index</a> |
|
|
||||||
<a href="<?cs var:toroot ?>reference/classes.html" <?cs if:(page.title == "Class Index") ?>class="selected"<?cs /if ?>>Class Index</a></nobr>
|
|
||||||
</div>
|
|
||||||
<ul><?cs
|
|
||||||
each:pkg=docs.packages ?>
|
|
||||||
<li <?cs if:(class.package.name == pkg.name) || (package.name == pkg.name)?>class="selected"<?cs /if ?>><?cs call:package_link(pkg) ?></li><?cs
|
|
||||||
/each ?>
|
|
||||||
</ul><br/>
|
|
||||||
</div> <!-- end packages -->
|
|
||||||
</div> <!-- end resize-packages -->
|
|
||||||
<div id="classes-nav"><?cs
|
|
||||||
if:subcount(class.package) ?>
|
|
||||||
<ul>
|
|
||||||
<?cs call:list("Interfaces", class.package.interfaces) ?>
|
|
||||||
<?cs call:list("Classes", class.package.classes) ?>
|
|
||||||
<?cs call:list("Enums", class.package.enums) ?>
|
|
||||||
<?cs call:list("Exceptions", class.package.exceptions) ?>
|
|
||||||
<?cs call:list("Errors", class.package.errors) ?>
|
|
||||||
</ul><?cs
|
|
||||||
elif:subcount(package) ?>
|
|
||||||
<ul>
|
|
||||||
<?cs call:class_link_list("Interfaces", package.interfaces) ?>
|
|
||||||
<?cs call:class_link_list("Classes", package.classes) ?>
|
|
||||||
<?cs call:class_link_list("Enums", package.enums) ?>
|
|
||||||
<?cs call:class_link_list("Exceptions", package.exceptions) ?>
|
|
||||||
<?cs call:class_link_list("Errors", package.errors) ?>
|
|
||||||
</ul><?cs
|
|
||||||
else ?>
|
|
||||||
<script>
|
|
||||||
/*addLoadEvent(maxPackageHeight);*/
|
|
||||||
</script>
|
|
||||||
<p style="padding:10px">Select a package to view its members</p><?cs
|
|
||||||
/if ?><br/>
|
|
||||||
</div><!-- end classes -->
|
|
||||||
</div><!-- end nav-panels -->
|
|
||||||
<div id="nav-tree" style="display:none">
|
|
||||||
<div id="index-links"><nobr>
|
|
||||||
<a href="<?cs var:toroot ?>reference/packages.html" <?cs if:(page.title == "Package Index") ?>class="selected"<?cs /if ?> >Package Index</a> |
|
|
||||||
<a href="<?cs var:toroot ?>reference/classes.html" <?cs if:(page.title == "Class Index") ?>class="selected"<?cs /if ?>>Class Index</a></nobr>
|
|
||||||
</div>
|
|
||||||
</div><!-- end nav-tree -->
|
|
||||||
</div><!-- end swapper -->
|
|
||||||
</div> <!-- end side-nav -->
|
|
||||||
<script>
|
|
||||||
if (!isMobile) {
|
|
||||||
$("<a href='#' id='nav-swap' onclick='swapNav();return false;' style='font-size:10px;line-height:9px;margin-left:1em;text-decoration:none;'><span id='tree-link'>Use Tree Navigation</span><span id='panel-link' style='display:none'>Use Panel Navigation</span></a>").appendTo("#side-nav");
|
|
||||||
chooseDefaultNav();
|
|
||||||
if ($("#nav-tree").is(':visible')) init_navtree("nav-tree", "<?cs var:toroot ?>", NAVTREE_DATA);
|
|
||||||
else {
|
|
||||||
addLoadEvent(function() {
|
|
||||||
scrollIntoView("packages-nav");
|
|
||||||
scrollIntoView("classes-nav");
|
|
||||||
});
|
|
||||||
}
|
|
||||||
$("#swapper").css({borderBottom:"2px solid #aaa"});
|
|
||||||
} else {
|
|
||||||
swapNav(); // tree view should be used on mobile
|
|
||||||
}
|
|
||||||
</script><?cs
|
|
||||||
/def ?>
|
|
||||||
|
|
||||||
<?cs # The default search box that goes in the header ?><?cs
|
|
||||||
def:default_search_box() ?>
|
|
||||||
<div id="search" >
|
|
||||||
<div id="searchForm">
|
|
||||||
<form accept-charset="utf-8" class="gsc-search-box"
|
|
||||||
onsubmit="return submit_search()">
|
|
||||||
<table class="gsc-search-box" cellpadding="0" cellspacing="0"><tbody>
|
|
||||||
<tr>
|
|
||||||
<td class="gsc-input">
|
|
||||||
<input id="search_autocomplete" class="gsc-input" type="text" size="33" autocomplete="off"
|
|
||||||
title="search developer docs" name="q"
|
|
||||||
value="search developer docs"
|
|
||||||
onFocus="search_focus_changed(this, true)"
|
|
||||||
onBlur="search_focus_changed(this, false)"
|
|
||||||
onkeydown="return search_changed(event, true, '<?cs var:toroot?>')"
|
|
||||||
onkeyup="return search_changed(event, false, '<?cs var:toroot?>')" />
|
|
||||||
<div id="search_filtered_div" class="no-display">
|
|
||||||
<table id="search_filtered" cellspacing=0>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</td>
|
|
||||||
<td class="gsc-search-button">
|
|
||||||
<input type="submit" value="Search" title="search" id="search-button" class="gsc-search-button" />
|
|
||||||
</td>
|
|
||||||
<td class="gsc-clear-button">
|
|
||||||
<div title="clear results" class="gsc-clear-button"> </div>
|
|
||||||
</td>
|
|
||||||
</tr></tbody>
|
|
||||||
</table>
|
|
||||||
</form>
|
|
||||||
</div><!-- searchForm -->
|
|
||||||
</div><!-- search --><?cs
|
|
||||||
/def ?>
|
|
||||||
|
|
||||||
<?cs include:"customization.cs" ?>
|
<?cs include:"customization.cs" ?>
|
||||||
|
@@ -18,7 +18,7 @@
|
|||||||
|
|
||||||
<div id="jd-content">
|
<div id="jd-content">
|
||||||
|
|
||||||
<p><a href="<?cs var:realFile ?>">Original <?cs var:realFile ?></a></p>
|
<p>The file containing the source code shown below is located in the corresponding directory in <code><sdk>/platforms/android-<version>/samples/...</code></p>
|
||||||
|
|
||||||
<!-- begin file contents -->
|
<!-- begin file contents -->
|
||||||
<pre class="Code prettyprint"><?cs var:fileContents ?></pre>
|
<pre class="Code prettyprint"><?cs var:fileContents ?></pre>
|
||||||
|
205
tools/releasetools/amend_generator.py
Normal file
205
tools/releasetools/amend_generator.py
Normal file
@@ -0,0 +1,205 @@
|
|||||||
|
# Copyright (C) 2009 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
import common
|
||||||
|
|
||||||
|
class AmendGenerator(object):
|
||||||
|
"""Class to generate scripts in the 'amend' recovery script language
|
||||||
|
used up through cupcake."""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.script = ['assert compatible_with("0.2") == "true"']
|
||||||
|
self.included_files = set()
|
||||||
|
|
||||||
|
def MakeTemporary(self):
|
||||||
|
"""Make a temporary script object whose commands can latter be
|
||||||
|
appended to the parent script with AppendScript(). Used when the
|
||||||
|
caller wants to generate script commands out-of-order."""
|
||||||
|
x = AmendGenerator()
|
||||||
|
x.script = []
|
||||||
|
x.included_files = self.included_files
|
||||||
|
return x
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _FileRoot(fn):
|
||||||
|
"""Convert a file path to the 'root' notation used by amend."""
|
||||||
|
if fn.startswith("/system/"):
|
||||||
|
return "SYSTEM:" + fn[8:]
|
||||||
|
elif fn == "/system":
|
||||||
|
return "SYSTEM:"
|
||||||
|
elif fn.startswith("/tmp/"):
|
||||||
|
return "CACHE:.." + fn
|
||||||
|
else:
|
||||||
|
raise ValueError("don't know root for \"%s\"" % (fn,))
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _PartitionRoot(partition):
|
||||||
|
"""Convert a partition name to the 'root' notation used by amend."""
|
||||||
|
if partition == "userdata":
|
||||||
|
return "DATA:"
|
||||||
|
else:
|
||||||
|
return partition.upper() + ":"
|
||||||
|
|
||||||
|
def AppendScript(self, other):
|
||||||
|
"""Append the contents of another script (which should be created
|
||||||
|
with temporary=True) to this one."""
|
||||||
|
self.script.extend(other.script)
|
||||||
|
self.included_files.update(other.included_files)
|
||||||
|
|
||||||
|
def AssertSomeFingerprint(self, *fp):
|
||||||
|
"""Assert that the current fingerprint is one of *fp."""
|
||||||
|
x = [('file_contains("SYSTEM:build.prop", '
|
||||||
|
'"ro.build.fingerprint=%s") == "true"') % i for i in fp]
|
||||||
|
self.script.append("assert %s" % (" || ".join(x),))
|
||||||
|
|
||||||
|
def AssertOlderBuild(self, timestamp):
|
||||||
|
"""Assert that the build on the device is older (or the same as)
|
||||||
|
the given timestamp."""
|
||||||
|
self.script.append("run_program PACKAGE:check_prereq %s" % (timestamp,))
|
||||||
|
self.included_files.add("check_prereq")
|
||||||
|
|
||||||
|
def AssertDevice(self, device):
|
||||||
|
"""Assert that the device identifier is the given string."""
|
||||||
|
self.script.append('assert getprop("ro.product.device") == "%s" || '
|
||||||
|
'getprop("ro.build.product") == "%s"' % (device, device))
|
||||||
|
|
||||||
|
def AssertSomeBootloader(self, *bootloaders):
|
||||||
|
"""Asert that the bootloader version is one of *bootloaders."""
|
||||||
|
self.script.append("assert " +
|
||||||
|
" || ".join(['getprop("ro.bootloader") == "%s"' % (b,)
|
||||||
|
for b in bootloaders]))
|
||||||
|
|
||||||
|
def ShowProgress(self, frac, dur):
|
||||||
|
"""Update the progress bar, advancing it over 'frac' over the next
|
||||||
|
'dur' seconds."""
|
||||||
|
self.script.append("show_progress %f %d" % (frac, int(dur)))
|
||||||
|
|
||||||
|
def PatchCheck(self, filename, *sha1):
|
||||||
|
"""Check that the given file (or MTD reference) has one of the
|
||||||
|
given *sha1 hashes."""
|
||||||
|
out = ["run_program PACKAGE:applypatch -c %s" % (filename,)]
|
||||||
|
for i in sha1:
|
||||||
|
out.append(" " + i)
|
||||||
|
self.script.append("".join(out))
|
||||||
|
self.included_files.add("applypatch")
|
||||||
|
|
||||||
|
def CacheFreeSpaceCheck(self, amount):
|
||||||
|
"""Check that there's at least 'amount' space that can be made
|
||||||
|
available on /cache."""
|
||||||
|
self.script.append("run_program PACKAGE:applypatch -s %d" % (amount,))
|
||||||
|
self.included_files.add("applypatch")
|
||||||
|
|
||||||
|
def Mount(self, kind, what, path):
|
||||||
|
# no-op; amend uses it's 'roots' system to automatically mount
|
||||||
|
# things when they're referred to
|
||||||
|
pass
|
||||||
|
|
||||||
|
def UnpackPackageDir(self, src, dst):
|
||||||
|
"""Unpack a given directory from the OTA package into the given
|
||||||
|
destination directory."""
|
||||||
|
dst = self._FileRoot(dst)
|
||||||
|
self.script.append("copy_dir PACKAGE:%s %s" % (src, dst))
|
||||||
|
|
||||||
|
def Comment(self, comment):
|
||||||
|
"""Write a comment into the update script."""
|
||||||
|
self.script.append("")
|
||||||
|
for i in comment.split("\n"):
|
||||||
|
self.script.append("# " + i)
|
||||||
|
self.script.append("")
|
||||||
|
|
||||||
|
def Print(self, message):
|
||||||
|
"""Log a message to the screen (if the logs are visible)."""
|
||||||
|
# no way to do this from amend; substitute a script comment instead
|
||||||
|
self.Comment(message)
|
||||||
|
|
||||||
|
def FormatPartition(self, partition):
|
||||||
|
"""Format the given MTD partition."""
|
||||||
|
self.script.append("format %s" % (self._PartitionRoot(partition),))
|
||||||
|
|
||||||
|
def DeleteFiles(self, file_list):
|
||||||
|
"""Delete all files in file_list."""
|
||||||
|
line = []
|
||||||
|
t = 0
|
||||||
|
for i in file_list:
|
||||||
|
i = self._FileRoot(i)
|
||||||
|
line.append(i)
|
||||||
|
t += len(i) + 1
|
||||||
|
if t > 80:
|
||||||
|
self.script.append("delete " + " ".join(line))
|
||||||
|
line = []
|
||||||
|
t = 0
|
||||||
|
if line:
|
||||||
|
self.script.append("delete " + " ".join(line))
|
||||||
|
|
||||||
|
def ApplyPatch(self, srcfile, tgtfile, tgtsize, tgtsha1, *patchpairs):
|
||||||
|
"""Apply binary patches (in *patchpairs) to the given srcfile to
|
||||||
|
produce tgtfile (which may be "-" to indicate overwriting the
|
||||||
|
source file."""
|
||||||
|
if len(patchpairs) % 2 != 0:
|
||||||
|
raise ValueError("bad patches given to ApplyPatch")
|
||||||
|
self.script.append(
|
||||||
|
("run_program PACKAGE:applypatch %s %s %s %d " %
|
||||||
|
(srcfile, tgtfile, tgtsha1, tgtsize)) +
|
||||||
|
" ".join(["%s:%s" % patchpairs[i:i+2]
|
||||||
|
for i in range(0, len(patchpairs), 2)]))
|
||||||
|
self.included_files.add("applypatch")
|
||||||
|
|
||||||
|
def WriteFirmwareImage(self, kind, fn):
|
||||||
|
"""Arrange to update the given firmware image (kind must be
|
||||||
|
"hboot" or "radio") when recovery finishes."""
|
||||||
|
self.script.append("write_%s_image PACKAGE:%s" % (kind, fn))
|
||||||
|
|
||||||
|
def WriteRawImage(self, partition, fn):
|
||||||
|
"""Write the given file into the given MTD partition."""
|
||||||
|
self.script.append("write_raw_image PACKAGE:%s %s" %
|
||||||
|
(fn, self._PartitionRoot(partition)))
|
||||||
|
|
||||||
|
def SetPermissions(self, fn, uid, gid, mode):
|
||||||
|
"""Set file ownership and permissions."""
|
||||||
|
fn = self._FileRoot(fn)
|
||||||
|
self.script.append("set_perm %d %d 0%o %s" % (uid, gid, mode, fn))
|
||||||
|
|
||||||
|
def SetPermissionsRecursive(self, fn, uid, gid, dmode, fmode):
|
||||||
|
"""Recursively set path ownership and permissions."""
|
||||||
|
fn = self._FileRoot(fn)
|
||||||
|
self.script.append("set_perm_recursive %d %d 0%o 0%o %s" %
|
||||||
|
(uid, gid, dmode, fmode, fn))
|
||||||
|
|
||||||
|
def MakeSymlinks(self, symlink_list):
|
||||||
|
"""Create symlinks, given a list of (dest, link) pairs."""
|
||||||
|
self.script.extend(["symlink %s %s" % (i[0], self._FileRoot(i[1]))
|
||||||
|
for i in sorted(symlink_list)])
|
||||||
|
|
||||||
|
def AppendExtra(self, extra):
|
||||||
|
"""Append text verbatim to the output script."""
|
||||||
|
self.script.append(extra)
|
||||||
|
|
||||||
|
def AddToZip(self, input_zip, output_zip, input_path=None):
|
||||||
|
"""Write the accumulated script to the output_zip file. input_zip
|
||||||
|
is used as the source for any ancillary binaries needed by the
|
||||||
|
script. If input_path is not None, it will be used as a local
|
||||||
|
path for binaries instead of input_zip."""
|
||||||
|
common.ZipWriteStr(output_zip, "META-INF/com/google/android/update-script",
|
||||||
|
"\n".join(self.script) + "\n")
|
||||||
|
for i in self.included_files:
|
||||||
|
try:
|
||||||
|
if input_path is None:
|
||||||
|
data = input_zip.read(os.path.join("OTA/bin", i))
|
||||||
|
else:
|
||||||
|
data = open(os.path.join(input_path, i)).read()
|
||||||
|
common.ZipWriteStr(output_zip, i, data, perms=0755)
|
||||||
|
except (IOError, KeyError), e:
|
||||||
|
raise ExternalError("unable to include binary %s: %s" % (i, e))
|
@@ -12,6 +12,7 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
import errno
|
||||||
import getopt
|
import getopt
|
||||||
import getpass
|
import getpass
|
||||||
import os
|
import os
|
||||||
@@ -20,6 +21,7 @@ import shutil
|
|||||||
import subprocess
|
import subprocess
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
|
import zipfile
|
||||||
|
|
||||||
# missing in Python 2.4 and before
|
# missing in Python 2.4 and before
|
||||||
if not hasattr(os, "SEEK_SET"):
|
if not hasattr(os, "SEEK_SET"):
|
||||||
@@ -27,7 +29,7 @@ if not hasattr(os, "SEEK_SET"):
|
|||||||
|
|
||||||
class Options(object): pass
|
class Options(object): pass
|
||||||
OPTIONS = Options()
|
OPTIONS = Options()
|
||||||
OPTIONS.signapk_jar = "out/host/linux-x86/framework/signapk.jar"
|
OPTIONS.search_path = "out/host/linux-x86"
|
||||||
OPTIONS.max_image_size = {}
|
OPTIONS.max_image_size = {}
|
||||||
OPTIONS.verbose = False
|
OPTIONS.verbose = False
|
||||||
OPTIONS.tempfiles = []
|
OPTIONS.tempfiles = []
|
||||||
@@ -61,40 +63,62 @@ def LoadBoardConfig(fn):
|
|||||||
def BuildAndAddBootableImage(sourcedir, targetname, output_zip):
|
def BuildAndAddBootableImage(sourcedir, targetname, output_zip):
|
||||||
"""Take a kernel, cmdline, and ramdisk directory from the input (in
|
"""Take a kernel, cmdline, and ramdisk directory from the input (in
|
||||||
'sourcedir'), and turn them into a boot image. Put the boot image
|
'sourcedir'), and turn them into a boot image. Put the boot image
|
||||||
into the output zip file under the name 'targetname'."""
|
into the output zip file under the name 'targetname'. Returns
|
||||||
|
targetname on success or None on failure (if sourcedir does not
|
||||||
|
appear to contain files for the requested image)."""
|
||||||
|
|
||||||
print "creating %s..." % (targetname,)
|
print "creating %s..." % (targetname,)
|
||||||
|
|
||||||
img = BuildBootableImage(sourcedir)
|
img = BuildBootableImage(sourcedir)
|
||||||
|
if img is None:
|
||||||
|
return None
|
||||||
|
|
||||||
CheckSize(img, targetname)
|
CheckSize(img, targetname)
|
||||||
output_zip.writestr(targetname, img)
|
ZipWriteStr(output_zip, targetname, img)
|
||||||
|
return targetname
|
||||||
|
|
||||||
def BuildBootableImage(sourcedir):
|
def BuildBootableImage(sourcedir):
|
||||||
"""Take a kernel, cmdline, and ramdisk directory from the input (in
|
"""Take a kernel, cmdline, and ramdisk directory from the input (in
|
||||||
'sourcedir'), and turn them into a boot image. Return the image data."""
|
'sourcedir'), and turn them into a boot image. Return the image
|
||||||
|
data, or None if sourcedir does not appear to contains files for
|
||||||
|
building the requested image."""
|
||||||
|
|
||||||
|
if (not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK) or
|
||||||
|
not os.access(os.path.join(sourcedir, "kernel"), os.F_OK)):
|
||||||
|
return None
|
||||||
|
|
||||||
ramdisk_img = tempfile.NamedTemporaryFile()
|
ramdisk_img = tempfile.NamedTemporaryFile()
|
||||||
img = tempfile.NamedTemporaryFile()
|
img = tempfile.NamedTemporaryFile()
|
||||||
|
|
||||||
p1 = Run(["mkbootfs", os.path.join(sourcedir, "RAMDISK")],
|
p1 = Run(["mkbootfs", os.path.join(sourcedir, "RAMDISK")],
|
||||||
stdout=subprocess.PIPE)
|
stdout=subprocess.PIPE)
|
||||||
p2 = Run(["gzip", "-n"], stdin=p1.stdout, stdout=ramdisk_img.file.fileno())
|
p2 = Run(["minigzip"],
|
||||||
|
stdin=p1.stdout, stdout=ramdisk_img.file.fileno())
|
||||||
|
|
||||||
p2.wait()
|
p2.wait()
|
||||||
p1.wait()
|
p1.wait()
|
||||||
assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (targetname,)
|
assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (targetname,)
|
||||||
assert p2.returncode == 0, "gzip of %s ramdisk failed" % (targetname,)
|
assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (targetname,)
|
||||||
|
|
||||||
cmdline = open(os.path.join(sourcedir, "cmdline")).read().rstrip("\n")
|
cmd = ["mkbootimg", "--kernel", os.path.join(sourcedir, "kernel")]
|
||||||
p = Run(["mkbootimg",
|
|
||||||
"--kernel", os.path.join(sourcedir, "kernel"),
|
fn = os.path.join(sourcedir, "cmdline")
|
||||||
"--cmdline", cmdline,
|
if os.access(fn, os.F_OK):
|
||||||
"--ramdisk", ramdisk_img.name,
|
cmd.append("--cmdline")
|
||||||
"--output", img.name],
|
cmd.append(open(fn).read().rstrip("\n"))
|
||||||
stdout=subprocess.PIPE)
|
|
||||||
|
fn = os.path.join(sourcedir, "base")
|
||||||
|
if os.access(fn, os.F_OK):
|
||||||
|
cmd.append("--base")
|
||||||
|
cmd.append(open(fn).read().rstrip("\n"))
|
||||||
|
|
||||||
|
cmd.extend(["--ramdisk", ramdisk_img.name,
|
||||||
|
"--output", img.name])
|
||||||
|
|
||||||
|
p = Run(cmd, stdout=subprocess.PIPE)
|
||||||
p.communicate()
|
p.communicate()
|
||||||
assert p.returncode == 0, "mkbootimg of %s image failed" % (targetname,)
|
assert p.returncode == 0, "mkbootimg of %s image failed" % (
|
||||||
|
os.path.basename(sourcedir),)
|
||||||
|
|
||||||
img.seek(os.SEEK_SET, 0)
|
img.seek(os.SEEK_SET, 0)
|
||||||
data = img.read()
|
data = img.read()
|
||||||
@@ -131,22 +155,30 @@ def GetKeyPasswords(keylist):
|
|||||||
those which require them. Return a {key: password} dict. password
|
those which require them. Return a {key: password} dict. password
|
||||||
will be None if the key has no password."""
|
will be None if the key has no password."""
|
||||||
|
|
||||||
key_passwords = {}
|
no_passwords = []
|
||||||
|
need_passwords = []
|
||||||
devnull = open("/dev/null", "w+b")
|
devnull = open("/dev/null", "w+b")
|
||||||
for k in sorted(keylist):
|
for k in sorted(keylist):
|
||||||
p = subprocess.Popen(["openssl", "pkcs8", "-in", k+".pk8",
|
# An empty-string key is used to mean don't re-sign this package.
|
||||||
"-inform", "DER", "-nocrypt"],
|
# Obviously we don't need a password for this non-key.
|
||||||
stdin=devnull.fileno(),
|
if not k:
|
||||||
stdout=devnull.fileno(),
|
no_passwords.append(k)
|
||||||
stderr=subprocess.STDOUT)
|
continue
|
||||||
|
|
||||||
|
p = Run(["openssl", "pkcs8", "-in", k+".pk8",
|
||||||
|
"-inform", "DER", "-nocrypt"],
|
||||||
|
stdin=devnull.fileno(),
|
||||||
|
stdout=devnull.fileno(),
|
||||||
|
stderr=subprocess.STDOUT)
|
||||||
p.communicate()
|
p.communicate()
|
||||||
if p.returncode == 0:
|
if p.returncode == 0:
|
||||||
print "%s.pk8 does not require a password" % (k,)
|
no_passwords.append(k)
|
||||||
key_passwords[k] = None
|
|
||||||
else:
|
else:
|
||||||
key_passwords[k] = getpass.getpass("Enter password for %s.pk8> " % (k,))
|
need_passwords.append(k)
|
||||||
devnull.close()
|
devnull.close()
|
||||||
print
|
|
||||||
|
key_passwords = PasswordManager().GetPasswords(need_passwords)
|
||||||
|
key_passwords.update(dict.fromkeys(no_passwords, None))
|
||||||
return key_passwords
|
return key_passwords
|
||||||
|
|
||||||
|
|
||||||
@@ -167,12 +199,13 @@ def SignFile(input_name, output_name, key, password, align=None):
|
|||||||
else:
|
else:
|
||||||
sign_name = output_name
|
sign_name = output_name
|
||||||
|
|
||||||
p = subprocess.Popen(["java", "-jar", OPTIONS.signapk_jar,
|
p = Run(["java", "-jar",
|
||||||
key + ".x509.pem",
|
os.path.join(OPTIONS.search_path, "framework", "signapk.jar"),
|
||||||
key + ".pk8",
|
key + ".x509.pem",
|
||||||
input_name, sign_name],
|
key + ".pk8",
|
||||||
stdin=subprocess.PIPE,
|
input_name, sign_name],
|
||||||
stdout=subprocess.PIPE)
|
stdin=subprocess.PIPE,
|
||||||
|
stdout=subprocess.PIPE)
|
||||||
if password is not None:
|
if password is not None:
|
||||||
password += "\n"
|
password += "\n"
|
||||||
p.communicate(password)
|
p.communicate(password)
|
||||||
@@ -180,7 +213,7 @@ def SignFile(input_name, output_name, key, password, align=None):
|
|||||||
raise ExternalError("signapk.jar failed: return code %s" % (p.returncode,))
|
raise ExternalError("signapk.jar failed: return code %s" % (p.returncode,))
|
||||||
|
|
||||||
if align:
|
if align:
|
||||||
p = subprocess.Popen(["zipalign", "-f", str(align), sign_name, output_name])
|
p = Run(["zipalign", "-f", str(align), sign_name, output_name])
|
||||||
p.communicate()
|
p.communicate()
|
||||||
if p.returncode != 0:
|
if p.returncode != 0:
|
||||||
raise ExternalError("zipalign failed: return code %s" % (p.returncode,))
|
raise ExternalError("zipalign failed: return code %s" % (p.returncode,))
|
||||||
@@ -209,8 +242,8 @@ def CheckSize(data, target):
|
|||||||
|
|
||||||
COMMON_DOCSTRING = """
|
COMMON_DOCSTRING = """
|
||||||
-p (--path) <dir>
|
-p (--path) <dir>
|
||||||
Prepend <dir> to the list of places to search for binaries run
|
Prepend <dir>/bin to the list of places to search for binaries
|
||||||
by this script.
|
run by this script, and expect to find jars in <dir>/framework.
|
||||||
|
|
||||||
-v (--verbose)
|
-v (--verbose)
|
||||||
Show command lines being executed.
|
Show command lines being executed.
|
||||||
@@ -252,15 +285,13 @@ def ParseOptions(argv,
|
|||||||
elif o in ("-v", "--verbose"):
|
elif o in ("-v", "--verbose"):
|
||||||
OPTIONS.verbose = True
|
OPTIONS.verbose = True
|
||||||
elif o in ("-p", "--path"):
|
elif o in ("-p", "--path"):
|
||||||
os.environ["PATH"] = a + os.pathsep + os.environ["PATH"]
|
OPTIONS.search_path = a
|
||||||
path_specified = True
|
|
||||||
else:
|
else:
|
||||||
if extra_option_handler is None or not extra_option_handler(o, a):
|
if extra_option_handler is None or not extra_option_handler(o, a):
|
||||||
assert False, "unknown option \"%s\"" % (o,)
|
assert False, "unknown option \"%s\"" % (o,)
|
||||||
|
|
||||||
if not path_specified:
|
os.environ["PATH"] = (os.path.join(OPTIONS.search_path, "bin") +
|
||||||
os.environ["PATH"] = ("out/host/linux-x86/bin" + os.pathsep +
|
os.pathsep + os.environ["PATH"])
|
||||||
os.environ["PATH"])
|
|
||||||
|
|
||||||
return args
|
return args
|
||||||
|
|
||||||
@@ -271,3 +302,111 @@ def Cleanup():
|
|||||||
shutil.rmtree(i)
|
shutil.rmtree(i)
|
||||||
else:
|
else:
|
||||||
os.remove(i)
|
os.remove(i)
|
||||||
|
|
||||||
|
|
||||||
|
class PasswordManager(object):
|
||||||
|
def __init__(self):
|
||||||
|
self.editor = os.getenv("EDITOR", None)
|
||||||
|
self.pwfile = os.getenv("ANDROID_PW_FILE", None)
|
||||||
|
|
||||||
|
def GetPasswords(self, items):
|
||||||
|
"""Get passwords corresponding to each string in 'items',
|
||||||
|
returning a dict. (The dict may have keys in addition to the
|
||||||
|
values in 'items'.)
|
||||||
|
|
||||||
|
Uses the passwords in $ANDROID_PW_FILE if available, letting the
|
||||||
|
user edit that file to add more needed passwords. If no editor is
|
||||||
|
available, or $ANDROID_PW_FILE isn't define, prompts the user
|
||||||
|
interactively in the ordinary way.
|
||||||
|
"""
|
||||||
|
|
||||||
|
current = self.ReadFile()
|
||||||
|
|
||||||
|
first = True
|
||||||
|
while True:
|
||||||
|
missing = []
|
||||||
|
for i in items:
|
||||||
|
if i not in current or not current[i]:
|
||||||
|
missing.append(i)
|
||||||
|
# Are all the passwords already in the file?
|
||||||
|
if not missing: return current
|
||||||
|
|
||||||
|
for i in missing:
|
||||||
|
current[i] = ""
|
||||||
|
|
||||||
|
if not first:
|
||||||
|
print "key file %s still missing some passwords." % (self.pwfile,)
|
||||||
|
answer = raw_input("try to edit again? [y]> ").strip()
|
||||||
|
if answer and answer[0] not in 'yY':
|
||||||
|
raise RuntimeError("key passwords unavailable")
|
||||||
|
first = False
|
||||||
|
|
||||||
|
current = self.UpdateAndReadFile(current)
|
||||||
|
|
||||||
|
def PromptResult(self, current):
|
||||||
|
"""Prompt the user to enter a value (password) for each key in
|
||||||
|
'current' whose value is fales. Returns a new dict with all the
|
||||||
|
values.
|
||||||
|
"""
|
||||||
|
result = {}
|
||||||
|
for k, v in sorted(current.iteritems()):
|
||||||
|
if v:
|
||||||
|
result[k] = v
|
||||||
|
else:
|
||||||
|
while True:
|
||||||
|
result[k] = getpass.getpass("Enter password for %s key> "
|
||||||
|
% (k,)).strip()
|
||||||
|
if result[k]: break
|
||||||
|
return result
|
||||||
|
|
||||||
|
def UpdateAndReadFile(self, current):
|
||||||
|
if not self.editor or not self.pwfile:
|
||||||
|
return self.PromptResult(current)
|
||||||
|
|
||||||
|
f = open(self.pwfile, "w")
|
||||||
|
os.chmod(self.pwfile, 0600)
|
||||||
|
f.write("# Enter key passwords between the [[[ ]]] brackets.\n")
|
||||||
|
f.write("# (Additional spaces are harmless.)\n\n")
|
||||||
|
|
||||||
|
first_line = None
|
||||||
|
sorted = [(not v, k, v) for (k, v) in current.iteritems()]
|
||||||
|
sorted.sort()
|
||||||
|
for i, (_, k, v) in enumerate(sorted):
|
||||||
|
f.write("[[[ %s ]]] %s\n" % (v, k))
|
||||||
|
if not v and first_line is None:
|
||||||
|
# position cursor on first line with no password.
|
||||||
|
first_line = i + 4
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
p = Run([self.editor, "+%d" % (first_line,), self.pwfile])
|
||||||
|
_, _ = p.communicate()
|
||||||
|
|
||||||
|
return self.ReadFile()
|
||||||
|
|
||||||
|
def ReadFile(self):
|
||||||
|
result = {}
|
||||||
|
if self.pwfile is None: return result
|
||||||
|
try:
|
||||||
|
f = open(self.pwfile, "r")
|
||||||
|
for line in f:
|
||||||
|
line = line.strip()
|
||||||
|
if not line or line[0] == '#': continue
|
||||||
|
m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
|
||||||
|
if not m:
|
||||||
|
print "failed to parse password file: ", line
|
||||||
|
else:
|
||||||
|
result[m.group(2)] = m.group(1)
|
||||||
|
f.close()
|
||||||
|
except IOError, e:
|
||||||
|
if e.errno != errno.ENOENT:
|
||||||
|
print "error reading password file: ", str(e)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def ZipWriteStr(zip, filename, data, perms=0644):
|
||||||
|
# use a fixed timestamp so the output is repeatable.
|
||||||
|
zinfo = zipfile.ZipInfo(filename=filename,
|
||||||
|
date_time=(2009, 1, 1, 0, 0, 0))
|
||||||
|
zinfo.compress_type = zip.compression
|
||||||
|
zinfo.external_attr = perms << 16
|
||||||
|
zip.writestr(zinfo, data)
|
||||||
|
226
tools/releasetools/edify_generator.py
Normal file
226
tools/releasetools/edify_generator.py
Normal file
@@ -0,0 +1,226 @@
|
|||||||
|
# Copyright (C) 2009 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
|
||||||
|
import common
|
||||||
|
|
||||||
|
class EdifyGenerator(object):
|
||||||
|
"""Class to generate scripts in the 'edify' recovery script language
|
||||||
|
used from donut onwards."""
|
||||||
|
|
||||||
|
def __init__(self, version):
|
||||||
|
self.script = []
|
||||||
|
self.mounts = set()
|
||||||
|
self.version = version
|
||||||
|
|
||||||
|
def MakeTemporary(self):
|
||||||
|
"""Make a temporary script object whose commands can latter be
|
||||||
|
appended to the parent script with AppendScript(). Used when the
|
||||||
|
caller wants to generate script commands out-of-order."""
|
||||||
|
x = EdifyGenerator(self.version)
|
||||||
|
x.mounts = self.mounts
|
||||||
|
return x
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _WordWrap(cmd, linelen=80):
|
||||||
|
"""'cmd' should be a function call with null characters after each
|
||||||
|
parameter (eg, "somefun(foo,\0bar,\0baz)"). This function wraps cmd
|
||||||
|
to a given line length, replacing nulls with spaces and/or newlines
|
||||||
|
to format it nicely."""
|
||||||
|
indent = cmd.index("(")+1
|
||||||
|
out = []
|
||||||
|
first = True
|
||||||
|
x = re.compile("^(.{,%d})\0" % (linelen-indent,))
|
||||||
|
while True:
|
||||||
|
if not first:
|
||||||
|
out.append(" " * indent)
|
||||||
|
first = False
|
||||||
|
m = x.search(cmd)
|
||||||
|
if not m:
|
||||||
|
parts = cmd.split("\0", 1)
|
||||||
|
out.append(parts[0]+"\n")
|
||||||
|
if len(parts) == 1:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
cmd = parts[1]
|
||||||
|
continue
|
||||||
|
out.append(m.group(1)+"\n")
|
||||||
|
cmd = cmd[m.end():]
|
||||||
|
|
||||||
|
return "".join(out).replace("\0", " ").rstrip("\n")
|
||||||
|
|
||||||
|
def AppendScript(self, other):
|
||||||
|
"""Append the contents of another script (which should be created
|
||||||
|
with temporary=True) to this one."""
|
||||||
|
self.script.extend(other.script)
|
||||||
|
|
||||||
|
def AssertSomeFingerprint(self, *fp):
|
||||||
|
"""Assert that the current system build fingerprint is one of *fp."""
|
||||||
|
if not fp:
|
||||||
|
raise ValueError("must specify some fingerprints")
|
||||||
|
cmd = ('assert(' +
|
||||||
|
' ||\0'.join([('file_getprop("/system/build.prop", '
|
||||||
|
'"ro.build.fingerprint") == "%s"')
|
||||||
|
% i for i in fp]) +
|
||||||
|
');')
|
||||||
|
self.script.append(self._WordWrap(cmd))
|
||||||
|
|
||||||
|
def AssertOlderBuild(self, timestamp):
|
||||||
|
"""Assert that the build on the device is older (or the same as)
|
||||||
|
the given timestamp."""
|
||||||
|
self.script.append(('assert(!less_than_int(%s, '
|
||||||
|
'getprop("ro.build.date.utc")));') % (timestamp,))
|
||||||
|
|
||||||
|
def AssertDevice(self, device):
|
||||||
|
"""Assert that the device identifier is the given string."""
|
||||||
|
cmd = ('assert(getprop("ro.product.device") == "%s" ||\0'
|
||||||
|
'getprop("ro.build.product") == "%s");' % (device, device))
|
||||||
|
self.script.append(self._WordWrap(cmd))
|
||||||
|
|
||||||
|
def AssertSomeBootloader(self, *bootloaders):
|
||||||
|
"""Asert that the bootloader version is one of *bootloaders."""
|
||||||
|
cmd = ("assert(" +
|
||||||
|
" ||\0".join(['getprop("ro.bootloader") == "%s"' % (b,)
|
||||||
|
for b in bootloaders]) +
|
||||||
|
");")
|
||||||
|
self.script.append(self._WordWrap(cmd))
|
||||||
|
|
||||||
|
def ShowProgress(self, frac, dur):
|
||||||
|
"""Update the progress bar, advancing it over 'frac' over the next
|
||||||
|
'dur' seconds."""
|
||||||
|
self.script.append("show_progress(%f, %d);" % (frac, int(dur)))
|
||||||
|
|
||||||
|
def PatchCheck(self, filename, *sha1):
|
||||||
|
"""Check that the given file (or MTD reference) has one of the
|
||||||
|
given *sha1 hashes."""
|
||||||
|
self.script.append('assert(apply_patch_check("%s"' % (filename,) +
|
||||||
|
"".join([', "%s"' % (i,) for i in sha1]) +
|
||||||
|
'));')
|
||||||
|
|
||||||
|
def CacheFreeSpaceCheck(self, amount):
|
||||||
|
"""Check that there's at least 'amount' space that can be made
|
||||||
|
available on /cache."""
|
||||||
|
self.script.append("assert(apply_patch_space(%d));" % (amount,))
|
||||||
|
|
||||||
|
def Mount(self, kind, what, path):
|
||||||
|
"""Mount the given 'what' at the given path. 'what' should be a
|
||||||
|
partition name if kind is "MTD", or a block device if kind is
|
||||||
|
"vfat". No other values of 'kind' are supported."""
|
||||||
|
self.script.append('mount("%s", "%s", "%s");' % (kind, what, path))
|
||||||
|
self.mounts.add(path)
|
||||||
|
|
||||||
|
def UnpackPackageDir(self, src, dst):
|
||||||
|
"""Unpack a given directory from the OTA package into the given
|
||||||
|
destination directory."""
|
||||||
|
self.script.append('package_extract_dir("%s", "%s");' % (src, dst))
|
||||||
|
|
||||||
|
def Comment(self, comment):
|
||||||
|
"""Write a comment into the update script."""
|
||||||
|
self.script.append("")
|
||||||
|
for i in comment.split("\n"):
|
||||||
|
self.script.append("# " + i)
|
||||||
|
self.script.append("")
|
||||||
|
|
||||||
|
def Print(self, message):
|
||||||
|
"""Log a message to the screen (if the logs are visible)."""
|
||||||
|
self.script.append('ui_print("%s");' % (message,))
|
||||||
|
|
||||||
|
def FormatPartition(self, partition):
|
||||||
|
"""Format the given MTD partition."""
|
||||||
|
self.script.append('format("MTD", "%s");' % (partition,))
|
||||||
|
|
||||||
|
def DeleteFiles(self, file_list):
|
||||||
|
"""Delete all files in file_list."""
|
||||||
|
if not file_list: return
|
||||||
|
cmd = "delete(" + ",\0".join(['"%s"' % (i,) for i in file_list]) + ");"
|
||||||
|
self.script.append(self._WordWrap(cmd))
|
||||||
|
|
||||||
|
def ApplyPatch(self, srcfile, tgtfile, tgtsize, tgtsha1, *patchpairs):
|
||||||
|
"""Apply binary patches (in *patchpairs) to the given srcfile to
|
||||||
|
produce tgtfile (which may be "-" to indicate overwriting the
|
||||||
|
source file."""
|
||||||
|
if len(patchpairs) % 2 != 0 or len(patchpairs) == 0:
|
||||||
|
raise ValueError("bad patches given to ApplyPatch")
|
||||||
|
cmd = ['apply_patch("%s",\0"%s",\0%s,\0%d'
|
||||||
|
% (srcfile, tgtfile, tgtsha1, tgtsize)]
|
||||||
|
for i in range(0, len(patchpairs), 2):
|
||||||
|
cmd.append(',\0"%s:%s"' % patchpairs[i:i+2])
|
||||||
|
cmd.append(');')
|
||||||
|
cmd = "".join(cmd)
|
||||||
|
self.script.append(self._WordWrap(cmd))
|
||||||
|
|
||||||
|
def WriteFirmwareImage(self, kind, fn):
|
||||||
|
"""Arrange to update the given firmware image (kind must be
|
||||||
|
"hboot" or "radio") when recovery finishes."""
|
||||||
|
if self.version == 1:
|
||||||
|
self.script.append(
|
||||||
|
('assert(package_extract_file("%(fn)s", "/tmp/%(kind)s.img"),\n'
|
||||||
|
' write_firmware_image("/tmp/%(kind)s.img", "%(kind)s"));')
|
||||||
|
% {'kind': kind, 'fn': fn})
|
||||||
|
else:
|
||||||
|
self.script.append(
|
||||||
|
'write_firmware_image("PACKAGE:%s", "%s");' % (fn, kind))
|
||||||
|
|
||||||
|
def WriteRawImage(self, partition, fn):
|
||||||
|
"""Write the given package file into the given MTD partition."""
|
||||||
|
self.script.append(
|
||||||
|
('assert(package_extract_file("%(fn)s", "/tmp/%(partition)s.img"),\n'
|
||||||
|
' write_raw_image("/tmp/%(partition)s.img", "%(partition)s"),\n'
|
||||||
|
' delete("/tmp/%(partition)s.img"));')
|
||||||
|
% {'partition': partition, 'fn': fn})
|
||||||
|
|
||||||
|
def SetPermissions(self, fn, uid, gid, mode):
|
||||||
|
"""Set file ownership and permissions."""
|
||||||
|
self.script.append('set_perm(%d, %d, 0%o, "%s");' % (uid, gid, mode, fn))
|
||||||
|
|
||||||
|
def SetPermissionsRecursive(self, fn, uid, gid, dmode, fmode):
|
||||||
|
"""Recursively set path ownership and permissions."""
|
||||||
|
self.script.append('set_perm_recursive(%d, %d, 0%o, 0%o, "%s");'
|
||||||
|
% (uid, gid, dmode, fmode, fn))
|
||||||
|
|
||||||
|
def MakeSymlinks(self, symlink_list):
|
||||||
|
"""Create symlinks, given a list of (dest, link) pairs."""
|
||||||
|
by_dest = {}
|
||||||
|
for d, l in symlink_list:
|
||||||
|
by_dest.setdefault(d, []).append(l)
|
||||||
|
|
||||||
|
for dest, links in sorted(by_dest.iteritems()):
|
||||||
|
cmd = ('symlink("%s", ' % (dest,) +
|
||||||
|
",\0".join(['"' + i + '"' for i in sorted(links)]) + ");")
|
||||||
|
self.script.append(self._WordWrap(cmd))
|
||||||
|
|
||||||
|
def AppendExtra(self, extra):
|
||||||
|
"""Append text verbatim to the output script."""
|
||||||
|
self.script.append(extra)
|
||||||
|
|
||||||
|
def AddToZip(self, input_zip, output_zip, input_path=None):
|
||||||
|
"""Write the accumulated script to the output_zip file. input_zip
|
||||||
|
is used as the source for the 'updater' binary needed to run
|
||||||
|
script. If input_path is not None, it will be used as a local
|
||||||
|
path for the binary instead of input_zip."""
|
||||||
|
|
||||||
|
for p in sorted(self.mounts):
|
||||||
|
self.script.append('unmount("%s");' % (p,))
|
||||||
|
|
||||||
|
common.ZipWriteStr(output_zip, "META-INF/com/google/android/updater-script",
|
||||||
|
"\n".join(self.script) + "\n")
|
||||||
|
|
||||||
|
if input_path is None:
|
||||||
|
data = input_zip.read("OTA/bin/updater")
|
||||||
|
else:
|
||||||
|
data = open(os.path.join(input_path, "updater")).read()
|
||||||
|
common.ZipWriteStr(output_zip, "META-INF/com/google/android/update-binary",
|
||||||
|
data, perms=0755)
|
@@ -96,7 +96,7 @@ def AddSystem(output_zip):
|
|||||||
img.close()
|
img.close()
|
||||||
|
|
||||||
common.CheckSize(data, "system.img")
|
common.CheckSize(data, "system.img")
|
||||||
output_zip.writestr("system.img", data)
|
common.ZipWriteStr(output_zip, "system.img", data)
|
||||||
|
|
||||||
|
|
||||||
def CopyInfo(output_zip):
|
def CopyInfo(output_zip):
|
||||||
|
@@ -33,6 +33,22 @@ Usage: ota_from_target_files [flags] input_target_files output_ota_package
|
|||||||
Generate an incremental OTA using the given target-files zip as
|
Generate an incremental OTA using the given target-files zip as
|
||||||
the starting build.
|
the starting build.
|
||||||
|
|
||||||
|
-w (--wipe_user_data)
|
||||||
|
Generate an OTA package that will wipe the user data partition
|
||||||
|
when installed.
|
||||||
|
|
||||||
|
-n (--no_prereq)
|
||||||
|
Omit the timestamp prereq check normally included at the top of
|
||||||
|
the build scripts (used for developer OTA packages which
|
||||||
|
legitimately need to go back and forth).
|
||||||
|
|
||||||
|
-e (--extra_script) <file>
|
||||||
|
Insert the contents of file at the end of the update script.
|
||||||
|
|
||||||
|
-m (--script_mode) <mode>
|
||||||
|
Specify 'amend' or 'edify' scripts, or 'auto' to pick
|
||||||
|
automatically (this is the default).
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
@@ -51,6 +67,8 @@ import time
|
|||||||
import zipfile
|
import zipfile
|
||||||
|
|
||||||
import common
|
import common
|
||||||
|
import amend_generator
|
||||||
|
import edify_generator
|
||||||
|
|
||||||
OPTIONS = common.OPTIONS
|
OPTIONS = common.OPTIONS
|
||||||
OPTIONS.package_key = "build/target/product/security/testkey"
|
OPTIONS.package_key = "build/target/product/security/testkey"
|
||||||
@@ -58,6 +76,10 @@ OPTIONS.incremental_source = None
|
|||||||
OPTIONS.require_verbatim = set()
|
OPTIONS.require_verbatim = set()
|
||||||
OPTIONS.prohibit_verbatim = set(("system/build.prop",))
|
OPTIONS.prohibit_verbatim = set(("system/build.prop",))
|
||||||
OPTIONS.patch_threshold = 0.95
|
OPTIONS.patch_threshold = 0.95
|
||||||
|
OPTIONS.wipe_user_data = False
|
||||||
|
OPTIONS.omit_prereq = False
|
||||||
|
OPTIONS.extra_script = None
|
||||||
|
OPTIONS.script_mode = 'auto'
|
||||||
|
|
||||||
def MostPopularKey(d, default):
|
def MostPopularKey(d, default):
|
||||||
"""Given a dict, return the key corresponding to the largest
|
"""Given a dict, return the key corresponding to the largest
|
||||||
@@ -178,11 +200,10 @@ class Item:
|
|||||||
|
|
||||||
return d
|
return d
|
||||||
|
|
||||||
def SetPermissions(self, script, renamer=lambda x: x):
|
def SetPermissions(self, script):
|
||||||
"""Append set_perm/set_perm_recursive commands to 'script' to
|
"""Append set_perm/set_perm_recursive commands to 'script' to
|
||||||
set all permissions, users, and groups for the tree of files
|
set all permissions, users, and groups for the tree of files
|
||||||
rooted at 'self'. 'renamer' turns the filenames stored in the
|
rooted at 'self'."""
|
||||||
tree of Items into the strings used in the script."""
|
|
||||||
|
|
||||||
self.CountChildMetadata()
|
self.CountChildMetadata()
|
||||||
|
|
||||||
@@ -193,22 +214,19 @@ class Item:
|
|||||||
# supposed to be something different.
|
# supposed to be something different.
|
||||||
if item.dir:
|
if item.dir:
|
||||||
if current != item.best_subtree:
|
if current != item.best_subtree:
|
||||||
script.append("set_perm_recursive %d %d 0%o 0%o %s" %
|
script.SetPermissionsRecursive("/"+item.name, *item.best_subtree)
|
||||||
(item.best_subtree + (renamer(item.name),)))
|
|
||||||
current = item.best_subtree
|
current = item.best_subtree
|
||||||
|
|
||||||
if item.uid != current[0] or item.gid != current[1] or \
|
if item.uid != current[0] or item.gid != current[1] or \
|
||||||
item.mode != current[2]:
|
item.mode != current[2]:
|
||||||
script.append("set_perm %d %d 0%o %s" %
|
script.SetPermissions("/"+item.name, item.uid, item.gid, item.mode)
|
||||||
(item.uid, item.gid, item.mode, renamer(item.name)))
|
|
||||||
|
|
||||||
for i in item.children:
|
for i in item.children:
|
||||||
recurse(i, current)
|
recurse(i, current)
|
||||||
else:
|
else:
|
||||||
if item.uid != current[0] or item.gid != current[1] or \
|
if item.uid != current[0] or item.gid != current[1] or \
|
||||||
item.mode != current[3]:
|
item.mode != current[3]:
|
||||||
script.append("set_perm %d %d 0%o %s" %
|
script.SetPermissions("/"+item.name, item.uid, item.gid, item.mode)
|
||||||
(item.uid, item.gid, item.mode, renamer(item.name)))
|
|
||||||
|
|
||||||
recurse(self, (-1, -1, -1, -1))
|
recurse(self, (-1, -1, -1, -1))
|
||||||
|
|
||||||
@@ -230,7 +248,7 @@ def CopySystemFiles(input_zip, output_zip=None,
|
|||||||
basefilename = info.filename[7:]
|
basefilename = info.filename[7:]
|
||||||
if IsSymlink(info):
|
if IsSymlink(info):
|
||||||
symlinks.append((input_zip.read(info.filename),
|
symlinks.append((input_zip.read(info.filename),
|
||||||
"SYSTEM:" + basefilename))
|
"/system/" + basefilename))
|
||||||
else:
|
else:
|
||||||
info2 = copy.copy(info)
|
info2 = copy.copy(info)
|
||||||
fn = info2.filename = "system/" + basefilename
|
fn = info2.filename = "system/" + basefilename
|
||||||
@@ -251,14 +269,6 @@ def CopySystemFiles(input_zip, output_zip=None,
|
|||||||
return symlinks
|
return symlinks
|
||||||
|
|
||||||
|
|
||||||
def AddScript(script, output_zip):
|
|
||||||
now = time.localtime()
|
|
||||||
i = zipfile.ZipInfo("META-INF/com/google/android/update-script",
|
|
||||||
(now.tm_year, now.tm_mon, now.tm_mday,
|
|
||||||
now.tm_hour, now.tm_min, now.tm_sec))
|
|
||||||
output_zip.writestr(i, "\n".join(script) + "\n")
|
|
||||||
|
|
||||||
|
|
||||||
def SignOutput(temp_zip_name, output_zip_name):
|
def SignOutput(temp_zip_name, output_zip_name):
|
||||||
key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
|
key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
|
||||||
pw = key_passwords[OPTIONS.package_key]
|
pw = key_passwords[OPTIONS.package_key]
|
||||||
@@ -266,89 +276,75 @@ def SignOutput(temp_zip_name, output_zip_name):
|
|||||||
common.SignFile(temp_zip_name, output_zip_name, OPTIONS.package_key, pw)
|
common.SignFile(temp_zip_name, output_zip_name, OPTIONS.package_key, pw)
|
||||||
|
|
||||||
|
|
||||||
def SubstituteRoot(s):
|
|
||||||
if s == "system": return "SYSTEM:"
|
|
||||||
assert s.startswith("system/")
|
|
||||||
return "SYSTEM:" + s[7:]
|
|
||||||
|
|
||||||
def FixPermissions(script):
|
def FixPermissions(script):
|
||||||
Item.GetMetadata()
|
Item.GetMetadata()
|
||||||
root = Item.Get("system")
|
root = Item.Get("system")
|
||||||
root.SetPermissions(script, renamer=SubstituteRoot)
|
root.SetPermissions(script)
|
||||||
|
|
||||||
def DeleteFiles(script, to_delete):
|
|
||||||
line = []
|
|
||||||
t = 0
|
|
||||||
for i in to_delete:
|
|
||||||
line.append(i)
|
|
||||||
t += len(i) + 1
|
|
||||||
if t > 80:
|
|
||||||
script.append("delete " + " ".join(line))
|
|
||||||
line = []
|
|
||||||
t = 0
|
|
||||||
if line:
|
|
||||||
script.append("delete " + " ".join(line))
|
|
||||||
|
|
||||||
def AppendAssertions(script, input_zip):
|
def AppendAssertions(script, input_zip):
|
||||||
script.append('assert compatible_with("0.2") == "true"')
|
|
||||||
|
|
||||||
device = GetBuildProp("ro.product.device", input_zip)
|
device = GetBuildProp("ro.product.device", input_zip)
|
||||||
script.append('assert getprop("ro.product.device") == "%s" || '
|
script.AssertDevice(device)
|
||||||
'getprop("ro.build.product") == "%s"' % (device, device))
|
|
||||||
|
|
||||||
info = input_zip.read("OTA/android-info.txt")
|
info = input_zip.read("OTA/android-info.txt")
|
||||||
m = re.search(r"require\s+version-bootloader\s*=\s*(\S+)", info)
|
m = re.search(r"require\s+version-bootloader\s*=\s*(\S+)", info)
|
||||||
if not m:
|
if m:
|
||||||
raise ExternalError("failed to find required bootloaders in "
|
bootloaders = m.group(1).split("|")
|
||||||
"android-info.txt")
|
script.AssertSomeBootloader(*bootloaders)
|
||||||
bootloaders = m.group(1).split("|")
|
|
||||||
script.append("assert " +
|
|
||||||
" || ".join(['getprop("ro.bootloader") == "%s"' % (b,)
|
|
||||||
for b in bootloaders]))
|
|
||||||
|
|
||||||
|
|
||||||
def IncludeBinary(name, input_zip, output_zip):
|
|
||||||
try:
|
|
||||||
data = input_zip.read(os.path.join("OTA/bin", name))
|
|
||||||
output_zip.writestr(name, data)
|
|
||||||
except IOError:
|
|
||||||
raise ExternalError('unable to include device binary "%s"' % (name,))
|
|
||||||
|
|
||||||
|
|
||||||
def WriteFullOTAPackage(input_zip, output_zip):
|
def WriteFullOTAPackage(input_zip, output_zip):
|
||||||
script = []
|
if OPTIONS.script_mode in ("amend", "auto"):
|
||||||
|
script = amend_generator.AmendGenerator()
|
||||||
|
else:
|
||||||
|
# TODO: how to determine this? We don't know what version it will
|
||||||
|
# be installed on top of. For now, we expect the API just won't
|
||||||
|
# change very often.
|
||||||
|
script = edify_generator.EdifyGenerator(1)
|
||||||
|
|
||||||
ts = GetBuildProp("ro.build.date.utc", input_zip)
|
if not OPTIONS.omit_prereq:
|
||||||
script.append("run_program PACKAGE:check_prereq %s" % (ts,))
|
ts = GetBuildProp("ro.build.date.utc", input_zip)
|
||||||
IncludeBinary("check_prereq", input_zip, output_zip)
|
script.AssertOlderBuild(ts)
|
||||||
|
|
||||||
AppendAssertions(script, input_zip)
|
AppendAssertions(script, input_zip)
|
||||||
|
|
||||||
script.append("format BOOT:")
|
script.ShowProgress(0.1, 0)
|
||||||
script.append("show_progress 0.1 0")
|
|
||||||
|
|
||||||
output_zip.writestr("radio.img", input_zip.read("RADIO/image"))
|
try:
|
||||||
script.append("write_radio_image PACKAGE:radio.img")
|
common.ZipWriteStr(output_zip, "radio.img", input_zip.read("RADIO/image"))
|
||||||
script.append("show_progress 0.5 0")
|
script.WriteFirmwareImage("radio", "radio.img")
|
||||||
|
except KeyError:
|
||||||
|
print "warning: no radio image in input target_files; not flashing radio"
|
||||||
|
|
||||||
script.append("format SYSTEM:")
|
script.ShowProgress(0.5, 0)
|
||||||
script.append("copy_dir PACKAGE:system SYSTEM:")
|
|
||||||
|
if OPTIONS.wipe_user_data:
|
||||||
|
script.FormatPartition("userdata")
|
||||||
|
|
||||||
|
script.FormatPartition("system")
|
||||||
|
script.Mount("MTD", "system", "/system")
|
||||||
|
script.UnpackPackageDir("system", "/system")
|
||||||
|
|
||||||
symlinks = CopySystemFiles(input_zip, output_zip)
|
symlinks = CopySystemFiles(input_zip, output_zip)
|
||||||
script.extend(["symlink %s %s" % s for s in symlinks])
|
script.MakeSymlinks(symlinks)
|
||||||
|
|
||||||
common.BuildAndAddBootableImage(os.path.join(OPTIONS.input_tmp, "RECOVERY"),
|
if common.BuildAndAddBootableImage(
|
||||||
"system/recovery.img", output_zip)
|
os.path.join(OPTIONS.input_tmp, "RECOVERY"),
|
||||||
Item.Get("system/recovery.img", dir=False)
|
"system/recovery.img", output_zip):
|
||||||
|
Item.Get("system/recovery.img", dir=False)
|
||||||
|
|
||||||
FixPermissions(script)
|
FixPermissions(script)
|
||||||
|
|
||||||
common.AddBoot(output_zip)
|
common.AddBoot(output_zip)
|
||||||
script.append("show_progress 0.2 0")
|
script.ShowProgress(0.2, 0)
|
||||||
script.append("write_raw_image PACKAGE:boot.img BOOT:")
|
|
||||||
script.append("show_progress 0.2 10")
|
|
||||||
|
|
||||||
AddScript(script, output_zip)
|
script.WriteRawImage("boot", "boot.img")
|
||||||
|
script.ShowProgress(0.2, 10)
|
||||||
|
|
||||||
|
if OPTIONS.extra_script is not None:
|
||||||
|
script.AppendExtra(OPTIONS.extra_script)
|
||||||
|
|
||||||
|
script.AddToZip(input_zip, output_zip)
|
||||||
|
|
||||||
|
|
||||||
class File(object):
|
class File(object):
|
||||||
@@ -365,7 +361,7 @@ class File(object):
|
|||||||
return t
|
return t
|
||||||
|
|
||||||
def AddToZip(self, z):
|
def AddToZip(self, z):
|
||||||
z.writestr(self.name, self.data)
|
common.ZipWriteStr(z, self.name, self.data)
|
||||||
|
|
||||||
|
|
||||||
def LoadSystemFiles(z):
|
def LoadSystemFiles(z):
|
||||||
@@ -380,8 +376,11 @@ def LoadSystemFiles(z):
|
|||||||
return out
|
return out
|
||||||
|
|
||||||
|
|
||||||
def Difference(tf, sf):
|
def Difference(tf, sf, diff_program):
|
||||||
"""Return the patch (as a string of data) needed to turn sf into tf."""
|
"""Return the patch (as a string of data) needed to turn sf into tf.
|
||||||
|
diff_program is the name of an external program (or list, if
|
||||||
|
additional arguments are desired) to run to generate the diff.
|
||||||
|
"""
|
||||||
|
|
||||||
ttemp = tf.WriteToTemp()
|
ttemp = tf.WriteToTemp()
|
||||||
stemp = sf.WriteToTemp()
|
stemp = sf.WriteToTemp()
|
||||||
@@ -390,13 +389,21 @@ def Difference(tf, sf):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
ptemp = tempfile.NamedTemporaryFile()
|
ptemp = tempfile.NamedTemporaryFile()
|
||||||
p = common.Run(["bsdiff", stemp.name, ttemp.name, ptemp.name])
|
if isinstance(diff_program, list):
|
||||||
|
cmd = copy.copy(diff_program)
|
||||||
|
else:
|
||||||
|
cmd = [diff_program]
|
||||||
|
cmd.append(stemp.name)
|
||||||
|
cmd.append(ttemp.name)
|
||||||
|
cmd.append(ptemp.name)
|
||||||
|
p = common.Run(cmd)
|
||||||
_, err = p.communicate()
|
_, err = p.communicate()
|
||||||
if err:
|
if err or p.returncode != 0:
|
||||||
raise ExternalError("failure running bsdiff:\n%s\n" % (err,))
|
print "WARNING: failure running %s:\n%s\n" % (diff_program, err)
|
||||||
|
return None
|
||||||
diff = ptemp.read()
|
diff = ptemp.read()
|
||||||
ptemp.close()
|
|
||||||
finally:
|
finally:
|
||||||
|
ptemp.close()
|
||||||
stemp.close()
|
stemp.close()
|
||||||
ttemp.close()
|
ttemp.close()
|
||||||
|
|
||||||
@@ -411,12 +418,42 @@ def GetBuildProp(property, z):
|
|||||||
return bp
|
return bp
|
||||||
m = re.search(re.escape(property) + r"=(.*)\n", bp)
|
m = re.search(re.escape(property) + r"=(.*)\n", bp)
|
||||||
if not m:
|
if not m:
|
||||||
raise ExternalException("couldn't find %s in build.prop" % (property,))
|
raise common.ExternalError("couldn't find %s in build.prop" % (property,))
|
||||||
return m.group(1).strip()
|
return m.group(1).strip()
|
||||||
|
|
||||||
|
|
||||||
|
def GetRecoveryAPIVersion(zip):
|
||||||
|
"""Returns the version of the recovery API. Version 0 is the older
|
||||||
|
amend code (no separate binary)."""
|
||||||
|
try:
|
||||||
|
version = zip.read("META/recovery-api-version.txt")
|
||||||
|
return int(version)
|
||||||
|
except KeyError:
|
||||||
|
try:
|
||||||
|
# version one didn't have the recovery-api-version.txt file, but
|
||||||
|
# it did include an updater binary.
|
||||||
|
zip.getinfo("OTA/bin/updater")
|
||||||
|
return 1
|
||||||
|
except KeyError:
|
||||||
|
return 0
|
||||||
|
|
||||||
def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip):
|
def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip):
|
||||||
script = []
|
source_version = GetRecoveryAPIVersion(source_zip)
|
||||||
|
|
||||||
|
if OPTIONS.script_mode == 'amend':
|
||||||
|
script = amend_generator.AmendGenerator()
|
||||||
|
elif OPTIONS.script_mode == 'edify':
|
||||||
|
if source_version == 0:
|
||||||
|
print ("WARNING: generating edify script for a source that "
|
||||||
|
"can't install it.")
|
||||||
|
script = edify_generator.EdifyGenerator(source_version)
|
||||||
|
elif OPTIONS.script_mode == 'auto':
|
||||||
|
if source_version > 0:
|
||||||
|
script = edify_generator.EdifyGenerator(source_version)
|
||||||
|
else:
|
||||||
|
script = amend_generator.AmendGenerator()
|
||||||
|
else:
|
||||||
|
raise ValueError('unknown script mode "%s"' % (OPTIONS.script_mode,))
|
||||||
|
|
||||||
print "Loading target..."
|
print "Loading target..."
|
||||||
target_data = LoadSystemFiles(target_zip)
|
target_data = LoadSystemFiles(target_zip)
|
||||||
@@ -433,20 +470,24 @@ def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip):
|
|||||||
if sf is None or fn in OPTIONS.require_verbatim:
|
if sf is None or fn in OPTIONS.require_verbatim:
|
||||||
# This file should be included verbatim
|
# This file should be included verbatim
|
||||||
if fn in OPTIONS.prohibit_verbatim:
|
if fn in OPTIONS.prohibit_verbatim:
|
||||||
raise ExternalError("\"%s\" must be sent verbatim" % (fn,))
|
raise common.ExternalError("\"%s\" must be sent verbatim" % (fn,))
|
||||||
print "send", fn, "verbatim"
|
print "send", fn, "verbatim"
|
||||||
tf.AddToZip(output_zip)
|
tf.AddToZip(output_zip)
|
||||||
verbatim_targets.append((fn, tf.size))
|
verbatim_targets.append((fn, tf.size))
|
||||||
elif tf.sha1 != sf.sha1:
|
elif tf.sha1 != sf.sha1:
|
||||||
# File is different; consider sending as a patch
|
# File is different; consider sending as a patch
|
||||||
d = Difference(tf, sf)
|
diff_method = "bsdiff"
|
||||||
print fn, tf.size, len(d), (float(len(d)) / tf.size)
|
if tf.name.endswith(".gz"):
|
||||||
if len(d) > tf.size * OPTIONS.patch_threshold:
|
diff_method = "imgdiff"
|
||||||
|
d = Difference(tf, sf, diff_method)
|
||||||
|
if d is not None:
|
||||||
|
print fn, tf.size, len(d), (float(len(d)) / tf.size)
|
||||||
|
if d is None or len(d) > tf.size * OPTIONS.patch_threshold:
|
||||||
# patch is almost as big as the file; don't bother patching
|
# patch is almost as big as the file; don't bother patching
|
||||||
tf.AddToZip(output_zip)
|
tf.AddToZip(output_zip)
|
||||||
verbatim_targets.append((fn, tf.size))
|
verbatim_targets.append((fn, tf.size))
|
||||||
else:
|
else:
|
||||||
output_zip.writestr("patch/" + fn + ".p", d)
|
common.ZipWriteStr(output_zip, "patch/" + fn + ".p", d)
|
||||||
patch_list.append((fn, tf, sf, tf.size))
|
patch_list.append((fn, tf, sf, tf.size))
|
||||||
largest_source_size = max(largest_source_size, sf.size)
|
largest_source_size = max(largest_source_size, sf.size)
|
||||||
else:
|
else:
|
||||||
@@ -459,23 +500,24 @@ def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip):
|
|||||||
source_fp = GetBuildProp("ro.build.fingerprint", source_zip)
|
source_fp = GetBuildProp("ro.build.fingerprint", source_zip)
|
||||||
target_fp = GetBuildProp("ro.build.fingerprint", target_zip)
|
target_fp = GetBuildProp("ro.build.fingerprint", target_zip)
|
||||||
|
|
||||||
script.append(('assert file_contains("SYSTEM:build.prop", '
|
script.Mount("MTD", "system", "/system")
|
||||||
'"ro.build.fingerprint=%s") == "true" || '
|
script.AssertSomeFingerprint(source_fp, target_fp)
|
||||||
'file_contains("SYSTEM:build.prop", '
|
|
||||||
'"ro.build.fingerprint=%s") == "true"') %
|
|
||||||
(source_fp, target_fp))
|
|
||||||
|
|
||||||
source_boot = common.BuildBootableImage(
|
source_boot = File("/tmp/boot.img",
|
||||||
os.path.join(OPTIONS.source_tmp, "BOOT"))
|
common.BuildBootableImage(
|
||||||
target_boot = common.BuildBootableImage(
|
os.path.join(OPTIONS.source_tmp, "BOOT")))
|
||||||
os.path.join(OPTIONS.target_tmp, "BOOT"))
|
target_boot = File("/tmp/boot.img",
|
||||||
updating_boot = (source_boot != target_boot)
|
common.BuildBootableImage(
|
||||||
|
os.path.join(OPTIONS.target_tmp, "BOOT")))
|
||||||
|
updating_boot = (source_boot.data != target_boot.data)
|
||||||
|
|
||||||
source_recovery = common.BuildBootableImage(
|
source_recovery = File("system/recovery.img",
|
||||||
os.path.join(OPTIONS.source_tmp, "RECOVERY"))
|
common.BuildBootableImage(
|
||||||
target_recovery = common.BuildBootableImage(
|
os.path.join(OPTIONS.source_tmp, "RECOVERY")))
|
||||||
os.path.join(OPTIONS.target_tmp, "RECOVERY"))
|
target_recovery = File("system/recovery.img",
|
||||||
updating_recovery = (source_recovery != target_recovery)
|
common.BuildBootableImage(
|
||||||
|
os.path.join(OPTIONS.target_tmp, "RECOVERY")))
|
||||||
|
updating_recovery = (source_recovery.data != target_recovery.data)
|
||||||
|
|
||||||
source_radio = source_zip.read("RADIO/image")
|
source_radio = source_zip.read("RADIO/image")
|
||||||
target_radio = target_zip.read("RADIO/image")
|
target_radio = target_zip.read("RADIO/image")
|
||||||
@@ -491,65 +533,110 @@ def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip):
|
|||||||
|
|
||||||
AppendAssertions(script, target_zip)
|
AppendAssertions(script, target_zip)
|
||||||
|
|
||||||
|
script.Print("Verifying current system...")
|
||||||
|
|
||||||
pb_verify = progress_bar_total * 0.3 * \
|
pb_verify = progress_bar_total * 0.3 * \
|
||||||
(total_patched_size /
|
(total_patched_size /
|
||||||
float(total_patched_size+total_verbatim_size))
|
float(total_patched_size+total_verbatim_size+1))
|
||||||
|
|
||||||
for i, (fn, tf, sf, size) in enumerate(patch_list):
|
for i, (fn, tf, sf, size) in enumerate(patch_list):
|
||||||
if i % 5 == 0:
|
if i % 5 == 0:
|
||||||
next_sizes = sum([i[3] for i in patch_list[i:i+5]])
|
next_sizes = sum([i[3] for i in patch_list[i:i+5]])
|
||||||
script.append("show_progress %f 1" %
|
script.ShowProgress(next_sizes * pb_verify / (total_patched_size+1), 1)
|
||||||
(next_sizes * pb_verify / total_patched_size,))
|
|
||||||
script.append("run_program PACKAGE:applypatch -c /%s %s %s" %
|
|
||||||
(fn, tf.sha1, sf.sha1))
|
|
||||||
|
|
||||||
if patch_list:
|
script.PatchCheck("/"+fn, tf.sha1, sf.sha1)
|
||||||
script.append("run_program PACKAGE:applypatch -s %d" %
|
|
||||||
(largest_source_size,))
|
|
||||||
script.append("copy_dir PACKAGE:patch CACHE:../tmp/patchtmp")
|
|
||||||
IncludeBinary("applypatch", target_zip, output_zip)
|
|
||||||
|
|
||||||
script.append("\n# ---- start making changes here\n")
|
if updating_recovery:
|
||||||
|
d = Difference(target_recovery, source_recovery, "imgdiff")
|
||||||
|
print "recovery target: %d source: %d diff: %d" % (
|
||||||
|
target_recovery.size, source_recovery.size, len(d))
|
||||||
|
|
||||||
DeleteFiles(script, [SubstituteRoot(i[0]) for i in verbatim_targets])
|
common.ZipWriteStr(output_zip, "patch/recovery.img.p", d)
|
||||||
|
|
||||||
|
script.PatchCheck("MTD:recovery:%d:%s:%d:%s" %
|
||||||
|
(source_recovery.size, source_recovery.sha1,
|
||||||
|
target_recovery.size, target_recovery.sha1))
|
||||||
|
|
||||||
if updating_boot:
|
if updating_boot:
|
||||||
script.append("format BOOT:")
|
d = Difference(target_boot, source_boot, "imgdiff")
|
||||||
output_zip.writestr("boot.img", target_boot)
|
print "boot target: %d source: %d diff: %d" % (
|
||||||
|
target_boot.size, source_boot.size, len(d))
|
||||||
|
|
||||||
|
common.ZipWriteStr(output_zip, "patch/boot.img.p", d)
|
||||||
|
|
||||||
|
script.PatchCheck("MTD:boot:%d:%s:%d:%s" %
|
||||||
|
(source_boot.size, source_boot.sha1,
|
||||||
|
target_boot.size, target_boot.sha1))
|
||||||
|
|
||||||
|
if patch_list or updating_recovery or updating_boot:
|
||||||
|
script.CacheFreeSpaceCheck(largest_source_size)
|
||||||
|
script.Print("Unpacking patches...")
|
||||||
|
script.UnpackPackageDir("patch", "/tmp/patchtmp")
|
||||||
|
|
||||||
|
script.Comment("---- start making changes here ----")
|
||||||
|
|
||||||
|
if OPTIONS.wipe_user_data:
|
||||||
|
script.Print("Erasing user data...")
|
||||||
|
script.FormatPartition("userdata")
|
||||||
|
|
||||||
|
script.Print("Removing unneeded files...")
|
||||||
|
script.DeleteFiles(["/"+i[0] for i in verbatim_targets] +
|
||||||
|
["/"+i for i in sorted(source_data)
|
||||||
|
if i not in target_data])
|
||||||
|
|
||||||
|
if updating_boot:
|
||||||
|
# Produce the boot image by applying a patch to the current
|
||||||
|
# contents of the boot partition, and write it back to the
|
||||||
|
# partition.
|
||||||
|
script.Print("Patching boot image...")
|
||||||
|
script.ApplyPatch("MTD:boot:%d:%s:%d:%s"
|
||||||
|
% (source_boot.size, source_boot.sha1,
|
||||||
|
target_boot.size, target_boot.sha1),
|
||||||
|
"-",
|
||||||
|
target_boot.size, target_boot.sha1,
|
||||||
|
source_boot.sha1, "/tmp/patchtmp/boot.img.p")
|
||||||
print "boot image changed; including."
|
print "boot image changed; including."
|
||||||
else:
|
else:
|
||||||
print "boot image unchanged; skipping."
|
print "boot image unchanged; skipping."
|
||||||
|
|
||||||
if updating_recovery:
|
if updating_recovery:
|
||||||
output_zip.writestr("system/recovery.img", target_recovery)
|
# Produce /system/recovery.img by applying a patch to the current
|
||||||
|
# contents of the recovery partition.
|
||||||
|
script.Print("Patching recovery image...")
|
||||||
|
script.ApplyPatch("MTD:recovery:%d:%s:%d:%s"
|
||||||
|
% (source_recovery.size, source_recovery.sha1,
|
||||||
|
target_recovery.size, target_recovery.sha1),
|
||||||
|
"/system/recovery.img",
|
||||||
|
target_recovery.size, target_recovery.sha1,
|
||||||
|
source_recovery.sha1, "/tmp/patchtmp/recovery.img.p")
|
||||||
print "recovery image changed; including."
|
print "recovery image changed; including."
|
||||||
else:
|
else:
|
||||||
print "recovery image unchanged; skipping."
|
print "recovery image unchanged; skipping."
|
||||||
|
|
||||||
if updating_radio:
|
if updating_radio:
|
||||||
script.append("show_progress 0.3 10")
|
script.ShowProgress(0.3, 10)
|
||||||
script.append("write_radio_image PACKAGE:radio.img")
|
script.Print("Writing radio image...")
|
||||||
output_zip.writestr("radio.img", target_radio)
|
script.WriteFirmwareImage("radio", "radio.img")
|
||||||
|
common.ZipWriteStr(output_zip, "radio.img", target_radio)
|
||||||
print "radio image changed; including."
|
print "radio image changed; including."
|
||||||
else:
|
else:
|
||||||
print "radio image unchanged; skipping."
|
print "radio image unchanged; skipping."
|
||||||
|
|
||||||
|
script.Print("Patching system files...")
|
||||||
pb_apply = progress_bar_total * 0.7 * \
|
pb_apply = progress_bar_total * 0.7 * \
|
||||||
(total_patched_size /
|
(total_patched_size /
|
||||||
float(total_patched_size+total_verbatim_size))
|
float(total_patched_size+total_verbatim_size+1))
|
||||||
for i, (fn, tf, sf, size) in enumerate(patch_list):
|
for i, (fn, tf, sf, size) in enumerate(patch_list):
|
||||||
if i % 5 == 0:
|
if i % 5 == 0:
|
||||||
next_sizes = sum([i[3] for i in patch_list[i:i+5]])
|
next_sizes = sum([i[3] for i in patch_list[i:i+5]])
|
||||||
script.append("show_progress %f 1" %
|
script.ShowProgress(next_sizes * pb_apply / (total_patched_size+1), 1)
|
||||||
(next_sizes * pb_apply / total_patched_size,))
|
script.ApplyPatch("/"+fn, "-", tf.size, tf.sha1,
|
||||||
script.append(("run_program PACKAGE:applypatch "
|
sf.sha1, "/tmp/patchtmp/"+fn+".p")
|
||||||
"/%s %s %d %s:/tmp/patchtmp/%s.p") %
|
|
||||||
(fn, tf.sha1, tf.size, sf.sha1, fn))
|
|
||||||
|
|
||||||
target_symlinks = CopySystemFiles(target_zip, None)
|
target_symlinks = CopySystemFiles(target_zip, None)
|
||||||
|
|
||||||
target_symlinks_d = dict([(i[1], i[0]) for i in target_symlinks])
|
target_symlinks_d = dict([(i[1], i[0]) for i in target_symlinks])
|
||||||
temp_script = []
|
temp_script = script.MakeTemporary()
|
||||||
FixPermissions(temp_script)
|
FixPermissions(temp_script)
|
||||||
|
|
||||||
# Note that this call will mess up the tree of Items, so make sure
|
# Note that this call will mess up the tree of Items, so make sure
|
||||||
@@ -564,14 +651,17 @@ def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip):
|
|||||||
for dest, link in source_symlinks:
|
for dest, link in source_symlinks:
|
||||||
if link not in target_symlinks_d:
|
if link not in target_symlinks_d:
|
||||||
to_delete.append(link)
|
to_delete.append(link)
|
||||||
DeleteFiles(script, to_delete)
|
script.DeleteFiles(to_delete)
|
||||||
|
|
||||||
if verbatim_targets:
|
if verbatim_targets:
|
||||||
pb_verbatim = progress_bar_total * \
|
pb_verbatim = progress_bar_total * \
|
||||||
(total_verbatim_size /
|
(total_verbatim_size /
|
||||||
float(total_patched_size+total_verbatim_size))
|
float(total_patched_size+total_verbatim_size+1))
|
||||||
script.append("show_progress %f 5" % (pb_verbatim,))
|
script.ShowProgress(pb_verbatim, 5)
|
||||||
script.append("copy_dir PACKAGE:system SYSTEM:")
|
script.Print("Unpacking new files...")
|
||||||
|
script.UnpackPackageDir("system", "/system")
|
||||||
|
|
||||||
|
script.Print("Finishing up...")
|
||||||
|
|
||||||
# Create all the symlinks that don't already exist, or point to
|
# Create all the symlinks that don't already exist, or point to
|
||||||
# somewhere different than what we want. Delete each symlink before
|
# somewhere different than what we want. Delete each symlink before
|
||||||
@@ -583,18 +673,17 @@ def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip):
|
|||||||
to_create.append((dest, link))
|
to_create.append((dest, link))
|
||||||
else:
|
else:
|
||||||
to_create.append((dest, link))
|
to_create.append((dest, link))
|
||||||
DeleteFiles(script, [i[1] for i in to_create])
|
script.DeleteFiles([i[1] for i in to_create])
|
||||||
script.extend(["symlink %s %s" % s for s in to_create])
|
script.MakeSymlinks(to_create)
|
||||||
|
|
||||||
# Now that the symlinks are created, we can set all the
|
# Now that the symlinks are created, we can set all the
|
||||||
# permissions.
|
# permissions.
|
||||||
script.extend(temp_script)
|
script.AppendScript(temp_script)
|
||||||
|
|
||||||
if updating_boot:
|
if OPTIONS.extra_script is not None:
|
||||||
script.append("show_progress 0.1 5")
|
scirpt.AppendExtra(OPTIONS.extra_script)
|
||||||
script.append("write_raw_image PACKAGE:boot.img BOOT:")
|
|
||||||
|
|
||||||
AddScript(script, output_zip)
|
script.AddToZip(target_zip, output_zip)
|
||||||
|
|
||||||
|
|
||||||
def main(argv):
|
def main(argv):
|
||||||
@@ -602,21 +691,31 @@ def main(argv):
|
|||||||
def option_handler(o, a):
|
def option_handler(o, a):
|
||||||
if o in ("-b", "--board_config"):
|
if o in ("-b", "--board_config"):
|
||||||
common.LoadBoardConfig(a)
|
common.LoadBoardConfig(a)
|
||||||
return True
|
|
||||||
elif o in ("-k", "--package_key"):
|
elif o in ("-k", "--package_key"):
|
||||||
OPTIONS.package_key = a
|
OPTIONS.package_key = a
|
||||||
return True
|
|
||||||
elif o in ("-i", "--incremental_from"):
|
elif o in ("-i", "--incremental_from"):
|
||||||
OPTIONS.incremental_source = a
|
OPTIONS.incremental_source = a
|
||||||
return True
|
elif o in ("-w", "--wipe_user_data"):
|
||||||
|
OPTIONS.wipe_user_data = True
|
||||||
|
elif o in ("-n", "--no_prereq"):
|
||||||
|
OPTIONS.omit_prereq = True
|
||||||
|
elif o in ("-e", "--extra_script"):
|
||||||
|
OPTIONS.extra_script = a
|
||||||
|
elif o in ("-m", "--script_mode"):
|
||||||
|
OPTIONS.script_mode = a
|
||||||
else:
|
else:
|
||||||
return False
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
args = common.ParseOptions(argv, __doc__,
|
args = common.ParseOptions(argv, __doc__,
|
||||||
extra_opts="b:k:i:d:",
|
extra_opts="b:k:i:d:wne:m:",
|
||||||
extra_long_opts=["board_config=",
|
extra_long_opts=["board_config=",
|
||||||
"package_key=",
|
"package_key=",
|
||||||
"incremental_from="],
|
"incremental_from=",
|
||||||
|
"wipe_user_data",
|
||||||
|
"no_prereq",
|
||||||
|
"extra_script=",
|
||||||
|
"script_mode="],
|
||||||
extra_option_handler=option_handler)
|
extra_option_handler=option_handler)
|
||||||
|
|
||||||
if len(args) != 2:
|
if len(args) != 2:
|
||||||
@@ -630,6 +729,12 @@ def main(argv):
|
|||||||
print " images don't exceed partition sizes."
|
print " images don't exceed partition sizes."
|
||||||
print
|
print
|
||||||
|
|
||||||
|
if OPTIONS.script_mode not in ("amend", "edify", "auto"):
|
||||||
|
raise ValueError('unknown script mode "%s"' % (OPTIONS.script_mode,))
|
||||||
|
|
||||||
|
if OPTIONS.extra_script is not None:
|
||||||
|
OPTIONS.extra_script = open(OPTIONS.extra_script).read()
|
||||||
|
|
||||||
print "unzipping target target-files..."
|
print "unzipping target target-files..."
|
||||||
OPTIONS.input_tmp = common.UnzipTemp(args[0])
|
OPTIONS.input_tmp = common.UnzipTemp(args[0])
|
||||||
OPTIONS.target_tmp = OPTIONS.input_tmp
|
OPTIONS.target_tmp = OPTIONS.input_tmp
|
||||||
|
@@ -47,6 +47,20 @@ Usage: sign_target_files_apks [flags] input_target_files output_target_files
|
|||||||
|
|
||||||
-d and -k options are added to the set of mappings in the order
|
-d and -k options are added to the set of mappings in the order
|
||||||
in which they appear on the command line.
|
in which they appear on the command line.
|
||||||
|
|
||||||
|
-o (--replace_ota_keys)
|
||||||
|
Replace the certificate (public key) used by OTA package
|
||||||
|
verification with the one specified in the input target_files
|
||||||
|
zip (in the META/otakeys.txt file). Key remapping (-k and -d)
|
||||||
|
is performed on this key.
|
||||||
|
|
||||||
|
-t (--tag_changes) <+tag>,<-tag>,...
|
||||||
|
Comma-separated list of changes to make to the set of tags (in
|
||||||
|
the last component of the build fingerprint). Prefix each with
|
||||||
|
'+' or '-' to indicate whether that tag should be added or
|
||||||
|
removed. Changes are processed in the order they appear.
|
||||||
|
Default value is "-test-keys,+ota-rel-keys,+release-keys".
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
@@ -55,6 +69,8 @@ if sys.hexversion < 0x02040000:
|
|||||||
print >> sys.stderr, "Python 2.4 or newer is required."
|
print >> sys.stderr, "Python 2.4 or newer is required."
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
import cStringIO
|
||||||
|
import copy
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import subprocess
|
import subprocess
|
||||||
@@ -67,7 +83,8 @@ OPTIONS = common.OPTIONS
|
|||||||
|
|
||||||
OPTIONS.extra_apks = {}
|
OPTIONS.extra_apks = {}
|
||||||
OPTIONS.key_map = {}
|
OPTIONS.key_map = {}
|
||||||
|
OPTIONS.replace_ota_keys = False
|
||||||
|
OPTIONS.tag_changes = ("-test-keys", "+ota-rel-keys", "+release-keys")
|
||||||
|
|
||||||
def GetApkCerts(tf_zip):
|
def GetApkCerts(tf_zip):
|
||||||
certmap = {}
|
certmap = {}
|
||||||
@@ -84,6 +101,85 @@ def GetApkCerts(tf_zip):
|
|||||||
return certmap
|
return certmap
|
||||||
|
|
||||||
|
|
||||||
|
def CheckAllApksSigned(input_tf_zip, apk_key_map):
|
||||||
|
"""Check that all the APKs we want to sign have keys specified, and
|
||||||
|
error out if they don't."""
|
||||||
|
unknown_apks = []
|
||||||
|
for info in input_tf_zip.infolist():
|
||||||
|
if info.filename.endswith(".apk"):
|
||||||
|
name = os.path.basename(info.filename)
|
||||||
|
if name not in apk_key_map:
|
||||||
|
unknown_apks.append(name)
|
||||||
|
if unknown_apks:
|
||||||
|
print "ERROR: no key specified for:\n\n ",
|
||||||
|
print "\n ".join(unknown_apks)
|
||||||
|
print "\nUse '-e <apkname>=' to specify a key (which may be an"
|
||||||
|
print "empty string to not sign this apk)."
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
def SharedUserForApk(data):
|
||||||
|
tmp = tempfile.NamedTemporaryFile()
|
||||||
|
tmp.write(data)
|
||||||
|
tmp.flush()
|
||||||
|
|
||||||
|
p = common.Run(["aapt", "dump", "xmltree", tmp.name, "AndroidManifest.xml"],
|
||||||
|
stdout=subprocess.PIPE)
|
||||||
|
data, _ = p.communicate()
|
||||||
|
if p.returncode != 0:
|
||||||
|
raise ExternalError("failed to run aapt dump")
|
||||||
|
lines = data.split("\n")
|
||||||
|
for i in lines:
|
||||||
|
m = re.match(r'^\s*A: android:sharedUserId\([0-9a-fx]*\)="([^"]*)" .*$', i)
|
||||||
|
if m:
|
||||||
|
return m.group(1)
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def CheckSharedUserIdsConsistent(input_tf_zip, apk_key_map):
|
||||||
|
"""Check that all packages that request the same shared user id are
|
||||||
|
going to be signed with the same key."""
|
||||||
|
|
||||||
|
shared_user_apks = {}
|
||||||
|
maxlen = len("(unknown key)")
|
||||||
|
|
||||||
|
for info in input_tf_zip.infolist():
|
||||||
|
if info.filename.endswith(".apk"):
|
||||||
|
data = input_tf_zip.read(info.filename)
|
||||||
|
|
||||||
|
name = os.path.basename(info.filename)
|
||||||
|
shared_user = SharedUserForApk(data)
|
||||||
|
key = apk_key_map[name]
|
||||||
|
maxlen = max(maxlen, len(key))
|
||||||
|
|
||||||
|
if shared_user is not None:
|
||||||
|
shared_user_apks.setdefault(
|
||||||
|
shared_user, {}).setdefault(key, []).append(name)
|
||||||
|
|
||||||
|
errors = []
|
||||||
|
for k, v in shared_user_apks.iteritems():
|
||||||
|
# each shared user should have exactly one key used for all the
|
||||||
|
# apks that want that user.
|
||||||
|
if len(v) > 1:
|
||||||
|
errors.append((k, v))
|
||||||
|
|
||||||
|
if not errors: return
|
||||||
|
|
||||||
|
print "ERROR: shared user inconsistency. All apks wanting to use"
|
||||||
|
print " a given shared user must be signed with the same key."
|
||||||
|
print
|
||||||
|
errors.sort()
|
||||||
|
for user, keys in errors:
|
||||||
|
print 'shared user id "%s":' % (user,)
|
||||||
|
for key, apps in keys.iteritems():
|
||||||
|
print ' %-*s %s' % (maxlen, key or "(unknown key)", apps[0])
|
||||||
|
for a in apps[1:]:
|
||||||
|
print (' ' * (maxlen+5)) + a
|
||||||
|
print
|
||||||
|
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
def SignApk(data, keyname, pw):
|
def SignApk(data, keyname, pw):
|
||||||
unsigned = tempfile.NamedTemporaryFile()
|
unsigned = tempfile.NamedTemporaryFile()
|
||||||
unsigned.write(data)
|
unsigned.write(data)
|
||||||
@@ -100,44 +196,107 @@ def SignApk(data, keyname, pw):
|
|||||||
return data
|
return data
|
||||||
|
|
||||||
|
|
||||||
def SignApks(input_tf_zip, output_tf_zip):
|
def SignApks(input_tf_zip, output_tf_zip, apk_key_map, key_passwords):
|
||||||
apk_key_map = GetApkCerts(input_tf_zip)
|
|
||||||
|
|
||||||
key_passwords = common.GetKeyPasswords(set(apk_key_map.values()))
|
|
||||||
|
|
||||||
maxsize = max([len(os.path.basename(i.filename))
|
maxsize = max([len(os.path.basename(i.filename))
|
||||||
for i in input_tf_zip.infolist()
|
for i in input_tf_zip.infolist()
|
||||||
if i.filename.endswith('.apk')])
|
if i.filename.endswith('.apk')])
|
||||||
|
|
||||||
for info in input_tf_zip.infolist():
|
for info in input_tf_zip.infolist():
|
||||||
data = input_tf_zip.read(info.filename)
|
data = input_tf_zip.read(info.filename)
|
||||||
|
out_info = copy.copy(info)
|
||||||
if info.filename.endswith(".apk"):
|
if info.filename.endswith(".apk"):
|
||||||
name = os.path.basename(info.filename)
|
name = os.path.basename(info.filename)
|
||||||
key = apk_key_map.get(name, None)
|
key = apk_key_map[name]
|
||||||
if key is not None:
|
if key:
|
||||||
print "signing: %-*s (%s)" % (maxsize, name, key)
|
print " signing: %-*s (%s)" % (maxsize, name, key)
|
||||||
signed_data = SignApk(data, key, key_passwords[key])
|
signed_data = SignApk(data, key, key_passwords[key])
|
||||||
output_tf_zip.writestr(info, signed_data)
|
output_tf_zip.writestr(out_info, signed_data)
|
||||||
else:
|
else:
|
||||||
# an APK we're not supposed to sign.
|
# an APK we're not supposed to sign.
|
||||||
print "skipping: %s" % (name,)
|
print "NOT signing: %s" % (name,)
|
||||||
output_tf_zip.writestr(info, data)
|
output_tf_zip.writestr(out_info, data)
|
||||||
elif info.filename == "SYSTEM/build.prop":
|
elif info.filename in ("SYSTEM/build.prop",
|
||||||
# Change build fingerprint to reflect the fact that apps are signed.
|
"RECOVERY/RAMDISK/default.prop"):
|
||||||
m = re.search(r"ro\.build\.fingerprint=.*\b(test-keys)\b.*", data)
|
print "rewriting %s:" % (info.filename,)
|
||||||
if not m:
|
new_data = RewriteProps(data)
|
||||||
print 'WARNING: ro.build.fingerprint does not contain "test-keys"'
|
output_tf_zip.writestr(out_info, new_data)
|
||||||
else:
|
|
||||||
data = data[:m.start(1)] + "release-keys" + data[m.end(1):]
|
|
||||||
m = re.search(r"ro\.build\.description=.*\b(test-keys)\b.*", data)
|
|
||||||
if not m:
|
|
||||||
print 'WARNING: ro.build.description does not contain "test-keys"'
|
|
||||||
else:
|
|
||||||
data = data[:m.start(1)] + "release-keys" + data[m.end(1):]
|
|
||||||
output_tf_zip.writestr(info, data)
|
|
||||||
else:
|
else:
|
||||||
# a non-APK file; copy it verbatim
|
# a non-APK file; copy it verbatim
|
||||||
output_tf_zip.writestr(info, data)
|
output_tf_zip.writestr(out_info, data)
|
||||||
|
|
||||||
|
|
||||||
|
def RewriteProps(data):
|
||||||
|
output = []
|
||||||
|
for line in data.split("\n"):
|
||||||
|
line = line.strip()
|
||||||
|
original_line = line
|
||||||
|
if line and line[0] != '#':
|
||||||
|
key, value = line.split("=", 1)
|
||||||
|
if key == "ro.build.fingerprint":
|
||||||
|
pieces = line.split("/")
|
||||||
|
tags = set(pieces[-1].split(","))
|
||||||
|
for ch in OPTIONS.tag_changes:
|
||||||
|
if ch[0] == "-":
|
||||||
|
tags.discard(ch[1:])
|
||||||
|
elif ch[0] == "+":
|
||||||
|
tags.add(ch[1:])
|
||||||
|
line = "/".join(pieces[:-1] + [",".join(sorted(tags))])
|
||||||
|
elif key == "ro.build.description":
|
||||||
|
pieces = line.split(" ")
|
||||||
|
assert len(pieces) == 5
|
||||||
|
tags = set(pieces[-1].split(","))
|
||||||
|
for ch in OPTIONS.tag_changes:
|
||||||
|
if ch[0] == "-":
|
||||||
|
tags.discard(ch[1:])
|
||||||
|
elif ch[0] == "+":
|
||||||
|
tags.add(ch[1:])
|
||||||
|
line = " ".join(pieces[:-1] + [",".join(sorted(tags))])
|
||||||
|
if line != original_line:
|
||||||
|
print " replace: ", original_line
|
||||||
|
print " with: ", line
|
||||||
|
output.append(line)
|
||||||
|
return "\n".join(output) + "\n"
|
||||||
|
|
||||||
|
|
||||||
|
def ReplaceOtaKeys(input_tf_zip, output_tf_zip):
|
||||||
|
try:
|
||||||
|
keylist = input_tf_zip.read("META/otakeys.txt").split()
|
||||||
|
except KeyError:
|
||||||
|
raise ExternalError("can't read META/otakeys.txt from input")
|
||||||
|
|
||||||
|
mapped_keys = []
|
||||||
|
for k in keylist:
|
||||||
|
m = re.match(r"^(.*)\.x509\.pem$", k)
|
||||||
|
if not m:
|
||||||
|
raise ExternalError("can't parse \"%s\" from META/otakeys.txt" % (k,))
|
||||||
|
k = m.group(1)
|
||||||
|
mapped_keys.append(OPTIONS.key_map.get(k, k) + ".x509.pem")
|
||||||
|
|
||||||
|
print "using:\n ", "\n ".join(mapped_keys)
|
||||||
|
print "for OTA package verification"
|
||||||
|
|
||||||
|
# recovery uses a version of the key that has been slightly
|
||||||
|
# predigested (by DumpPublicKey.java) and put in res/keys.
|
||||||
|
|
||||||
|
p = common.Run(["java", "-jar",
|
||||||
|
os.path.join(OPTIONS.search_path, "framework", "dumpkey.jar")]
|
||||||
|
+ mapped_keys,
|
||||||
|
stdout=subprocess.PIPE)
|
||||||
|
data, _ = p.communicate()
|
||||||
|
if p.returncode != 0:
|
||||||
|
raise ExternalError("failed to run dumpkeys")
|
||||||
|
common.ZipWriteStr(output_tf_zip, "RECOVERY/RAMDISK/res/keys", data)
|
||||||
|
|
||||||
|
# SystemUpdateActivity uses the x509.pem version of the keys, but
|
||||||
|
# put into a zipfile system/etc/security/otacerts.zip.
|
||||||
|
|
||||||
|
tempfile = cStringIO.StringIO()
|
||||||
|
certs_zip = zipfile.ZipFile(tempfile, "w")
|
||||||
|
for k in mapped_keys:
|
||||||
|
certs_zip.write(k)
|
||||||
|
certs_zip.close()
|
||||||
|
common.ZipWriteStr(output_tf_zip, "SYSTEM/etc/security/otacerts.zip",
|
||||||
|
tempfile.getvalue())
|
||||||
|
|
||||||
|
|
||||||
def main(argv):
|
def main(argv):
|
||||||
@@ -160,16 +319,28 @@ def main(argv):
|
|||||||
elif o in ("-k", "--key_mapping"):
|
elif o in ("-k", "--key_mapping"):
|
||||||
s, d = a.split("=")
|
s, d = a.split("=")
|
||||||
OPTIONS.key_map[s] = d
|
OPTIONS.key_map[s] = d
|
||||||
|
elif o in ("-o", "--replace_ota_keys"):
|
||||||
|
OPTIONS.replace_ota_keys = True
|
||||||
|
elif o in ("-t", "--tag_changes"):
|
||||||
|
new = []
|
||||||
|
for i in a.split(","):
|
||||||
|
i = i.strip()
|
||||||
|
if not i or i[0] not in "-+":
|
||||||
|
raise ValueError("Bad tag change '%s'" % (i,))
|
||||||
|
new.append(i[0] + i[1:].strip())
|
||||||
|
OPTIONS.tag_changes = tuple(new)
|
||||||
else:
|
else:
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
args = common.ParseOptions(argv, __doc__,
|
args = common.ParseOptions(argv, __doc__,
|
||||||
extra_opts="s:e:d:k:",
|
extra_opts="s:e:d:k:ot:",
|
||||||
extra_long_opts=["signapk_jar=",
|
extra_long_opts=["signapk_jar=",
|
||||||
"extra_apks=",
|
"extra_apks=",
|
||||||
"default_key_mappings=",
|
"default_key_mappings=",
|
||||||
"key_mapping="],
|
"key_mapping=",
|
||||||
|
"replace_ota_keys",
|
||||||
|
"tag_changes="],
|
||||||
extra_option_handler=option_handler)
|
extra_option_handler=option_handler)
|
||||||
|
|
||||||
if len(args) != 2:
|
if len(args) != 2:
|
||||||
@@ -179,7 +350,15 @@ def main(argv):
|
|||||||
input_zip = zipfile.ZipFile(args[0], "r")
|
input_zip = zipfile.ZipFile(args[0], "r")
|
||||||
output_zip = zipfile.ZipFile(args[1], "w")
|
output_zip = zipfile.ZipFile(args[1], "w")
|
||||||
|
|
||||||
SignApks(input_zip, output_zip)
|
apk_key_map = GetApkCerts(input_zip)
|
||||||
|
CheckAllApksSigned(input_zip, apk_key_map)
|
||||||
|
CheckSharedUserIdsConsistent(input_zip, apk_key_map)
|
||||||
|
|
||||||
|
key_passwords = common.GetKeyPasswords(set(apk_key_map.values()))
|
||||||
|
SignApks(input_zip, output_zip, apk_key_map, key_passwords)
|
||||||
|
|
||||||
|
if OPTIONS.replace_ota_keys:
|
||||||
|
ReplaceOtaKeys(input_zip, output_zip)
|
||||||
|
|
||||||
input_zip.close()
|
input_zip.close()
|
||||||
output_zip.close()
|
output_zip.close()
|
||||||
|
@@ -62,6 +62,7 @@ import java.util.jar.JarEntry;
|
|||||||
import java.util.jar.JarFile;
|
import java.util.jar.JarFile;
|
||||||
import java.util.jar.JarOutputStream;
|
import java.util.jar.JarOutputStream;
|
||||||
import java.util.jar.Manifest;
|
import java.util.jar.Manifest;
|
||||||
|
import java.util.regex.Pattern;
|
||||||
import javax.crypto.Cipher;
|
import javax.crypto.Cipher;
|
||||||
import javax.crypto.EncryptedPrivateKeyInfo;
|
import javax.crypto.EncryptedPrivateKeyInfo;
|
||||||
import javax.crypto.SecretKeyFactory;
|
import javax.crypto.SecretKeyFactory;
|
||||||
@@ -75,6 +76,10 @@ class SignApk {
|
|||||||
private static final String CERT_SF_NAME = "META-INF/CERT.SF";
|
private static final String CERT_SF_NAME = "META-INF/CERT.SF";
|
||||||
private static final String CERT_RSA_NAME = "META-INF/CERT.RSA";
|
private static final String CERT_RSA_NAME = "META-INF/CERT.RSA";
|
||||||
|
|
||||||
|
// Files matching this pattern are not copied to the output.
|
||||||
|
private static Pattern stripPattern =
|
||||||
|
Pattern.compile("^META-INF/(.*)[.](SF|RSA|DSA)$");
|
||||||
|
|
||||||
private static X509Certificate readPublicKey(File file)
|
private static X509Certificate readPublicKey(File file)
|
||||||
throws IOException, GeneralSecurityException {
|
throws IOException, GeneralSecurityException {
|
||||||
FileInputStream input = new FileInputStream(file);
|
FileInputStream input = new FileInputStream(file);
|
||||||
@@ -193,7 +198,9 @@ class SignApk {
|
|||||||
for (JarEntry entry: byName.values()) {
|
for (JarEntry entry: byName.values()) {
|
||||||
String name = entry.getName();
|
String name = entry.getName();
|
||||||
if (!entry.isDirectory() && !name.equals(JarFile.MANIFEST_NAME) &&
|
if (!entry.isDirectory() && !name.equals(JarFile.MANIFEST_NAME) &&
|
||||||
!name.equals(CERT_SF_NAME) && !name.equals(CERT_RSA_NAME)) {
|
!name.equals(CERT_SF_NAME) && !name.equals(CERT_RSA_NAME) &&
|
||||||
|
(stripPattern == null ||
|
||||||
|
!stripPattern.matcher(name).matches())) {
|
||||||
InputStream data = jar.getInputStream(entry);
|
InputStream data = jar.getInputStream(entry);
|
||||||
while ((num = data.read(buffer)) > 0) {
|
while ((num = data.read(buffer)) > 0) {
|
||||||
md.update(buffer, 0, num);
|
md.update(buffer, 0, num);
|
||||||
|
@@ -30,7 +30,8 @@ void usage(void)
|
|||||||
{
|
{
|
||||||
fprintf(stderr, "Zip alignment utility\n");
|
fprintf(stderr, "Zip alignment utility\n");
|
||||||
fprintf(stderr,
|
fprintf(stderr,
|
||||||
"Usage: zipalign [-f] [-v] <align> infile.zip outfile.zip\n");
|
"Usage: zipalign [-f] [-v] <align> infile.zip outfile.zip\n"
|
||||||
|
" zipalign -c [-v] <align> infile.zip\n" );
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
@@ -152,14 +153,14 @@ static int verify(const char* fileName, int alignment, bool verbose)
|
|||||||
pEntry = zipFile.getEntryByIndex(i);
|
pEntry = zipFile.getEntryByIndex(i);
|
||||||
if (pEntry->isCompressed()) {
|
if (pEntry->isCompressed()) {
|
||||||
if (verbose) {
|
if (verbose) {
|
||||||
printf("%8ld %s (OK - compressed)\n",
|
printf("%8ld %s (OK - compressed)\n",
|
||||||
(long) pEntry->getFileOffset(), pEntry->getFileName());
|
(long) pEntry->getFileOffset(), pEntry->getFileName());
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
long offset = pEntry->getFileOffset();
|
long offset = pEntry->getFileOffset();
|
||||||
if ((offset % alignment) != 0) {
|
if ((offset % alignment) != 0) {
|
||||||
if (verbose) {
|
if (verbose) {
|
||||||
printf("%8ld %s (BAD - %ld)\n",
|
printf("%8ld %s (BAD - %ld)\n",
|
||||||
(long) offset, pEntry->getFileName(),
|
(long) offset, pEntry->getFileName(),
|
||||||
offset % alignment);
|
offset % alignment);
|
||||||
}
|
}
|
||||||
@@ -185,6 +186,7 @@ static int verify(const char* fileName, int alignment, bool verbose)
|
|||||||
int main(int argc, char* const argv[])
|
int main(int argc, char* const argv[])
|
||||||
{
|
{
|
||||||
bool wantUsage = false;
|
bool wantUsage = false;
|
||||||
|
bool check = false;
|
||||||
bool force = false;
|
bool force = false;
|
||||||
bool verbose = false;
|
bool verbose = false;
|
||||||
int result = 1;
|
int result = 1;
|
||||||
@@ -204,6 +206,9 @@ int main(int argc, char* const argv[])
|
|||||||
|
|
||||||
while (*cp != '\0') {
|
while (*cp != '\0') {
|
||||||
switch (*cp) {
|
switch (*cp) {
|
||||||
|
case 'c':
|
||||||
|
check = true;
|
||||||
|
break;
|
||||||
case 'f':
|
case 'f':
|
||||||
force = true;
|
force = true;
|
||||||
break;
|
break;
|
||||||
@@ -223,7 +228,7 @@ int main(int argc, char* const argv[])
|
|||||||
argv++;
|
argv++;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (argc != 3) {
|
if (!((check && argc == 2) || (!check && argc == 3))) {
|
||||||
wantUsage = true;
|
wantUsage = true;
|
||||||
goto bail;
|
goto bail;
|
||||||
}
|
}
|
||||||
@@ -235,12 +240,17 @@ int main(int argc, char* const argv[])
|
|||||||
goto bail;
|
goto bail;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* create the new archive */
|
if (check) {
|
||||||
result = process(argv[1], argv[2], alignment, force);
|
/* check existing archive for correct alignment */
|
||||||
|
result = verify(argv[1], alignment, verbose);
|
||||||
|
} else {
|
||||||
|
/* create the new archive */
|
||||||
|
result = process(argv[1], argv[2], alignment, force);
|
||||||
|
|
||||||
/* trust, but verify */
|
/* trust, but verify */
|
||||||
if (result == 0)
|
if (result == 0)
|
||||||
result = verify(argv[2], alignment, verbose);
|
result = verify(argv[2], alignment, verbose);
|
||||||
|
}
|
||||||
|
|
||||||
bail:
|
bail:
|
||||||
if (wantUsage) {
|
if (wantUsage) {
|
||||||
@@ -250,4 +260,3 @@ bail:
|
|||||||
|
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Reference in New Issue
Block a user