Invoke delta_generator directly
Currently, ota_from_target_files (a python script) calls brillo_update_payload (a bash script), which then calls delta_generator (a C++ binary) to do the actual diffing. Having brillo_update_payload in the middle does not offer any additional flexibility, but it makes maintenance more difficult. Bash code is less readable and harder to debug. Furthermore, every time we want to add a new flag to delta_generator, we have to add the flag in three places: ota_from_target_files, brillo_update_payload, delta_generator. Historically, brillo_update_payload was there because we inherited it from the ChromeOS side. This bash script extracts target_files.zip and invokes delta_generator to generate the actual OTA. Any customizations we want on OTA must be implemented by modifying the input target_files.zip. Manipulating big zip files is slow and inefficient. To make things simpler/faster, remove the intermediary call to brillo_update_payload. ota_from_target_files will now extract target files and call delta_generator directly. Test: th Bug: 227848550 Change-Id: I44b296e43bf7921bcf48ef6a1e4021a12669739c
This commit is contained in:
@@ -4001,3 +4001,26 @@ def IsSparseImage(filepath):
|
||||
# Magic for android sparse image format
|
||||
# https://source.android.com/devices/bootloader/images
|
||||
return fp.read(4) == b'\x3A\xFF\x26\xED'
|
||||
|
||||
def ParseUpdateEngineConfig(path: str):
  """Parse the update_engine config stored in file `path`.

  Args:
    path: Path to update_engine_config.txt file in target_files

  Returns:
    A tuple of (major, minor) version numbers, e.g. (2, 8)

  Raises:
    ValueError: if either PAYLOAD_MAJOR_VERSION or PAYLOAD_MINOR_VERSION is
      absent from the config file.
  """
  with open(path, "r") as fp:
    # update_engine_config.txt is only supposed to contain two lines,
    # PAYLOAD_MAJOR_VERSION and PAYLOAD_MINOR_VERSION. 1024 should be more
    # than sufficient. If the length is more than that, something is wrong.
    data = fp.read(1024)
  versions = []
  # Major is searched before minor so the error message for a missing major
  # version matches the historical behavior.
  for key in ("PAYLOAD_MAJOR_VERSION", "PAYLOAD_MINOR_VERSION"):
    match = re.search(key + r"=(\d+)", data)
    if match is None:
      raise ValueError(
          f"{path} is an invalid update_engine config, missing {key} {data}")
    versions.append(int(match.group(1)))
  return tuple(versions)
|
@@ -29,6 +29,7 @@ import common
|
||||
import merge_utils
|
||||
import sparse_img
|
||||
import verity_utils
|
||||
from ota_utils import ParseUpdateEngineConfig
|
||||
|
||||
from common import ExternalError
|
||||
|
||||
@@ -52,28 +53,6 @@ PARTITION_TAG_PATTERN = re.compile(r'partition="(.*?)"')
|
||||
MODULE_KEY_PATTERN = re.compile(r'name="(.+)\.(apex|apk)"')
|
||||
|
||||
|
||||
def ParseUpdateEngineConfig(path: str):
  """Parse the update_engine config stored in file `path`.

  Args:
    path: Path to update_engine_config.txt file in target_files

  Returns:
    A tuple of (major, minor) version numbers, e.g. (2, 8)

  Raises:
    ValueError: if the config is missing either version entry.
  """
  def _extract(config_text, key):
    # Each entry looks like KEY=<decimal>; anything else is malformed.
    found = re.search(key + r"=(\d+)", config_text)
    if not found:
      raise ValueError(
          f"{path} is an invalid update_engine config, missing {key} {config_text}")
    return int(found.group(1))

  with open(path, "r") as fp:
    # update_engine_config.txt is only supposed to contain two lines,
    # PAYLOAD_MAJOR_VERSION and PAYLOAD_MINOR_VERSION. 1024 should be more
    # than sufficient. If the length is more than that, something is wrong.
    data = fp.read(1024)
  # Tuple elements evaluate left-to-right, so a missing major version is
  # reported before a missing minor version, as before.
  return (_extract(data, "PAYLOAD_MAJOR_VERSION"),
          _extract(data, "PAYLOAD_MINOR_VERSION"))
|
||||
|
||||
|
||||
def MergeUpdateEngineConfig(input_metadir1, input_metadir2, merged_meta_dir):
|
||||
|
@@ -906,7 +906,8 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None):
|
||||
OPTIONS.enable_vabc_xor = False
|
||||
|
||||
if OPTIONS.vabc_compression_param == "none":
|
||||
logger.info("VABC Compression algorithm is set to 'none', disabling VABC xor")
|
||||
logger.info(
|
||||
"VABC Compression algorithm is set to 'none', disabling VABC xor")
|
||||
OPTIONS.enable_vabc_xor = False
|
||||
additional_args = []
|
||||
|
||||
@@ -922,7 +923,6 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None):
|
||||
elif OPTIONS.partial:
|
||||
target_file = GetTargetFilesZipForPartialUpdates(target_file,
|
||||
OPTIONS.partial)
|
||||
additional_args += ["--is_partial_update", "true"]
|
||||
elif OPTIONS.vabc_compression_param:
|
||||
target_file = GetTargetFilesZipForCustomVABCCompression(
|
||||
target_file, OPTIONS.vabc_compression_param)
|
||||
@@ -938,7 +938,8 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None):
|
||||
# Metadata to comply with Android OTA package format.
|
||||
metadata = GetPackageMetadata(target_info, source_info)
|
||||
# Generate payload.
|
||||
payload = PayloadGenerator(wipe_user_data=OPTIONS.wipe_user_data)
|
||||
payload = PayloadGenerator(
|
||||
wipe_user_data=OPTIONS.wipe_user_data, minor_version=OPTIONS.force_minor_version, is_partial_update=OPTIONS.partial)
|
||||
|
||||
partition_timestamps_flags = []
|
||||
# Enforce a max timestamp this payload can be applied on top of.
|
||||
@@ -965,7 +966,7 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None):
|
||||
|
||||
additional_args += ["--security_patch_level", security_patch_level]
|
||||
|
||||
additional_args += ["--enable_zucchini",
|
||||
additional_args += ["--enable_zucchini=" +
|
||||
str(OPTIONS.enable_zucchini).lower()]
|
||||
|
||||
if not ota_utils.IsLz4diffCompatible(source_file, target_file):
|
||||
@@ -973,7 +974,7 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None):
|
||||
"Source build doesn't support lz4diff, or source/target don't have compatible lz4diff versions. Disabling lz4diff.")
|
||||
OPTIONS.enable_lz4diff = False
|
||||
|
||||
additional_args += ["--enable_lz4diff",
|
||||
additional_args += ["--enable_lz4diff=" +
|
||||
str(OPTIONS.enable_lz4diff).lower()]
|
||||
|
||||
if source_file and OPTIONS.enable_lz4diff:
|
||||
@@ -989,20 +990,13 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None):
|
||||
additional_args += ["--erofs_compression_param", erofs_compression_param]
|
||||
|
||||
if OPTIONS.disable_vabc:
|
||||
additional_args += ["--disable_vabc", "true"]
|
||||
additional_args += ["--disable_vabc=true"]
|
||||
if OPTIONS.enable_vabc_xor:
|
||||
additional_args += ["--enable_vabc_xor", "true"]
|
||||
if OPTIONS.force_minor_version:
|
||||
additional_args += ["--force_minor_version", OPTIONS.force_minor_version]
|
||||
additional_args += ["--enable_vabc_xor=true"]
|
||||
if OPTIONS.compressor_types:
|
||||
additional_args += ["--compressor_types", OPTIONS.compressor_types]
|
||||
additional_args += ["--max_timestamp", max_timestamp]
|
||||
|
||||
if SupportsMainlineGkiUpdates(source_file):
|
||||
logger.warning(
|
||||
"Detected build with mainline GKI, include full boot image.")
|
||||
additional_args.extend(["--full_boot", "true"])
|
||||
|
||||
payload.Generate(
|
||||
target_file,
|
||||
source_file,
|
||||
@@ -1363,7 +1357,8 @@ def main(argv):
|
||||
"what(even if data wipe is done), so SPL downgrade on any "
|
||||
"release-keys build is not allowed.".format(target_spl, source_spl))
|
||||
|
||||
logger.info("SPL downgrade on %s", target_build_prop.GetProp("ro.build.tags"))
|
||||
logger.info("SPL downgrade on %s",
|
||||
target_build_prop.GetProp("ro.build.tags"))
|
||||
if is_spl_downgrade and not OPTIONS.spl_downgrade and not OPTIONS.downgrade:
|
||||
raise common.ExternalError(
|
||||
"Target security patch level {} is older than source SPL {} applying "
|
||||
|
@@ -25,7 +25,7 @@ import common
|
||||
from common import (ZipDelete, OPTIONS, MakeTempFile,
|
||||
ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
|
||||
SignFile, PARTITIONS_WITH_BUILD_PROP, PartitionBuildProps,
|
||||
GetRamdiskFormat)
|
||||
GetRamdiskFormat, ParseUpdateEngineConfig)
|
||||
from payload_signer import PayloadSigner
|
||||
|
||||
|
||||
@@ -135,7 +135,8 @@ def FinalizeMetadata(metadata, input_file, output_file, needed_property_files=No
|
||||
logger.info(f"Signing disabled for output file {output_file}")
|
||||
shutil.copy(prelim_signing, output_file)
|
||||
else:
|
||||
logger.info(f"Signing the output file {output_file} with key {package_key}")
|
||||
logger.info(
|
||||
f"Signing the output file {output_file} with key {package_key}")
|
||||
SignOutput(prelim_signing, output_file, package_key, pw)
|
||||
|
||||
# Reopen the final signed zip to double check the streaming metadata.
|
||||
@@ -721,6 +722,45 @@ def IsZucchiniCompatible(source_file: str, target_file: str):
|
||||
return sourceEntry and targetEntry and sourceEntry == targetEntry
|
||||
|
||||
|
||||
def ExtractTargetFiles(path: str):
  """Return a directory containing the extracted target files.

  If `path` is already a directory it is returned as-is; otherwise the zip
  archive at `path` is unzipped (filtered by UNZIP_PATTERN) into a fresh
  temporary directory, which is returned.
  """
  if not os.path.isdir(path):
    unzip_dir = common.MakeTempDir("target_files")
    common.UnzipToDir(path, unzip_dir, UNZIP_PATTERN)
    return unzip_dir
  logger.info("target files %s is already extracted", path)
  return path
|
||||
|
||||
|
||||
def LocatePartitionPath(target_files_dir: str, partition: str, allow_empty):
  """Locate the .img file for `partition` inside extracted target files.

  RADIO/ takes precedence over IMAGES/. Returns "" when the image is absent
  and `allow_empty` is truthy; raises ExternalError otherwise.
  """
  for subdir in ("RADIO", "IMAGES"):
    candidate = os.path.join(target_files_dir, subdir, partition + ".img")
    if os.path.exists(candidate):
      return candidate
  if allow_empty:
    return ""
  raise common.ExternalError(
      "Partition {} not found in target files {}".format(partition, target_files_dir))


def GetPartitionImages(target_files_dir: str, ab_partitions, allow_empty=True):
  """Return a ':'-joined string of image paths for each A/B partition."""
  assert os.path.isdir(target_files_dir)
  image_paths = [LocatePartitionPath(target_files_dir, name, allow_empty)
                 for name in ab_partitions]
  return ":".join(image_paths)
|
||||
|
||||
|
||||
def LocatePartitionMap(target_files_dir: str, partition: str):
  """Return the path to `partition`'s .map file under RADIO/, or "" if absent."""
  mapfile = os.path.join(target_files_dir, "RADIO", partition + ".map")
  return mapfile if os.path.exists(mapfile) else ""


def GetPartitionMaps(target_files_dir: str, ab_partitions):
  """Return a ':'-joined string of map-file paths, one entry per partition.

  Partitions with no map file contribute an empty entry.
  """
  assert os.path.isdir(target_files_dir)
  return ":".join(
      LocatePartitionMap(target_files_dir, name) for name in ab_partitions)
|
||||
|
||||
|
||||
class PayloadGenerator(object):
|
||||
"""Manages the creation and the signing of an A/B OTA Payload."""
|
||||
|
||||
@@ -729,7 +769,7 @@ class PayloadGenerator(object):
|
||||
SECONDARY_PAYLOAD_BIN = 'secondary/payload.bin'
|
||||
SECONDARY_PAYLOAD_PROPERTIES_TXT = 'secondary/payload_properties.txt'
|
||||
|
||||
def __init__(self, secondary=False, wipe_user_data=False):
|
||||
def __init__(self, secondary=False, wipe_user_data=False, minor_version=None, is_partial_update=False):
|
||||
"""Initializes a Payload instance.
|
||||
|
||||
Args:
|
||||
@@ -739,6 +779,8 @@ class PayloadGenerator(object):
|
||||
self.payload_properties = None
|
||||
self.secondary = secondary
|
||||
self.wipe_user_data = wipe_user_data
|
||||
self.minor_version = minor_version
|
||||
self.is_partial_update = is_partial_update
|
||||
|
||||
def _Run(self, cmd): # pylint: disable=no-self-use
|
||||
# Don't pipe (buffer) the output if verbose is set. Let
|
||||
@@ -757,21 +799,53 @@ class PayloadGenerator(object):
|
||||
source_file: The filename of the source build target-files zip; or None if
|
||||
generating a full OTA.
|
||||
additional_args: A list of additional args that should be passed to
|
||||
brillo_update_payload script; or None.
|
||||
delta_generator binary; or None.
|
||||
"""
|
||||
if additional_args is None:
|
||||
additional_args = []
|
||||
|
||||
payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
|
||||
cmd = ["brillo_update_payload", "generate",
|
||||
"--payload", payload_file,
|
||||
"--target_image", target_file]
|
||||
target_dir = ExtractTargetFiles(target_file)
|
||||
cmd = ["delta_generator",
|
||||
"--out_file", payload_file]
|
||||
with open(os.path.join(target_dir, "META", "ab_partitions.txt")) as fp:
|
||||
ab_partitions = fp.read().strip().split("\n")
|
||||
cmd.extend(["--partition_names", ":".join(ab_partitions)])
|
||||
cmd.extend(
|
||||
["--new_partitions", GetPartitionImages(target_dir, ab_partitions, False)])
|
||||
cmd.extend(
|
||||
["--new_mapfiles", GetPartitionMaps(target_dir, ab_partitions)])
|
||||
if source_file is not None:
|
||||
cmd.extend(["--source_image", source_file])
|
||||
source_dir = ExtractTargetFiles(source_file)
|
||||
cmd.extend(
|
||||
["--old_partitions", GetPartitionImages(source_dir, ab_partitions, True)])
|
||||
cmd.extend(
|
||||
["--old_mapfiles", GetPartitionMaps(source_dir, ab_partitions)])
|
||||
|
||||
if OPTIONS.disable_fec_computation:
|
||||
cmd.extend(["--disable_fec_computation", "true"])
|
||||
cmd.extend(["--disable_fec_computation=true"])
|
||||
if OPTIONS.disable_verity_computation:
|
||||
cmd.extend(["--disable_verity_computation", "true"])
|
||||
cmd.extend(["--disable_verity_computation=true"])
|
||||
postinstall_config = os.path.join(
|
||||
target_dir, "META", "postinstall_config.txt")
|
||||
|
||||
if os.path.exists(postinstall_config):
|
||||
cmd.extend(["--new_postinstall_config_file", postinstall_config])
|
||||
dynamic_partition_info = os.path.join(
|
||||
target_dir, "META", "dynamic_partitions_info.txt")
|
||||
|
||||
if os.path.exists(dynamic_partition_info):
|
||||
cmd.extend(["--dynamic_partition_info_file", dynamic_partition_info])
|
||||
|
||||
major_version, minor_version = ParseUpdateEngineConfig(
|
||||
os.path.join(target_dir, "META", "update_engine_config.txt"))
|
||||
if self.minor_version:
|
||||
minor_version = self.minor_version
|
||||
cmd.extend(["--major_version", str(major_version)])
|
||||
if source_file is not None or self.is_partial_update:
|
||||
cmd.extend(["--minor_version", str(minor_version)])
|
||||
if self.is_partial_update:
|
||||
cmd.extend(["--is_partial_update=true"])
|
||||
cmd.extend(additional_args)
|
||||
self._Run(cmd)
|
||||
|
||||
|
Reference in New Issue
Block a user