Generate partition timestamps in ota_from_target_files
Test: make an OTA package
Bug: 162553432
Change-Id: I17b9f1b24307255b1e5115de12fa516126b32365
@@ -118,7 +118,7 @@ AVB_PARTITIONS = ('boot', 'dtbo', 'odm', 'product', 'recovery', 'system',
 AVB_VBMETA_PARTITIONS = ('vbmeta_system', 'vbmeta_vendor')
 
 # Partitions that should have their care_map added to META/care_map.pb
-PARTITIONS_WITH_CARE_MAP = (
+PARTITIONS_WITH_CARE_MAP = [
     'system',
     'vendor',
     'product',
@@ -126,7 +126,7 @@ PARTITIONS_WITH_CARE_MAP = (
     'odm',
     'vendor_dlkm',
     'odm_dlkm',
-)
+]
 
 
 class ErrorCode(object):
@@ -729,10 +729,14 @@ def LoadInfoDict(input_file, repacking=False):
       fingerprint = build_info.GetPartitionFingerprint(partition)
       if fingerprint:
         d["avb_{}_salt".format(partition)] = sha256(fingerprint.encode()).hexdigest()
+  try:
+    d["ab_partitions"] = read_helper("META/ab_partitions.txt").split("\n")
+  except KeyError:
+    logger.warning("Can't find META/ab_partitions.txt")
   return d
 
 
 def LoadListFromFile(file_path):
   with open(file_path) as f:
     return f.read().splitlines()
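The new LoadInfoDict lines cache the newline-separated contents of META/ab_partitions.txt in the info dict, falling back to a warning when the entry is absent. Below is a minimal standalone sketch of the same lookup, reading the list straight from a target-files zip; the helper name, the decode step, and the example filename are illustrative and not part of this change.

import zipfile

def read_ab_partitions(target_files_zip_path):
  """Illustrative only: mirrors the new LoadInfoDict lookup of
  META/ab_partitions.txt (newline-separated partition names)."""
  with zipfile.ZipFile(target_files_zip_path, 'r', allowZip64=True) as zfp:
    try:
      data = zfp.read('META/ab_partitions.txt')
    except KeyError:
      # Same condition the warning above handles: older target_files
      # archives may not carry the list.
      return []
    return data.decode('utf-8').strip().split('\n')

# read_ab_partitions('example-target_files.zip')
#   -> e.g. ['boot', 'system', 'vendor', ...]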
@@ -825,31 +825,49 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None):
                                  compression=zipfile.ZIP_DEFLATED)
 
   if source_file is not None:
+    assert "ab_partitions" in OPTIONS.source_info_dict, \
+        "META/ab_partitions.txt is required for ab_update."
+    assert "ab_partitions" in OPTIONS.target_info_dict, \
+        "META/ab_partitions.txt is required for ab_update."
     target_info = common.BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
     source_info = common.BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
   else:
+    assert "ab_partitions" in OPTIONS.info_dict, \
+        "META/ab_partitions.txt is required for ab_update."
     target_info = common.BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
     source_info = None
 
-  # Metadata to comply with Android OTA package format.
-  metadata = GetPackageMetadata(target_info, source_info)
-
   if OPTIONS.retrofit_dynamic_partitions:
     target_file = GetTargetFilesZipForRetrofitDynamicPartitions(
         target_file, target_info.get("super_block_devices").strip().split(),
         target_info.get("dynamic_partition_list").strip().split())
   elif OPTIONS.skip_postinstall:
     target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file)
+  # Target_file may have been modified, reparse ab_partitions
+  with zipfile.ZipFile(target_file, allowZip64=True) as zfp:
+    target_info.info_dict['ab_partitions'] = zfp.read(
+        AB_PARTITIONS).strip().split("\n")
 
+  # Metadata to comply with Android OTA package format.
+  metadata = GetPackageMetadata(target_info, source_info)
   # Generate payload.
   payload = Payload()
 
+  partition_timestamps = []
   # Enforce a max timestamp this payload can be applied on top of.
   if OPTIONS.downgrade:
     max_timestamp = source_info.GetBuildProp("ro.build.date.utc")
   else:
     max_timestamp = str(metadata.postcondition.timestamp)
+    partition_timestamps = [
+        part.partition_name + ":" + part.version
+        for part in metadata.postcondition.partition_state]
   additional_args = ["--max_timestamp", max_timestamp]
+  if partition_timestamps:
+    additional_args.extend(
+        ["--partition_timestamps", ",".join(
+            partition_timestamps)]
+    )
 
   payload.Generate(target_file, source_file, additional_args)
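The per-partition versions come from the OTA metadata's postcondition and are serialized as comma-separated name:version pairs before being appended to the payload generation arguments. A small sketch of the string being built, with plain tuples standing in for the metadata.postcondition.partition_state protobuf entries; the timestamp values are made up.

# Stand-ins for metadata.postcondition.partition_state; the real entries are
# protobuf messages carrying partition_name and version fields.
partition_state = [
    ('boot', '1596234344'),
    ('system', '1596234344'),
    ('vendor', '1596234340'),
]

partition_timestamps = [
    name + ":" + version for name, version in partition_state]

additional_args = ["--max_timestamp", "1596234344"]
if partition_timestamps:
  additional_args.extend(
      ["--partition_timestamps", ",".join(partition_timestamps)])

# additional_args ->
# ['--max_timestamp', '1596234344', '--partition_timestamps',
#  'boot:1596234344,system:1596234344,vendor:1596234340']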
@@ -162,10 +162,18 @@ def UpdateDeviceState(device_state, build_info, boot_variable_values,
 
   def UpdatePartitionStates(partition_states):
     """Update the per-partition state according to its build.prop"""
+    if not build_info.is_ab:
+      return
     build_info_set = ComputeRuntimeBuildInfos(build_info,
                                               boot_variable_values)
-    for partition in PARTITIONS_WITH_CARE_MAP:
+    assert "ab_partitions" in build_info.info_dict,\
+        "ab_partitions property required for ab update."
+    ab_partitions = set(build_info.info_dict.get("ab_partitions"))
+
+    # delta_generator will error out on unused timestamps,
+    # so only generate timestamps for dynamic partitions
+    # used in OTA update.
+    for partition in sorted(set(PARTITIONS_WITH_CARE_MAP) & ab_partitions):
       partition_prop = build_info.info_dict.get(
           '{}.build.prop'.format(partition))
       # Skip if the partition is missing, or it doesn't have a build.prop
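As the new comments note, delta_generator rejects timestamps for partitions it is not updating, so the loop only visits partitions that appear both in PARTITIONS_WITH_CARE_MAP and in the device's ab_partitions list. A hypothetical illustration of that filtering, with made-up partition sets:

# Hypothetical partition sets; only names present in both collections are
# visited, and sorted() keeps the iteration order deterministic.
PARTITIONS_WITH_CARE_MAP = ['system', 'vendor', 'product', 'odm',
                            'vendor_dlkm', 'odm_dlkm']
ab_partitions = {'boot', 'system', 'vendor'}  # from META/ab_partitions.txt

for partition in sorted(set(PARTITIONS_WITH_CARE_MAP) & ab_partitions):
  print(partition)  # prints 'system', then 'vendor'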
@@ -30,7 +30,7 @@ from ota_from_target_files import (
     GetTargetFilesZipForSecondaryImages,
     GetTargetFilesZipWithoutPostinstallConfig,
     Payload, PayloadSigner, POSTINSTALL_CONFIG,
-    StreamingPropertyFiles)
+    StreamingPropertyFiles, AB_PARTITIONS)
 from test_utils import PropertyFilesTestCase
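The test module now also imports AB_PARTITIONS, which the fixtures below use as a dictionary key and which GenerateAbOtaPackage reads from the zip. From its usage in this change it appears to name the META/ab_partitions.txt archive entry; the value below is an assumption, not taken from the diff.

# Assumption inferred from zfp.read(AB_PARTITIONS) above and the writeFiles
# keys below; the constant itself is defined in ota_from_target_files.
AB_PARTITIONS = 'META/ab_partitions.txt'

fixture = {AB_PARTITIONS: '\n'.join(['system', 'vendor'])}
# fixture == {'META/ab_partitions.txt': 'system\nvendor'}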
@@ -179,6 +179,7 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
   def test_GetPackageMetadata_abOta_full(self):
     target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
     target_info_dict['ab_update'] = 'true'
+    target_info_dict['ab_partitions'] = []
     target_info = common.BuildInfo(target_info_dict, None)
     metadata = self.GetLegacyOtaMetadata(target_info)
     self.assertDictEqual(
@@ -197,6 +198,7 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
   def test_GetPackageMetadata_abOta_incremental(self):
     target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
     target_info_dict['ab_update'] = 'true'
+    target_info_dict['ab_partitions'] = []
     target_info = common.BuildInfo(target_info_dict, None)
     source_info = common.BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
     common.OPTIONS.incremental_source = ''
@@ -904,7 +906,8 @@ class AbOtaPropertyFilesTest(PropertyFilesTestCase):
     with zipfile.ZipFile(zip_file, 'r') as zip_fp:
       raw_metadata = property_files.GetPropertyFilesString(
           zip_fp, reserve_space=False)
-      property_files_string = property_files.Finalize(zip_fp, len(raw_metadata))
+      property_files_string = property_files.Finalize(
+          zip_fp, len(raw_metadata))
 
       tokens = self._parse_property_files_string(property_files_string)
       # "7" includes the four entries above, two metadata entries, and one entry
@@ -1198,6 +1201,7 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
       'recovery_api_version=3',
       'fstab_version=2',
       'recovery_as_boot=true',
+      'ab_update=true',
   ]
 
   BUILD_PROP = [
@@ -1359,6 +1363,7 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
             'ro.product.vendor.name=vendor-product-std',
         'VENDOR/etc/build_pro.prop':
             'ro.product.vendor.name=vendor-product-pro',
+        AB_PARTITIONS: '\n'.join(['system', 'vendor']),
     }, self.test_dir)
 
     common.OPTIONS.boot_variable_file = common.MakeTempFile()
@@ -1410,6 +1415,8 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
         'import /vendor/etc/build_${ro.boot.sku_name}.prop',
     ])
     self.writeFiles({
+        'META/misc_info.txt': '\n'.join(self.MISC_INFO),
+        'META/ab_partitions.txt': '\n'.join(['system', 'vendor', 'product']),
         'SYSTEM/build.prop': '\n'.join(self.BUILD_PROP),
         'VENDOR/build.prop': '\n'.join(vendor_build_prop),
         'VENDOR/etc/build_std.prop':
@@ -1446,6 +1453,7 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
     ]
     self.writeFiles({
         'META/misc_info.txt': '\n'.join(self.MISC_INFO),
+        'META/ab_partitions.txt': '\n'.join(['system', 'vendor', 'product']),
         'SYSTEM/build.prop': '\n'.join(source_build_prop),
         'VENDOR/build.prop': '\n'.join(vendor_build_prop),
         'VENDOR/etc/build_std.prop':