Define the protobuf for OTA metadata
Background in http://go/android-partial-updates. For a partial update (e.g. system-only) or devices with a mixed build, the current fingerprint and device name no longer suffice as the precondition to install the package. Therefore, we additionally include the per-partition build props in the OTA metadata. We also define a protobuf for the metadata so it can be extended later. The metadata in the legacy format is kept for backward compatibility.

Bug: 151088567
Test: unit tests pass; generate an OTA and check the result
Change-Id: I716f7da54a393cd340280dbddc3c92b3460f8ef8
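With this change an OTA package carries the metadata twice: a serialized OtaMetadata protobuf at META-INF/com/android/metadata.pb and the legacy key=value entry at META-INF/com/android/metadata. As a minimal sketch (not part of this change; the package path and helper name are illustrative), a client-side Python tool could read both entries back with the ota_metadata_pb2 module generated from the new proto rule:

import zipfile

import ota_metadata_pb2  # generated from ota_metadata.proto below

METADATA_NAME = 'META-INF/com/android/metadata'
METADATA_PROTO_NAME = 'META-INF/com/android/metadata.pb'


def dump_ota_metadata(package_path):
  """Prints the proto metadata and the legacy metadata of an OTA package."""
  with zipfile.ZipFile(package_path) as package:
    # New, extensible format: a serialized OtaMetadata message.
    metadata_proto = ota_metadata_pb2.OtaMetadata()
    metadata_proto.ParseFromString(package.read(METADATA_PROTO_NAME))
    print('ota type:', metadata_proto.type)
    print('post-build fingerprints:', list(metadata_proto.postcondition.build))

    # Legacy format, kept for backward compatibility.
    legacy = package.read(METADATA_NAME).decode()
    print(dict(line.split('=', 1) for line in legacy.splitlines() if line))


# Usage (path is illustrative): dump_ota_metadata('ota_update.zip')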
@@ -89,16 +89,35 @@ python_defaults {
     ],
 }
 
+python_library_host {
+    name: "ota_metadata_proto",
+    version: {
+        py2: {
+            enabled: true,
+        },
+        py3: {
+            enabled: true,
+        },
+    },
+    srcs: [
+        "ota_metadata.proto",
+    ],
+    proto: {
+        canonical_path_from_root: false,
+    },
+}
+
 python_defaults {
     name: "releasetools_ota_from_target_files_defaults",
     srcs: [
         "edify_generator.py",
-        "ota_from_target_files.py",
         "non_ab_ota.py",
-        "target_files_diff.py",
+        "ota_from_target_files.py",
         "ota_utils.py",
+        "target_files_diff.py",
     ],
     libs: [
+        "ota_metadata_proto",
         "releasetools_check_target_files_vintf",
         "releasetools_common",
         "releasetools_verity_utils",
@@ -276,7 +276,7 @@ endif;
 
   script.SetProgress(1)
   script.AddToZip(input_zip, output_zip, input_path=OPTIONS.updater_binary)
-  metadata["ota-required-cache"] = str(script.required_cache)
+  metadata.required_cache = script.required_cache
 
   # We haven't written the metadata entry, which will be done in
   # FinalizeMetadata.
@@ -530,7 +530,7 @@ endif;
     script.AddToZip(source_zip, output_zip, input_path=OPTIONS.updater_binary)
   else:
     script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary)
-  metadata["ota-required-cache"] = str(script.required_cache)
+  metadata.required_cache = script.required_cache
 
   # We haven't written the metadata entry yet, which will be handled in
   # FinalizeMetadata().
@@ -848,7 +848,7 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None):
   if OPTIONS.downgrade:
     max_timestamp = source_info.GetBuildProp("ro.build.date.utc")
   else:
-    max_timestamp = metadata["post-timestamp"]
+    max_timestamp = str(metadata.postcondition.timestamp)
   additional_args = ["--max_timestamp", max_timestamp]
 
   payload.Generate(target_file, source_file, additional_args)
tools/releasetools/ota_metadata.proto (new file, 88 lines)
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+syntax = "proto3";
+
+package build.tools.releasetools;
+option optimize_for = LITE_RUNTIME;
+
+// The build information of a particular partition on the device.
+message PartitionState {
+  string partition_name = 1;
+  repeated string device = 2;
+  repeated string build = 3;
+  // The version string of the partition. It's usually the timestamp if
+  // present. One known exception is the boot image, which uses the kmi
+  // version, e.g. 5.4.42-android12-0
+  string version = 4;
+
+  // TODO(xunchang) revisit other necessary fields, e.g. security_patch_level.
+}
+
+// The build information on the device. The bytes of the running images are
+// thus inferred from the device state. For more information on the meaning of
+// each subfield, check
+// https://source.android.com/compatibility/android-cdd#3_2_2_build_parameters
+message DeviceState {
+  // Device name, i.e. ro.product.device. If the field has multiple values, it
+  // means the OTA package supports multiple devices. This usually happens when
+  // we use the same image to support multiple SKUs.
+  repeated string device = 1;
+  // Device fingerprint. Up to the R build, the value is read from
+  // ro.build.fingerprint.
+  repeated string build = 2;
+  // A value that specifies a version of the Android build.
+  string build_incremental = 3;
+  // The timestamp when the build was generated.
+  int64 timestamp = 4;
+  // The version of the currently-executing Android system.
+  string sdk_level = 5;
+  // A value indicating the security patch level of a build.
+  string security_patch_level = 6;
+
+  // The detailed state of each partition. For partial updates or devices with
+  // a mixed build of partitions, some of the above fields may be left empty,
+  // and the client will rely on the information of the specific partitions to
+  // target the update.
+  repeated PartitionState partition_state = 7;
+}
+
+// The metadata of an OTA package. It contains the information of the package
+// and the prerequisites to install the update correctly.
+message OtaMetadata {
+  enum OtaType {
+    AB = 0;
+    BLOCK = 1;
+  };
+  OtaType type = 1;
+  // True if we need to wipe after the update.
+  bool wipe = 2;
+  // True if the timestamp of the post build is older than the pre build.
+  bool downgrade = 3;
+  // A map of name:content of property files, e.g. ota-property-files.
+  map<string, string> property_files = 4;
+
+  // The required device state in order to install the package.
+  DeviceState precondition = 5;
+  // The expected device state after the update.
+  DeviceState postcondition = 6;
+
+  // True if the OTA updates a device to support dynamic partitions, where the
+  // source build doesn't support them.
+  bool retrofit_dynamic_partitions = 7;
+  // The required size of the cache partition; only valid for non-A/B updates.
+  int64 required_cache = 8;
+}
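For reference, a minimal sketch of filling in the new messages from Python through the ota_metadata_pb2 module that the Android.bp rule above generates; every property value below is illustrative, not taken from a real build:

import ota_metadata_pb2

metadata = ota_metadata_pb2.OtaMetadata()
metadata.type = ota_metadata_pb2.OtaMetadata.AB

# Postcondition describes the device after a successful install.
post = metadata.postcondition
post.device.append('example_device')
post.build.append('brand/product/example_device:11/BUILDID/6543210:user/release-keys')
post.build_incremental = '6543210'
post.timestamp = 1590000000
post.sdk_level = '30'
post.security_patch_level = '2020-05-05'

# Per-partition state is what enables partial updates and mixed builds.
system = post.partition_state.add()
system.partition_name = 'system'
system.device.append('example_device')
system.version = '1590000000'

serialized = metadata.SerializeToString()  # the bytes written to metadata.pb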
@@ -17,6 +17,7 @@ import itertools
 import os
 import zipfile
 
+import ota_metadata_pb2
 from common import (ZipDelete, ZipClose, OPTIONS, MakeTempFile,
                     ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
                     SignFile, PARTITIONS_WITH_CARE_MAP, PartitionBuildProps)
@@ -34,6 +35,7 @@ OPTIONS.output_metadata_path = None
 OPTIONS.boot_variable_file = None
 
 METADATA_NAME = 'META-INF/com/android/metadata'
+METADATA_PROTO_NAME = 'META-INF/com/android/metadata.pb'
 UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*', 'RADIO/*']
 
 
@@ -62,11 +64,12 @@ def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
   # Write the current metadata entry with placeholders.
   with zipfile.ZipFile(input_file) as input_zip:
     for property_files in needed_property_files:
-      metadata[property_files.name] = property_files.Compute(input_zip)
+      metadata.property_files[property_files.name] = property_files.Compute(
+          input_zip)
     namelist = input_zip.namelist()
 
-  if METADATA_NAME in namelist:
-    ZipDelete(input_file, METADATA_NAME)
+  if METADATA_NAME in namelist or METADATA_PROTO_NAME in namelist:
+    ZipDelete(input_file, [METADATA_NAME, METADATA_PROTO_NAME])
   output_zip = zipfile.ZipFile(input_file, 'a')
   WriteMetadata(metadata, output_zip)
   ZipClose(output_zip)
@@ -81,8 +84,9 @@ def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
   def FinalizeAllPropertyFiles(prelim_signing, needed_property_files):
     with zipfile.ZipFile(prelim_signing) as prelim_signing_zip:
       for property_files in needed_property_files:
-        metadata[property_files.name] = property_files.Finalize(
-            prelim_signing_zip, len(metadata[property_files.name]))
+        metadata.property_files[property_files.name] = property_files.Finalize(
+            prelim_signing_zip,
+            len(metadata.property_files[property_files.name]))
 
   # SignOutput(), which in turn calls signapk.jar, will possibly reorder the ZIP
   # entries, as well as padding the entry headers. We do a preliminary signing
@@ -103,7 +107,7 @@ def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
   FinalizeAllPropertyFiles(prelim_signing, needed_property_files)
 
   # Replace the METADATA entry.
-  ZipDelete(prelim_signing, METADATA_NAME)
+  ZipDelete(prelim_signing, [METADATA_NAME, METADATA_PROTO_NAME])
   output_zip = zipfile.ZipFile(prelim_signing, 'a')
   WriteMetadata(metadata, output_zip)
   ZipClose(output_zip)
@@ -117,7 +121,8 @@ def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
   # Reopen the final signed zip to double check the streaming metadata.
   with zipfile.ZipFile(output_file) as output_zip:
     for property_files in needed_property_files:
-      property_files.Verify(output_zip, metadata[property_files.name].strip())
+      property_files.Verify(
+          output_zip, metadata.property_files[property_files.name].strip())
 
   # If requested, dump the metadata to a separate file.
   output_metadata_path = OPTIONS.output_metadata_path
@@ -125,30 +130,60 @@ def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
     WriteMetadata(metadata, output_metadata_path)
 
 
-def WriteMetadata(metadata, output):
+def WriteMetadata(metadata_proto, output):
   """Writes the metadata to the zip archive or a file.
 
   Args:
-    metadata: The metadata dict for the package.
-    output: A ZipFile object or a string of the output file path.
+    metadata_proto: The metadata protobuf for the package.
+    output: A ZipFile object or a string of the output file path. If a string
+      path is given, the metadata in the protobuf format will be written to
+      {output}.pb, e.g. ota_metadata.pb
   """
 
-  value = "".join(["%s=%s\n" % kv for kv in sorted(metadata.items())])
+  metadata_dict = BuildLegacyOtaMetadata(metadata_proto)
+  legacy_metadata = "".join(["%s=%s\n" % kv for kv in
+                             sorted(metadata_dict.items())])
   if isinstance(output, zipfile.ZipFile):
-    ZipWriteStr(output, METADATA_NAME, value,
+    ZipWriteStr(output, METADATA_PROTO_NAME, metadata_proto.SerializeToString(),
+                compress_type=zipfile.ZIP_STORED)
+    ZipWriteStr(output, METADATA_NAME, legacy_metadata,
                 compress_type=zipfile.ZIP_STORED)
     return
 
+  with open('{}.pb'.format(output), 'w') as f:
+    f.write(metadata_proto.SerializeToString())
   with open(output, 'w') as f:
-    f.write(value)
+    f.write(legacy_metadata)
+
+
+def UpdateDeviceState(device_state, build_info, boot_variable_values,
+                      is_post_build):
+  """Update the fields of the DeviceState proto with build info."""
+
+  build_devices, build_fingerprints = \
+      CalculateRuntimeDevicesAndFingerprints(build_info, boot_variable_values)
+  device_state.device.extend(sorted(build_devices))
+  device_state.build.extend(sorted(build_fingerprints))
+  device_state.build_incremental = build_info.GetBuildProp(
+      'ro.build.version.incremental')
+
+  # TODO(xunchang) update the partition state
+
+  if is_post_build:
+    device_state.sdk_level = build_info.GetBuildProp(
+        'ro.build.version.sdk')
+    device_state.security_patch_level = build_info.GetBuildProp(
+        'ro.build.version.security_patch')
+    # Use the actual post-timestamp, even for a downgrade case.
+    device_state.timestamp = int(build_info.GetBuildProp('ro.build.date.utc'))
 
 
 def GetPackageMetadata(target_info, source_info=None):
-  """Generates and returns the metadata dict.
+  """Generates and returns the metadata proto.
 
-  It generates a dict() that contains the info to be written into an OTA
-  package (META-INF/com/android/metadata). It also handles the detection of
-  downgrade / data wipe based on the global options.
+  It generates an ota_metadata protobuf that contains the info to be written
+  into an OTA package (META-INF/com/android/metadata.pb). It also handles the
+  detection of downgrade / data wipe based on the global options.
 
   Args:
     target_info: The BuildInfo instance that holds the target build info.
@@ -156,66 +191,96 @@ def GetPackageMetadata(target_info, source_info=None):
       None if generating full OTA.
 
   Returns:
-    A dict to be written into package metadata entry.
+    A protobuf to be written into package metadata entry.
   """
   assert isinstance(target_info, BuildInfo)
   assert source_info is None or isinstance(source_info, BuildInfo)
 
-  separator = '|'
-
   boot_variable_values = {}
   if OPTIONS.boot_variable_file:
     d = LoadDictionaryFromFile(OPTIONS.boot_variable_file)
     for key, values in d.items():
       boot_variable_values[key] = [val.strip() for val in values.split(',')]
 
-  post_build_devices, post_build_fingerprints = \
-      CalculateRuntimeDevicesAndFingerprints(target_info, boot_variable_values)
-  metadata = {
-      'post-build': separator.join(sorted(post_build_fingerprints)),
-      'post-build-incremental': target_info.GetBuildProp(
-          'ro.build.version.incremental'),
-      'post-sdk-level': target_info.GetBuildProp(
-          'ro.build.version.sdk'),
-      'post-security-patch-level': target_info.GetBuildProp(
-          'ro.build.version.security_patch'),
-  }
+  metadata_proto = ota_metadata_pb2.OtaMetadata()
+  # TODO(xunchang) some fields, e.g. post-device, aren't necessary. We can
+  # consider skipping them if they aren't used by clients.
+  UpdateDeviceState(metadata_proto.postcondition, target_info,
+                    boot_variable_values, True)
 
   if target_info.is_ab and not OPTIONS.force_non_ab:
-    metadata['ota-type'] = 'AB'
-    metadata['ota-required-cache'] = '0'
+    metadata_proto.type = ota_metadata_pb2.OtaMetadata.AB
+    metadata_proto.required_cache = 0
   else:
-    metadata['ota-type'] = 'BLOCK'
+    metadata_proto.type = ota_metadata_pb2.OtaMetadata.BLOCK
+    # The cache requirement will be updated by the non-A/B code.
 
   if OPTIONS.wipe_user_data:
-    metadata['ota-wipe'] = 'yes'
+    metadata_proto.wipe = True
 
   if OPTIONS.retrofit_dynamic_partitions:
-    metadata['ota-retrofit-dynamic-partitions'] = 'yes'
+    metadata_proto.retrofit_dynamic_partitions = True
 
   is_incremental = source_info is not None
   if is_incremental:
-    pre_build_devices, pre_build_fingerprints = \
-        CalculateRuntimeDevicesAndFingerprints(source_info,
-                                               boot_variable_values)
-    metadata['pre-build'] = separator.join(sorted(pre_build_fingerprints))
-    metadata['pre-build-incremental'] = source_info.GetBuildProp(
-        'ro.build.version.incremental')
-    metadata['pre-device'] = separator.join(sorted(pre_build_devices))
+    UpdateDeviceState(metadata_proto.precondition, source_info,
+                      boot_variable_values, False)
   else:
-    metadata['pre-device'] = separator.join(sorted(post_build_devices))
-
-  # Use the actual post-timestamp, even for a downgrade case.
-  metadata['post-timestamp'] = target_info.GetBuildProp('ro.build.date.utc')
+    metadata_proto.precondition.device.extend(
+        metadata_proto.postcondition.device)
 
   # Detect downgrades and set up downgrade flags accordingly.
   if is_incremental:
-    HandleDowngradeMetadata(metadata, target_info, source_info)
+    HandleDowngradeMetadata(metadata_proto, target_info, source_info)
 
-  return metadata
+  return metadata_proto
 
 
-def HandleDowngradeMetadata(metadata, target_info, source_info):
+def BuildLegacyOtaMetadata(metadata_proto):
+  """Converts the metadata proto to a legacy metadata dict.
+
+  This metadata dict is used to build the legacy metadata text file for
+  backward compatibility. We won't add new keys to the legacy metadata format.
+  If new information is needed, we should add it as a new field in the
+  OtaMetadata proto definition.
+  """
+
+  separator = '|'
+
+  metadata_dict = {}
+  if metadata_proto.type == ota_metadata_pb2.OtaMetadata.AB:
+    metadata_dict['ota-type'] = 'AB'
+  elif metadata_proto.type == ota_metadata_pb2.OtaMetadata.BLOCK:
+    metadata_dict['ota-type'] = 'BLOCK'
+  if metadata_proto.wipe:
+    metadata_dict['ota-wipe'] = 'yes'
+  if metadata_proto.retrofit_dynamic_partitions:
+    metadata_dict['ota-retrofit-dynamic-partitions'] = 'yes'
+  if metadata_proto.downgrade:
+    metadata_dict['ota-downgrade'] = 'yes'
+
+  metadata_dict['ota-required-cache'] = str(metadata_proto.required_cache)
+
+  post_build = metadata_proto.postcondition
+  metadata_dict['post-build'] = separator.join(post_build.build)
+  metadata_dict['post-build-incremental'] = post_build.build_incremental
+  metadata_dict['post-sdk-level'] = post_build.sdk_level
+  metadata_dict['post-security-patch-level'] = post_build.security_patch_level
+  metadata_dict['post-timestamp'] = str(post_build.timestamp)
+
+  pre_build = metadata_proto.precondition
+  metadata_dict['pre-device'] = separator.join(pre_build.device)
+  # incremental updates
+  if len(pre_build.build) != 0:
+    metadata_dict['pre-build'] = separator.join(pre_build.build)
+    metadata_dict['pre-build-incremental'] = pre_build.build_incremental
+
+  metadata_dict.update(metadata_proto.property_files)
+
+  return metadata_dict
+
+
+def HandleDowngradeMetadata(metadata_proto, target_info, source_info):
   # Only incremental OTAs are allowed to reach here.
   assert OPTIONS.incremental_source is not None
 
|
       raise RuntimeError(
           "--downgrade or --override_timestamp specified but no downgrade "
           "detected: pre: %s, post: %s" % (pre_timestamp, post_timestamp))
-    metadata["ota-downgrade"] = "yes"
+    metadata_proto.downgrade = True
   else:
     if is_downgrade:
       raise RuntimeError(
@@ -415,8 +480,10 @@ class PropertyFiles(object):
     # reserved space serves the metadata entry only.
     if reserve_space:
       tokens.append('metadata:' + ' ' * 15)
+      tokens.append('metadata.pb:' + ' ' * 15)
     else:
       tokens.append(ComputeEntryOffsetSize(METADATA_NAME))
+      tokens.append(ComputeEntryOffsetSize(METADATA_PROTO_NAME))
 
     return ','.join(tokens)
@@ -42,12 +42,13 @@ class NonAbOtaPropertyFilesTest(PropertyFilesTestCase):
       property_files_string = property_files.Compute(zip_fp)
 
     tokens = self._parse_property_files_string(property_files_string)
-    self.assertEqual(1, len(tokens))
+    self.assertEqual(2, len(tokens))
     self._verify_entries(zip_file, tokens, entries)
 
   def test_Finalize(self):
     entries = [
         'META-INF/com/android/metadata',
+        'META-INF/com/android/metadata.pb',
     ]
     zip_file = self.construct_zip_package(entries)
     property_files = NonAbOtaPropertyFiles()
@@ -57,14 +58,16 @@ class NonAbOtaPropertyFilesTest(PropertyFilesTestCase):
       property_files_string = property_files.Finalize(zip_fp, len(raw_metadata))
     tokens = self._parse_property_files_string(property_files_string)
 
-    self.assertEqual(1, len(tokens))
+    self.assertEqual(2, len(tokens))
     # 'META-INF/com/android/metadata' will be key'd as 'metadata'.
     entries[0] = 'metadata'
+    entries[1] = 'metadata.pb'
     self._verify_entries(zip_file, tokens, entries)
 
   def test_Verify(self):
     entries = (
         'META-INF/com/android/metadata',
+        'META-INF/com/android/metadata.pb',
     )
     zip_file = self.construct_zip_package(entries)
     property_files = NonAbOtaPropertyFiles()
@@ -20,17 +20,20 @@ import os.path
 import zipfile
 
 import common
+import ota_metadata_pb2
 import test_utils
-from ota_utils import CalculateRuntimeDevicesAndFingerprints
+from ota_utils import (
+    BuildLegacyOtaMetadata, CalculateRuntimeDevicesAndFingerprints,
+    FinalizeMetadata, GetPackageMetadata, PropertyFiles)
 from ota_from_target_files import (
-    _LoadOemDicts, AbOtaPropertyFiles, FinalizeMetadata,
-    GetPackageMetadata, GetTargetFilesZipForSecondaryImages,
+    _LoadOemDicts, AbOtaPropertyFiles,
+    GetTargetFilesZipForSecondaryImages,
     GetTargetFilesZipWithoutPostinstallConfig,
-    Payload, PayloadSigner, POSTINSTALL_CONFIG, PropertyFiles,
+    Payload, PayloadSigner, POSTINSTALL_CONFIG,
     StreamingPropertyFiles)
-from non_ab_ota import NonAbOtaPropertyFiles
 from test_utils import PropertyFilesTestCase
 
 
 def construct_target_files(secondary=False):
   """Returns a target-files.zip file for generating OTA packages."""
   target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
@@ -150,7 +153,6 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
       'oem_fingerprint_properties': 'ro.product.device ro.product.brand',
   }
 
-
   def setUp(self):
     self.testdata_dir = test_utils.get_testdata_dir()
     self.assertTrue(os.path.exists(self.testdata_dir))
@@ -169,11 +171,16 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
 
     common.OPTIONS.search_path = test_utils.get_search_path()
 
+  @staticmethod
+  def GetLegacyOtaMetadata(target_info, source_info=None):
+    metadata_proto = GetPackageMetadata(target_info, source_info)
+    return BuildLegacyOtaMetadata(metadata_proto)
+
   def test_GetPackageMetadata_abOta_full(self):
     target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
     target_info_dict['ab_update'] = 'true'
     target_info = common.BuildInfo(target_info_dict, None)
-    metadata = GetPackageMetadata(target_info)
+    metadata = self.GetLegacyOtaMetadata(target_info)
     self.assertDictEqual(
         {
             'ota-type' : 'AB',
@@ -193,7 +200,7 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
     target_info = common.BuildInfo(target_info_dict, None)
     source_info = common.BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
     common.OPTIONS.incremental_source = ''
-    metadata = GetPackageMetadata(target_info, source_info)
+    metadata = self.GetLegacyOtaMetadata(target_info, source_info)
     self.assertDictEqual(
         {
             'ota-type' : 'AB',
@@ -211,10 +218,11 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
 
   def test_GetPackageMetadata_nonAbOta_full(self):
     target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
-    metadata = GetPackageMetadata(target_info)
+    metadata = self.GetLegacyOtaMetadata(target_info)
     self.assertDictEqual(
         {
             'ota-type' : 'BLOCK',
+            'ota-required-cache' : '0',
             'post-build' : 'build-fingerprint-target',
             'post-build-incremental' : 'build-version-incremental-target',
             'post-sdk-level' : '27',
@@ -228,10 +236,11 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
     target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
     source_info = common.BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
     common.OPTIONS.incremental_source = ''
-    metadata = GetPackageMetadata(target_info, source_info)
+    metadata = self.GetLegacyOtaMetadata(target_info, source_info)
     self.assertDictEqual(
         {
             'ota-type' : 'BLOCK',
+            'ota-required-cache' : '0',
             'post-build' : 'build-fingerprint-target',
             'post-build-incremental' : 'build-version-incremental-target',
             'post-sdk-level' : '27',
@@ -246,10 +255,11 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
   def test_GetPackageMetadata_wipe(self):
     target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
     common.OPTIONS.wipe_user_data = True
-    metadata = GetPackageMetadata(target_info)
+    metadata = self.GetLegacyOtaMetadata(target_info)
     self.assertDictEqual(
         {
             'ota-type' : 'BLOCK',
+            'ota-required-cache' : '0',
             'ota-wipe' : 'yes',
             'post-build' : 'build-fingerprint-target',
             'post-build-incremental' : 'build-version-incremental-target',
@@ -263,11 +273,12 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
   def test_GetPackageMetadata_retrofitDynamicPartitions(self):
     target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
     common.OPTIONS.retrofit_dynamic_partitions = True
-    metadata = GetPackageMetadata(target_info)
+    metadata = self.GetLegacyOtaMetadata(target_info)
     self.assertDictEqual(
         {
             'ota-retrofit-dynamic-partitions' : 'yes',
             'ota-type' : 'BLOCK',
+            'ota-required-cache' : '0',
             'post-build' : 'build-fingerprint-target',
             'post-build-incremental' : 'build-version-incremental-target',
             'post-sdk-level' : '27',
@@ -293,7 +304,7 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
     target_info = common.BuildInfo(target_info_dict, None)
     source_info = common.BuildInfo(source_info_dict, None)
     common.OPTIONS.incremental_source = ''
-    self.assertRaises(RuntimeError, GetPackageMetadata, target_info,
+    self.assertRaises(RuntimeError, self.GetLegacyOtaMetadata, target_info,
                       source_info)
 
   def test_GetPackageMetadata_downgrade(self):
@@ -307,11 +318,13 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
     common.OPTIONS.incremental_source = ''
     common.OPTIONS.downgrade = True
     common.OPTIONS.wipe_user_data = True
-    metadata = GetPackageMetadata(target_info, source_info)
+    metadata = self.GetLegacyOtaMetadata(target_info, source_info)
+
     self.assertDictEqual(
         {
             'ota-downgrade' : 'yes',
             'ota-type' : 'BLOCK',
+            'ota-required-cache' : '0',
             'ota-wipe' : 'yes',
             'post-build' : 'build-fingerprint-target',
             'post-build-incremental' : 'build-version-incremental-target',
@@ -464,13 +477,13 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
         'A' * 1024 * 1024 * 1024,
         zipfile.ZIP_STORED)
 
-    metadata = {}
+    metadata = ota_metadata_pb2.OtaMetadata()
     output_file = common.MakeTempFile(suffix='.zip')
     needed_property_files = (
         TestPropertyFiles(),
     )
     FinalizeMetadata(metadata, zip_file, output_file, needed_property_files)
-    self.assertIn('ota-test-property-files', metadata)
+    self.assertIn('ota-test-property-files', metadata.property_files)
 
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_FinalizeMetadata(self):
@@ -508,13 +521,13 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
         'A' * 1024 * 1024,
         zipfile.ZIP_STORED)
 
-    metadata = {}
+    metadata = ota_metadata_pb2.OtaMetadata()
     needed_property_files = (
         TestPropertyFiles(),
     )
     output_file = common.MakeTempFile(suffix='.zip')
     FinalizeMetadata(metadata, zip_file, output_file, needed_property_files)
-    self.assertIn('ota-test-property-files', metadata)
+    self.assertIn('ota-test-property-files', metadata.property_files)
 
 
 class TestPropertyFiles(PropertyFiles):
@@ -532,8 +545,8 @@ class TestPropertyFiles(PropertyFiles):
         'optional-entry2',
     )
 
-class PropertyFilesTest(PropertyFilesTestCase):
 
+class PropertyFilesTest(PropertyFilesTestCase):
 
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_Compute(self):
@@ -547,7 +560,7 @@ class PropertyFilesTest(PropertyFilesTestCase):
       property_files_string = property_files.Compute(zip_fp)
 
     tokens = self._parse_property_files_string(property_files_string)
-    self.assertEqual(3, len(tokens))
+    self.assertEqual(4, len(tokens))
     self._verify_entries(zip_file, tokens, entries)
 
   def test_Compute_withOptionalEntries(self):
@@ -563,7 +576,7 @@ class PropertyFilesTest(PropertyFilesTestCase):
       property_files_string = property_files.Compute(zip_fp)
 
     tokens = self._parse_property_files_string(property_files_string)
-    self.assertEqual(5, len(tokens))
+    self.assertEqual(6, len(tokens))
     self._verify_entries(zip_file, tokens, entries)
 
   def test_Compute_missingRequiredEntry(self):
@@ -581,6 +594,7 @@ class PropertyFilesTest(PropertyFilesTestCase):
         'required-entry1',
         'required-entry2',
         'META-INF/com/android/metadata',
+        'META-INF/com/android/metadata.pb',
     ]
     zip_file = self.construct_zip_package(entries)
     property_files = TestPropertyFiles()
@@ -590,10 +604,11 @@ class PropertyFilesTest(PropertyFilesTestCase):
       streaming_metadata = property_files.Finalize(zip_fp, len(raw_metadata))
     tokens = self._parse_property_files_string(streaming_metadata)
 
-    self.assertEqual(3, len(tokens))
+    self.assertEqual(4, len(tokens))
     # 'META-INF/com/android/metadata' will be key'd as 'metadata' in the
     # streaming metadata.
     entries[2] = 'metadata'
+    entries[3] = 'metadata.pb'
     self._verify_entries(zip_file, tokens, entries)
 
   @test_utils.SkipIfExternalToolsUnavailable()
@@ -604,6 +619,7 @@ class PropertyFilesTest(PropertyFilesTestCase):
         'optional-entry1',
         'optional-entry2',
         'META-INF/com/android/metadata',
+        'META-INF/com/android/metadata.pb',
     )
     zip_file = self.construct_zip_package(entries)
     property_files = TestPropertyFiles()
@@ -638,6 +654,7 @@ class PropertyFilesTest(PropertyFilesTestCase):
         'optional-entry1',
         'optional-entry2',
         'META-INF/com/android/metadata',
+        'META-INF/com/android/metadata.pb',
     )
     zip_file = self.construct_zip_package(entries)
     property_files = TestPropertyFiles()
@@ -687,7 +704,7 @@ class StreamingPropertyFilesTest(PropertyFilesTestCase):
       property_files_string = property_files.Compute(zip_fp)
 
     tokens = self._parse_property_files_string(property_files_string)
-    self.assertEqual(5, len(tokens))
+    self.assertEqual(6, len(tokens))
     self._verify_entries(zip_file, tokens, entries)
 
   def test_Finalize(self):
@@ -697,6 +714,7 @@ class StreamingPropertyFilesTest(PropertyFilesTestCase):
         'care_map.txt',
         'compatibility.zip',
         'META-INF/com/android/metadata',
+        'META-INF/com/android/metadata.pb',
     ]
     zip_file = self.construct_zip_package(entries)
     property_files = StreamingPropertyFiles()
@@ -706,10 +724,11 @@ class StreamingPropertyFilesTest(PropertyFilesTestCase):
       streaming_metadata = property_files.Finalize(zip_fp, len(raw_metadata))
     tokens = self._parse_property_files_string(streaming_metadata)
 
-    self.assertEqual(5, len(tokens))
+    self.assertEqual(6, len(tokens))
     # 'META-INF/com/android/metadata' will be key'd as 'metadata' in the
     # streaming metadata.
     entries[4] = 'metadata'
+    entries[5] = 'metadata.pb'
     self._verify_entries(zip_file, tokens, entries)
 
   def test_Verify(self):
@@ -719,6 +738,7 @@ class StreamingPropertyFilesTest(PropertyFilesTestCase):
         'care_map.txt',
         'compatibility.zip',
         'META-INF/com/android/metadata',
+        'META-INF/com/android/metadata.pb',
     )
     zip_file = self.construct_zip_package(entries)
     property_files = StreamingPropertyFiles()
@@ -855,6 +875,7 @@ class AbOtaPropertyFilesTest(PropertyFilesTestCase):
       # Put META-INF/com/android/metadata if needed.
       if with_metadata:
         entries.append('META-INF/com/android/metadata')
+        entries.append('META-INF/com/android/metadata.pb')
 
       for entry in entries:
         zip_fp.writestr(
@@ -870,9 +891,9 @@ class AbOtaPropertyFilesTest(PropertyFilesTestCase):
       property_files_string = property_files.Compute(zip_fp)
 
     tokens = self._parse_property_files_string(property_files_string)
-    # "6" indcludes the four entries above, one metadata entry, and one entry
+    # "7" indcludes the four entries above, two metadata entries, and one entry
     # for payload-metadata.bin.
-    self.assertEqual(6, len(tokens))
+    self.assertEqual(7, len(tokens))
     self._verify_entries(
         zip_file, tokens, ('care_map.txt', 'compatibility.zip'))
 
@@ -886,9 +907,9 @@ class AbOtaPropertyFilesTest(PropertyFilesTestCase):
       property_files_string = property_files.Finalize(zip_fp, len(raw_metadata))
 
     tokens = self._parse_property_files_string(property_files_string)
-    # "6" indcludes the four entries above, one metadata entry, and one entry
+    # "7" includes the four entries above, two metadata entries, and one entry
     # for payload-metadata.bin.
-    self.assertEqual(6, len(tokens))
+    self.assertEqual(7, len(tokens))
     self._verify_entries(
         zip_file, tokens, ('care_map.txt', 'compatibility.zip'))
 
@@ -1187,10 +1208,29 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
       'ro.build.tags=build-tags',
       'ro.build.version.sdk=30',
       'ro.build.version.security_patch=2020',
-      'ro.build.date.utc=12345678'
+      'ro.build.date.utc=12345678',
+      'ro.system.build.version.release=version-release',
+      'ro.system.build.id=build-id',
+      'ro.system.build.version.incremental=version-incremental',
+      'ro.system.build.type=build-type',
+      'ro.system.build.tags=build-tags',
+      'ro.system.build.version.sdk=30',
+      'ro.system.build.version.security_patch=2020',
+      'ro.system.build.date.utc=12345678',
+      'ro.product.system.brand=generic',
+      'ro.product.system.name=generic',
+      'ro.product.system.device=generic',
   ]
 
   VENDOR_BUILD_PROP = [
+      'ro.vendor.build.version.release=version-release',
+      'ro.vendor.build.id=build-id',
+      'ro.vendor.build.version.incremental=version-incremental',
+      'ro.vendor.build.type=build-type',
+      'ro.vendor.build.tags=build-tags',
+      'ro.vendor.build.version.sdk=30',
+      'ro.vendor.build.version.security_patch=2020',
+      'ro.vendor.build.date.utc=12345678',
       'ro.product.vendor.brand=vendor-product-brand',
       'ro.product.vendor.name=vendor-product-name',
       'ro.product.vendor.device=vendor-product-device'
@@ -1326,8 +1366,8 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
       f.write('ro.boot.sku_name=std,pro')
 
     build_info = common.BuildInfo(common.LoadInfoDict(self.test_dir))
-    metadata = GetPackageMetadata(build_info)
-    self.assertEqual('vendor-product-device', metadata['pre-device'])
+    metadata_dict = BuildLegacyOtaMetadata(GetPackageMetadata(build_info))
+    self.assertEqual('vendor-product-device', metadata_dict['pre-device'])
     fingerprints = [
         self.constructFingerprint(
             'vendor-product-brand/vendor-product-name/vendor-product-device'),
@@ -1336,7 +1376,33 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
         self.constructFingerprint(
             'vendor-product-brand/vendor-product-std/vendor-product-device'),
     ]
-    self.assertEqual('|'.join(fingerprints), metadata['post-build'])
+    self.assertEqual('|'.join(fingerprints), metadata_dict['post-build'])
 
+  def CheckMetadataEqual(self, metadata_dict, metadata_proto):
+    post_build = metadata_proto.postcondition
+    self.assertEqual('|'.join(post_build.build),
+                     metadata_dict['post-build'])
+    self.assertEqual(post_build.build_incremental,
+                     metadata_dict['post-build-incremental'])
+    self.assertEqual(post_build.sdk_level,
+                     metadata_dict['post-sdk-level'])
+    self.assertEqual(post_build.security_patch_level,
+                     metadata_dict['post-security-patch-level'])
+
+    if metadata_proto.type == ota_metadata_pb2.OtaMetadata.AB:
+      ota_type = 'AB'
+    elif metadata_proto.type == ota_metadata_pb2.OtaMetadata.BLOCK:
+      ota_type = 'BLOCK'
+    else:
+      ota_type = ''
+    self.assertEqual(ota_type, metadata_dict['ota-type'])
+    self.assertEqual(metadata_proto.wipe,
+                     metadata_dict.get('ota-wipe') == 'yes')
+    self.assertEqual(metadata_proto.required_cache,
+                     int(metadata_dict.get('ota-required-cache', 0)))
+    self.assertEqual(metadata_proto.retrofit_dynamic_partitions,
+                     metadata_dict.get(
+                         'ota-retrofit-dynamic-partitions') == 'yes')
+
   def test_GetPackageMetadata_incremental_package(self):
     vendor_build_prop = copy.deepcopy(self.VENDOR_BUILD_PROP)
@@ -1365,7 +1431,18 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
         'ro.build.tags=build-tags',
         'ro.build.version.sdk=29',
         'ro.build.version.security_patch=2020',
-        'ro.build.date.utc=12340000'
+        'ro.build.date.utc=12340000',
+        'ro.system.build.version.release=source-version-release',
+        'ro.system.build.id=source-build-id',
+        'ro.system.build.version.incremental=source-version-incremental',
+        'ro.system.build.type=build-type',
+        'ro.system.build.tags=build-tags',
+        'ro.system.build.version.sdk=29',
+        'ro.system.build.version.security_patch=2020',
+        'ro.system.build.date.utc=12340000',
+        'ro.product.system.brand=generic',
+        'ro.product.system.name=generic',
+        'ro.product.system.device=generic',
     ]
     self.writeFiles({
         'META/misc_info.txt': '\n'.join(self.MISC_INFO),
@@ -1381,10 +1458,11 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
     target_info = common.BuildInfo(common.LoadInfoDict(self.test_dir))
     source_info = common.BuildInfo(common.LoadInfoDict(source_dir))
 
-    metadata = GetPackageMetadata(target_info, source_info)
+    metadata_proto = GetPackageMetadata(target_info, source_info)
+    metadata_dict = BuildLegacyOtaMetadata(metadata_proto)
     self.assertEqual(
         'vendor-device-pro|vendor-device-std|vendor-product-device',
-        metadata['pre-device'])
+        metadata_dict['pre-device'])
     suffix = ':source-version-release/source-build-id/' \
         'source-version-incremental:build-type/build-tags'
     pre_fingerprints = [
@@ -1395,7 +1473,7 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
         'vendor-product-brand/vendor-product-name/vendor-product-device'
         '{}'.format(suffix),
     ]
-    self.assertEqual('|'.join(pre_fingerprints), metadata['pre-build'])
+    self.assertEqual('|'.join(pre_fingerprints), metadata_dict['pre-build'])
 
     post_fingerprints = [
         self.constructFingerprint(
@@ -1405,4 +1483,6 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
         self.constructFingerprint(
             'vendor-product-brand/vendor-product-name/vendor-product-device'),
     ]
-    self.assertEqual('|'.join(post_fingerprints), metadata['post-build'])
+    self.assertEqual('|'.join(post_fingerprints), metadata_dict['post-build'])
+
+    self.CheckMetadataEqual(metadata_dict, metadata_proto)
|
@@ -22,6 +22,7 @@ Utils for running unittests.
|
|||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import os.path
|
import os.path
|
||||||
|
import re
|
||||||
import struct
|
import struct
|
||||||
import sys
|
import sys
|
||||||
import unittest
|
import unittest
|
||||||
@@ -224,13 +225,26 @@ class PropertyFilesTestCase(ReleaseToolsTestCase):
       input_fp.seek(offset)
       if entry == 'metadata':
         expected = b'META-INF/COM/ANDROID/METADATA'
+      elif entry == 'metadata.pb':
+        expected = b'META-INF/COM/ANDROID/METADATA-PB'
       else:
         expected = entry.replace('.', '-').upper().encode()
       self.assertEqual(expected, input_fp.read(size))
 
 
 if __name__ == '__main__':
-  testsuite = unittest.TestLoader().discover(
-      os.path.dirname(os.path.realpath(__file__)))
+  # We only want to run tests from the top level directory. Unfortunately the
+  # pattern option of unittest.discover, internally using fnmatch, doesn't
+  # provide a good API to filter the test files based on directory. So we do an
+  # os walk and load them manually.
+  test_modules = []
+  base_path = os.path.dirname(os.path.realpath(__file__))
+  for dirpath, _, files in os.walk(base_path):
+    for fn in files:
+      if dirpath == base_path and re.match('test_.*\\.py$', fn):
+        test_modules.append(fn[:-3])
+
+  test_suite = unittest.TestLoader().loadTestsFromNames(test_modules)
+
   # atest needs a verbosity level of >= 2 to correctly parse the result.
-  unittest.TextTestRunner(verbosity=2).run(testsuite)
+  unittest.TextTestRunner(verbosity=2).run(test_suite)