Split the huge merge_target_files script into multiple files.

Bug: 221858722
Test: m otatools; Use to create merged builds
Test: atest --host releasetools_test
Change-Id: I5f932f160d3f6405b41a7721b1c75cc96749e77b
This commit is contained in:
Daniel Norman
2022-03-02 12:01:20 -08:00
parent 51005914bd
commit 2465fc8594
10 changed files with 1382 additions and 1200 deletions

View File

@@ -19,14 +19,20 @@ package {
// Python sources for the split merge_target_files tooling.
filegroup {
name: "releasetools_merge_sources",
srcs: [
"merge_compatibility_checks.py",
"merge_dexopt.py",
"merge_meta.py",
"merge_target_files.py",
"merge_utils.py",
],
}
// Host-side unit tests covering the merge_* modules above.
filegroup {
name: "releasetools_merge_tests",
srcs: [
"test_merge_target_files.py",
"test_merge_compatibility_checks.py",
"test_merge_meta.py",
"test_merge_utils.py",
],
}

View File

@@ -0,0 +1,206 @@
#!/usr/bin/env python
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
"""Compatibility checks that should be performed on merged target_files."""
import json
import logging
import os
from xml.etree import ElementTree
import apex_utils
import check_target_files_vintf
import common
import find_shareduid_violation
logger = logging.getLogger(__name__)
OPTIONS = common.OPTIONS
def CheckCompatibility(target_files_dir, partition_map):
  """Runs the full set of merged-build compatibility checks.

  Args:
    target_files_dir: Extracted merged target files directory.
    partition_map: {partition name: path} of partitions in the merged build.

  Returns:
    A possibly-empty list of error messages.
  """
  errors = []
  errors += CheckVintf(target_files_dir)
  errors += CheckShareduidViolation(target_files_dir, partition_map)
  errors += CheckApexDuplicatePackages(target_files_dir, partition_map)

  # The remaining checks only look at this fixed subset of partitions.
  checked_partitions = ('system', 'system_ext', 'product', 'vendor', 'odm')
  filtered_map = {
      name: path
      for name, path in partition_map.items()
      if name in checked_partitions
  }
  errors += CheckInitRcFiles(target_files_dir, filtered_map)
  errors += CheckCombinedSepolicy(target_files_dir, filtered_map)
  return errors
def CheckVintf(target_files_dir):
  """Reports VINTF compatibility problems found by check_vintf."""
  try:
    compatible = check_target_files_vintf.CheckVintf(target_files_dir)
  except RuntimeError as err:
    return [str(err)]
  return [] if compatible else ['Incompatible VINTF.']
def CheckShareduidViolation(target_files_dir, partition_map):
  """Checks for APK sharedUserId violations across partition sets.

  Writes results to META/shareduid_violation_modules.json to help
  with followup debugging.
  """
  violation = find_shareduid_violation.FindShareduidViolation(
      target_files_dir, partition_map)

  # Keep the raw scan output on disk to enable debugging.
  violation_json_path = os.path.join(
      target_files_dir, 'META', 'shareduid_violation_modules.json')
  with open(violation_json_path, 'w') as f:
    f.write(violation)

  # Only violations spanning the framework/vendor partition sets are errors.
  shareduid_errors = common.SharedUidPartitionViolations(
      json.loads(violation),
      [OPTIONS.framework_partition_set, OPTIONS.vendor_partition_set])
  if not shareduid_errors:
    return []

  errors = ['APK sharedUserId error: %s' % e for e in shareduid_errors]
  errors.append(
      'See APK sharedUserId violations file: %s' % violation_json_path)
  return errors
def CheckInitRcFiles(target_files_dir, partition_map):
  """Checks init .rc files using host_init_verifier."""
  errors = []
  try:
    common.RunHostInitVerifier(
        product_out=target_files_dir, partition_map=partition_map)
  except RuntimeError as err:
    errors.append(str(err))
  return errors
def CheckCombinedSepolicy(target_files_dir, partition_map, execute=True):
  """Uses secilc to compile a split sepolicy file.

  Depends on various */etc/selinux/* and */etc/vintf/* files within partitions.

  Args:
    target_files_dir: Extracted merged target files directory.
    partition_map: {partition name: path} of partitions in the merged build.
    execute: If True, run secilc and collect its errors. If False, return the
      secilc command line instead of running it (used by tests).

  Returns:
    A possibly-empty list of error messages, or the secilc command when
    execute is False and all inputs were found.
  """
  errors = []

  def get_file(partition, path):
    # Returns the host path of <partition>/<path>, or None if unavailable.
    if partition not in partition_map:
      logger.warning('Cannot load SEPolicy files for missing partition %s',
                     partition)
      return None
    file_path = os.path.join(target_files_dir, partition_map[partition], path)
    if os.path.exists(file_path):
      return file_path
    return None

  # Load the kernel sepolicy version from the FCM. This is normally provided
  # directly to selinux.cpp as a build flag, but is also available in this file.
  fcm_file = get_file('system', 'etc/vintf/compatibility_matrix.device.xml')
  if not fcm_file:
    errors.append('Missing required file for loading sepolicy: '
                  '/system/etc/vintf/compatibility_matrix.device.xml')
    return errors
  kernel_sepolicy_version = ElementTree.parse(fcm_file).getroot().find(
      'sepolicy/kernel-sepolicy-version').text

  # Load the vendor's plat sepolicy version. This is the version used for
  # locating sepolicy mapping files.
  vendor_plat_version_file = get_file('vendor',
                                      'etc/selinux/plat_sepolicy_vers.txt')
  if not vendor_plat_version_file:
    # Report the expected path; vendor_plat_version_file itself is None here,
    # so interpolating it would have printed 'None'.
    errors.append('Missing required sepolicy file '
                  'vendor/etc/selinux/plat_sepolicy_vers.txt')
    return errors
  with open(vendor_plat_version_file) as f:
    vendor_plat_version = f.read().strip()

  # Use the same flags and arguments as selinux.cpp OpenSplitPolicy().
  cmd = ['secilc', '-m', '-M', 'true', '-G', '-N']
  cmd.extend(['-c', kernel_sepolicy_version])
  cmd.extend(['-o', os.path.join(target_files_dir, 'META/combined_sepolicy')])
  cmd.extend(['-f', '/dev/null'])

  required_policy_files = (
      ('system', 'etc/selinux/plat_sepolicy.cil'),
      ('system', 'etc/selinux/mapping/%s.cil' % vendor_plat_version),
      ('vendor', 'etc/selinux/vendor_sepolicy.cil'),
      ('vendor', 'etc/selinux/plat_pub_versioned.cil'),
  )
  for partition, path in required_policy_files:
    policy = get_file(partition, path)
    if not policy:
      # Name the missing file; the previous code formatted the None result
      # into the message instead of the path that was looked up.
      errors.append('Missing required sepolicy file %s' %
                    os.path.join(partition, path))
      return errors
    cmd.append(policy)

  optional_policy_files = (
      ('system', 'etc/selinux/mapping/%s.compat.cil' % vendor_plat_version),
      ('system_ext', 'etc/selinux/system_ext_sepolicy.cil'),
      ('system_ext', 'etc/selinux/mapping/%s.cil' % vendor_plat_version),
      ('product', 'etc/selinux/product_sepolicy.cil'),
      ('product', 'etc/selinux/mapping/%s.cil' % vendor_plat_version),
      ('odm', 'etc/selinux/odm_sepolicy.cil'),
  )
  for partition, path in optional_policy_files:
    policy = get_file(partition, path)
    if policy:
      cmd.append(policy)

  try:
    if execute:
      common.RunAndCheckOutput(cmd)
    else:
      return cmd
  except RuntimeError as err:
    errors.append(str(err))

  return errors
def CheckApexDuplicatePackages(target_files_dir, partition_map):
  """Checks if the same APEX package name is provided by multiple partitions."""
  errors = []
  seen_packages = set()
  for partition in partition_map:
    try:
      apex_info = apex_utils.GetApexInfoFromTargetFiles(
          target_files_dir, partition, compressed_only=False)
    except RuntimeError as err:
      errors.append(str(err))
      apex_info = []

    packages_in_partition = {info.package_name for info in apex_info}
    duplicates = seen_packages & packages_in_partition
    if duplicates:
      errors.append(
          'Duplicate APEX package_names found in multiple partitions: %s' %
          ' '.join(duplicates))
    seen_packages |= packages_in_partition
  return errors

View File

@@ -0,0 +1,322 @@
#!/usr/bin/env python
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
"""Generates dexopt files for vendor apps, from a merged target_files.
Expects items in OPTIONS prepared by merge_target_files.py.
"""
import glob
import json
import logging
import os
import shutil
import subprocess

import common
import merge_utils
logger = logging.getLogger(__name__)
OPTIONS = common.OPTIONS
def MergeDexopt(temp_dir, output_target_files_dir):
  """If needed, generates dexopt files for vendor apps.

  Only runs when the vendor build was made with building_with_vsdk=true and
  all three dexpreopt inputs (tools zip plus framework/vendor config zips)
  were supplied; otherwise returns without doing anything.

  Args:
    temp_dir: Location containing an 'output' directory where target files have
      been extracted, e.g. <temp_dir>/output/SYSTEM, <temp_dir>/output/IMAGES,
      etc.
    output_target_files_dir: The name of a directory that will be used to
      create the output target files package after all the special cases are
      processed.
  """
  # Load vendor and framework META/misc_info.txt.
  if (OPTIONS.vendor_misc_info.get('building_with_vsdk') != 'true' or
      OPTIONS.framework_dexpreopt_tools is None or
      OPTIONS.framework_dexpreopt_config is None or
      OPTIONS.vendor_dexpreopt_config is None):
    return

  logger.info('applying dexpreopt')

  # The directory structure to apply dexpreopt is:
  #
  # <temp_dir>/
  #     framework_meta/
  #         META/
  #     vendor_meta/
  #         META/
  #     output/
  #         SYSTEM/
  #         VENDOR/
  #         IMAGES/
  #         <other items extracted from system and vendor target files>
  #     tools/
  #         <contents of dexpreopt_tools.zip>
  #     system_config/
  #         <contents of system dexpreopt_config.zip>
  #     vendor_config/
  #         <contents of vendor dexpreopt_config.zip>
  #     system -> output/SYSTEM
  #     vendor -> output/VENDOR
  #     apex -> output/SYSTEM/apex (only for flattened APEX builds)
  #     apex/ (extracted updatable APEX)
  #         <apex 1>/
  #             ...
  #         <apex 2>/
  #             ...
  #         ...
  #     out/dex2oat_result/vendor/
  #         <app>
  #             oat/arm64/
  #                 package.vdex
  #                 package.odex
  #         <priv-app>
  #             oat/arm64/
  #                 package.vdex
  #                 package.odex
  dexpreopt_tools_files_temp_dir = os.path.join(temp_dir, 'tools')
  dexpreopt_framework_config_files_temp_dir = os.path.join(
      temp_dir, 'system_config')
  dexpreopt_vendor_config_files_temp_dir = os.path.join(temp_dir,
                                                        'vendor_config')

  # NOTE: the extraction helper lives in merge_utils (the previous code called
  # an undefined module-level name 'extract_items').
  merge_utils.ExtractItems(
      input_zip=OPTIONS.framework_dexpreopt_tools,
      output_dir=dexpreopt_tools_files_temp_dir,
      extract_item_list=('*',))
  merge_utils.ExtractItems(
      input_zip=OPTIONS.framework_dexpreopt_config,
      output_dir=dexpreopt_framework_config_files_temp_dir,
      extract_item_list=('*',))
  merge_utils.ExtractItems(
      input_zip=OPTIONS.vendor_dexpreopt_config,
      output_dir=dexpreopt_vendor_config_files_temp_dir,
      extract_item_list=('*',))

  os.symlink(
      os.path.join(output_target_files_dir, 'SYSTEM'),
      os.path.join(temp_dir, 'system'))
  os.symlink(
      os.path.join(output_target_files_dir, 'VENDOR'),
      os.path.join(temp_dir, 'vendor'))

  # The directory structure for flattened APEXes is:
  #
  # SYSTEM
  #     apex
  #         <APEX name, e.g., com.android.wifi>
  #             apex_manifest.pb
  #             apex_pubkey
  #             etc/
  #             javalib/
  #             lib/
  #             lib64/
  #             priv-app/
  #
  # The directory structure for updatable APEXes is:
  #
  # SYSTEM
  #     apex
  #         com.android.adbd.apex
  #         com.android.appsearch.apex
  #         com.android.art.apex
  #         ...
  apex_root = os.path.join(output_target_files_dir, 'SYSTEM', 'apex')

  # Check for flattened versus updatable APEX.
  if OPTIONS.framework_misc_info.get('target_flatten_apex') == 'false':
    # Extract APEX.
    logger.info('extracting APEX')
    apex_extract_root_dir = os.path.join(temp_dir, 'apex')
    os.makedirs(apex_extract_root_dir)

    for apex in (glob.glob(os.path.join(apex_root, '*.apex')) +
                 glob.glob(os.path.join(apex_root, '*.capex'))):
      logger.info('  apex: %s', apex)
      # deapexer is in the same directory as the merge_target_files binary
      # extracted from otatools.zip.
      apex_json_info = subprocess.check_output(['deapexer', 'info', apex])
      logger.info('    info: %s', apex_json_info)
      apex_info = json.loads(apex_json_info)
      apex_name = apex_info['name']
      logger.info('    name: %s', apex_name)

      apex_extract_dir = os.path.join(apex_extract_root_dir, apex_name)
      os.makedirs(apex_extract_dir)

      # deapexer uses debugfs_static, which is part of otatools.zip.
      command = [
          'deapexer',
          '--debugfs_path',
          'debugfs_static',
          'extract',
          apex,
          apex_extract_dir,
      ]
      logger.info('    running %s', command)
      subprocess.check_call(command)
  else:
    # Flattened APEXes don't need to be extracted since they have the necessary
    # directory structure.
    os.symlink(apex_root, os.path.join(temp_dir, 'apex'))

  # Modify system config to point to the tools that have been extracted.
  # Absolute or .. paths are not allowed by the dexpreopt_gen tool in
  # dexpreopt_soong.config.
  dexpreopt_framework_soong_config = os.path.join(
      dexpreopt_framework_config_files_temp_dir, 'dexpreopt_soong.config')
  with open(dexpreopt_framework_soong_config, 'w') as f:
    dexpreopt_soong_config = {
        'Profman': 'tools/profman',
        'Dex2oat': 'tools/dex2oatd',
        'Aapt': 'tools/aapt2',
        'SoongZip': 'tools/soong_zip',
        'Zip2zip': 'tools/zip2zip',
        'ManifestCheck': 'tools/manifest_check',
        'ConstructContext': 'tools/construct_context',
    }
    json.dump(dexpreopt_soong_config, f)

  # TODO(b/188179859): Make *dex location configurable to vendor or system_other.
  use_system_other_odex = False
  if use_system_other_odex:
    dex_img = 'SYSTEM_OTHER'
  else:
    dex_img = 'VENDOR'

  # Open vendor_filesystem_config to append the items generated by dexopt.
  vendor_file_system_config = open(
      os.path.join(temp_dir, 'output', 'META',
                   'vendor_filesystem_config.txt'), 'a')

  # Dexpreopt vendor apps.
  dexpreopt_config_suffix = '_dexpreopt.config'
  for config in glob.glob(
      os.path.join(dexpreopt_vendor_config_files_temp_dir,
                   '*' + dexpreopt_config_suffix)):
    app = os.path.basename(config)[:-len(dexpreopt_config_suffix)]
    logger.info('dexpreopt config: %s %s', config, app)

    # The app may live in vendor/app or vendor/priv-app; try both.
    apk_dir = 'app'
    apk_path = os.path.join(temp_dir, 'vendor', apk_dir, app, app + '.apk')
    if not os.path.exists(apk_path):
      apk_dir = 'priv-app'
      apk_path = os.path.join(temp_dir, 'vendor', apk_dir, app, app + '.apk')
      if not os.path.exists(apk_path):
        logger.warning(
            'skipping dexpreopt for %s, no apk found in vendor/app '
            'or vendor/priv-app', app)
        continue

    # Generate dexpreopting script. Note 'out_dir' is not the output directory
    # where the script is generated, but the OUT_DIR at build time referenced
    # in the dexpreopt config files, e.g., "out/.../core-oj.jar", so the tool
    # knows how to adjust the path.
    command = [
        os.path.join(dexpreopt_tools_files_temp_dir, 'dexpreopt_gen'),
        '-global',
        os.path.join(dexpreopt_framework_config_files_temp_dir,
                     'dexpreopt.config'),
        '-global_soong',
        os.path.join(dexpreopt_framework_config_files_temp_dir,
                     'dexpreopt_soong.config'),
        '-module',
        config,
        '-dexpreopt_script',
        'dexpreopt_app.sh',
        '-out_dir',
        'out',
        '-base_path',
        '.',
        '--uses_target_files',
    ]

    # Run the command from temp_dir so all tool paths are its descendants.
    logger.info('running %s', command)
    subprocess.check_call(command, cwd=temp_dir)

    # Call the generated script.
    command = ['sh', 'dexpreopt_app.sh', apk_path]
    logger.info('running %s', command)
    subprocess.check_call(command, cwd=temp_dir)

    # Output files are in:
    #
    # <temp_dir>/out/dex2oat_result/vendor/priv-app/<app>/oat/arm64/package.vdex
    # <temp_dir>/out/dex2oat_result/vendor/priv-app/<app>/oat/arm64/package.odex
    # <temp_dir>/out/dex2oat_result/vendor/app/<app>/oat/arm64/package.vdex
    # <temp_dir>/out/dex2oat_result/vendor/app/<app>/oat/arm64/package.odex
    #
    # Copy the files to their destination. The structure of system_other is:
    #
    # system_other/
    #     system-other-odex-marker
    #     system/
    #         app/
    #             <app>/oat/arm64/
    #                 <app>.odex
    #                 <app>.vdex
    #             ...
    #         priv-app/
    #             <app>/oat/arm64/
    #                 <app>.odex
    #                 <app>.vdex
    #             ...

    # TODO(b/188179859): Support for other architectures.
    arch = 'arm64'

    dex_destination = os.path.join(temp_dir, 'output', dex_img, apk_dir, app,
                                   'oat', arch)
    os.makedirs(dex_destination)
    dex2oat_path = os.path.join(temp_dir, 'out', 'dex2oat_result', 'vendor',
                                apk_dir, app, 'oat', arch)
    shutil.copy(
        os.path.join(dex2oat_path, 'package.vdex'),
        os.path.join(dex_destination, app + '.vdex'))
    shutil.copy(
        os.path.join(dex2oat_path, 'package.odex'),
        os.path.join(dex_destination, app + '.odex'))

    # Append entries to vendor_file_system_config.txt, such as:
    #
    # vendor/app/<app>/oat 0 2000 755 selabel=u:object_r:vendor_app_file:s0 capabilities=0x0
    # vendor/app/<app>/oat/arm64 0 2000 755 selabel=u:object_r:vendor_app_file:s0 capabilities=0x0
    # vendor/app/<app>/oat/arm64/<app>.odex 0 0 644 selabel=u:object_r:vendor_app_file:s0 capabilities=0x0
    # vendor/app/<app>/oat/arm64/<app>.vdex 0 0 644 selabel=u:object_r:vendor_app_file:s0 capabilities=0x0
    if not use_system_other_odex:
      vendor_app_prefix = 'vendor/' + apk_dir + '/' + app + '/oat'
      selabel = 'selabel=u:object_r:vendor_app_file:s0 capabilities=0x0'
      vendor_file_system_config.writelines([
          vendor_app_prefix + ' 0 2000 755 ' + selabel + '\n',
          vendor_app_prefix + '/' + arch + ' 0 2000 755 ' + selabel + '\n',
          vendor_app_prefix + '/' + arch + '/' + app + '.odex 0 0 644 ' +
          selabel + '\n',
          vendor_app_prefix + '/' + arch + '/' + app + '.vdex 0 0 644 ' +
          selabel + '\n',
      ])

  # Close unconditionally; the previous code leaked the handle when
  # use_system_other_odex was true.
  vendor_file_system_config.close()

  if not use_system_other_odex:
    # Delete vendor.img so that it will be regenerated.
    # TODO(b/188179859): Rebuilding a vendor image in GRF mode (e.g.,
    # T(framework) and S(vendor) may require logic similar to that in
    # rebuild_image_with_sepolicy.
    vendor_img = os.path.join(output_target_files_dir, 'IMAGES', 'vendor.img')
    if os.path.exists(vendor_img):
      logger.info('Deleting %s', vendor_img)
      os.remove(vendor_img)

View File

@@ -0,0 +1,286 @@
#!/usr/bin/env python
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
"""Functions for merging META/* files from partial builds.
Expects items in OPTIONS prepared by merge_target_files.py.
"""
import logging
import os
import re
import shutil
import build_image
import common
import merge_utils
import sparse_img
import verity_utils
from common import ExternalError
logger = logging.getLogger(__name__)
OPTIONS = common.OPTIONS
# In apexkeys.txt or apkcerts.txt, we will find partition tags on each entry in
# the file. We use these partition tags to filter the entries in those files
# from the two different target files packages to produce a merged apexkeys.txt
# or apkcerts.txt file. A partition tag (e.g., for the product partition) looks
# like this: 'partition="product"'. We use the group syntax to grab the value of
# the tag. We use non-greedy matching in case there are other fields on the
# same line.
PARTITION_TAG_PATTERN = re.compile(r'partition="(.*?)"')
# The sorting algorithm for apexkeys.txt and apkcerts.txt does not include the
# ".apex" or ".apk" suffix, so we use the following pattern to extract a key.
MODULE_KEY_PATTERN = re.compile(r'name="(.+)\.(apex|apk)"')
def MergeMetaFiles(temp_dir, merged_dir):
  """Merges various files in META/*.

  Extracts META/ from both input target files packages into temp_dir and
  produces the merged META/ files under <merged_dir>/META.

  Args:
    temp_dir: Scratch directory used to hold each input's extracted META/.
    merged_dir: Root directory of the merged target files package.
  """
  framework_meta_dir = os.path.join(temp_dir, 'framework_meta', 'META')
  merge_utils.ExtractItems(
      input_zip=OPTIONS.framework_target_files,
      output_dir=os.path.dirname(framework_meta_dir),
      extract_item_list=('META/*',))
  vendor_meta_dir = os.path.join(temp_dir, 'vendor_meta', 'META')
  merge_utils.ExtractItems(
      input_zip=OPTIONS.vendor_target_files,
      output_dir=os.path.dirname(vendor_meta_dir),
      extract_item_list=('META/*',))
  merged_meta_dir = os.path.join(merged_dir, 'META')
  # Merge META/misc_info.txt into OPTIONS.merged_misc_info,
  # but do not write it yet. The following functions may further
  # modify this dict.
  OPTIONS.merged_misc_info = MergeMiscInfo(
      framework_meta_dir=framework_meta_dir,
      vendor_meta_dir=vendor_meta_dir,
      merged_meta_dir=merged_meta_dir)
  CopyNamedFileContexts(
      framework_meta_dir=framework_meta_dir,
      vendor_meta_dir=vendor_meta_dir,
      merged_meta_dir=merged_meta_dir)
  if OPTIONS.merged_misc_info.get('use_dynamic_partitions') == 'true':
    MergeDynamicPartitionsInfo(
        framework_meta_dir=framework_meta_dir,
        vendor_meta_dir=vendor_meta_dir,
        merged_meta_dir=merged_meta_dir)
  if OPTIONS.merged_misc_info.get('ab_update') == 'true':
    MergeAbPartitions(
        framework_meta_dir=framework_meta_dir,
        vendor_meta_dir=vendor_meta_dir,
        merged_meta_dir=merged_meta_dir)
    # Care map sizes are only needed for A/B update packages.
    UpdateCareMapImageSizeProps(images_dir=os.path.join(merged_dir, 'IMAGES'))
  for file_name in ('apkcerts.txt', 'apexkeys.txt'):
    MergePackageKeys(
        framework_meta_dir=framework_meta_dir,
        vendor_meta_dir=vendor_meta_dir,
        merged_meta_dir=merged_meta_dir,
        file_name=file_name)
  # Write the now-finalized OPTIONS.merged_misc_info.
  merge_utils.WriteSortedData(
      data=OPTIONS.merged_misc_info,
      path=os.path.join(merged_meta_dir, 'misc_info.txt'))
def MergeAbPartitions(framework_meta_dir, vendor_meta_dir, merged_meta_dir):
  """Merges META/ab_partitions.txt.

  The output contains the union of the partition names.
  """

  def read_ab_partitions(meta_dir):
    with open(os.path.join(meta_dir, 'ab_partitions.txt')) as f:
      return f.read().splitlines()

  union_of_partitions = set(read_ab_partitions(framework_meta_dir))
  union_of_partitions.update(read_ab_partitions(vendor_meta_dir))
  merge_utils.WriteSortedData(
      data=union_of_partitions,
      path=os.path.join(merged_meta_dir, 'ab_partitions.txt'))
def MergeMiscInfo(framework_meta_dir, vendor_meta_dir, merged_meta_dir):
  """Merges META/misc_info.txt.

  The output contains a combination of key=value pairs from both inputs.
  Most pairs are taken from the vendor input, while some are taken from
  the framework input.
  """
  OPTIONS.framework_misc_info = common.LoadDictionaryFromFile(
      os.path.join(framework_meta_dir, 'misc_info.txt'))
  OPTIONS.vendor_misc_info = common.LoadDictionaryFromFile(
      os.path.join(vendor_meta_dir, 'misc_info.txt'))

  # Start from the vendor values, then overlay the framework values for the
  # keys that the merge config says come from the framework build.
  merged_dict = OPTIONS.vendor_misc_info
  merged_dict.update({
      key: OPTIONS.framework_misc_info[key]
      for key in OPTIONS.framework_misc_info_keys
  })

  # If AVB is enabled then ensure that we build vbmeta.img.
  # Partial builds with AVB enabled may set PRODUCT_BUILD_VBMETA_IMAGE=false to
  # skip building an incomplete vbmeta.img.
  if merged_dict.get('avb_enable') == 'true':
    merged_dict['avb_building_vbmeta_image'] = 'true'
  return merged_dict
def MergeDynamicPartitionsInfo(framework_meta_dir, vendor_meta_dir,
                               merged_meta_dir):
  """Merge META/dynamic_partitions_info.txt."""

  def load_info_dict(meta_dir):
    return common.LoadDictionaryFromFile(
        os.path.join(meta_dir, 'dynamic_partitions_info.txt'))

  merged_dynamic_partitions_dict = common.MergeDynamicPartitionInfoDicts(
      framework_dict=load_info_dict(framework_meta_dir),
      vendor_dict=load_info_dict(vendor_meta_dir))
  merge_utils.WriteSortedData(
      data=merged_dynamic_partitions_dict,
      path=os.path.join(merged_meta_dir, 'dynamic_partitions_info.txt'))

  # Merge misc info keys used for Dynamic Partitions.
  OPTIONS.merged_misc_info.update(merged_dynamic_partitions_dict)
  # Ensure that add_img_to_target_files rebuilds super split images for
  # devices that retrofit dynamic partitions. This flag may have been set to
  # false in the partial builds to prevent duplicate building of super.img.
  OPTIONS.merged_misc_info['build_super_partition'] = 'true'
def MergePackageKeys(framework_meta_dir, vendor_meta_dir, merged_meta_dir,
                     file_name):
  """Merges APK/APEX key list files (apkcerts.txt or apexkeys.txt).

  Args:
    framework_meta_dir: META/ dir extracted from the framework build.
    vendor_meta_dir: META/ dir extracted from the vendor build.
    merged_meta_dir: Output META/ dir for the merged build.
    file_name: Either 'apkcerts.txt' or 'apexkeys.txt'.

  Raises:
    ExternalError: If file_name is not one of the supported files.
    ValueError: If an entry lacks a partition tag, or on duplicate keys when
      duplicates are not explicitly allowed.
  """
  if file_name not in ('apkcerts.txt', 'apexkeys.txt'):
    # Interpolate explicitly: exception constructors do not apply %-style
    # formatting, so passing file_name as a second arg would leave the
    # '%s' placeholder literal in the message.
    raise ExternalError(
        'Unexpected file_name provided to merge_package_keys_txt: %s' %
        file_name)

  def read_helper(d):
    # Maps module name (without the .apk/.apex suffix) -> full entry line.
    temp = {}
    with open(os.path.join(d, file_name)) as f:
      for line in f.read().splitlines():
        line = line.strip()
        if line:
          name_search = MODULE_KEY_PATTERN.search(line.split()[0])
          temp[name_search.group(1)] = line
    return temp

  framework_dict = read_helper(framework_meta_dir)
  vendor_dict = read_helper(vendor_meta_dir)
  merged_dict = {}

  def filter_into_merged_dict(item_dict, partition_set):
    for key, value in item_dict.items():
      tag_search = PARTITION_TAG_PATTERN.search(value)
      if tag_search is None:
        raise ValueError('Entry missing partition tag: %s' % value)
      partition_tag = tag_search.group(1)
      if partition_tag in partition_set:
        if key in merged_dict:
          if OPTIONS.allow_duplicate_apkapex_keys:
            # TODO(b/150582573) Always raise on duplicates.
            logger.warning('Duplicate key %s', key)
            continue
          else:
            raise ValueError('Duplicate key %s' % key)
        merged_dict[key] = value

  # Prioritize framework keys first.
  # Duplicate keys from vendor are an error, or ignored.
  filter_into_merged_dict(framework_dict, OPTIONS.framework_partition_set)
  filter_into_merged_dict(vendor_dict, OPTIONS.vendor_partition_set)

  # The following code is similar to WriteSortedData, but different enough
  # that we couldn't use that function. We need the output to be sorted by the
  # basename of the apex/apk (without the ".apex" or ".apk" suffix). This
  # allows the sort to be consistent with the framework/vendor input data and
  # eases comparison of input data with merged data.
  with open(os.path.join(merged_meta_dir, file_name), 'w') as output:
    for _, value in sorted(merged_dict.items()):
      output.write(value + '\n')
def CopyNamedFileContexts(framework_meta_dir, vendor_meta_dir, merged_meta_dir):
  """Creates named copies of each partial build's file_contexts.bin.

  Used when regenerating images from the partial build.

  Raises:
    ValueError: If a partial build provides no file_contexts file.
  """

  def copy_fc_file(source_dir, file_name):
    for name in (file_name, 'file_contexts.bin'):
      fc_path = os.path.join(source_dir, name)
      if os.path.exists(fc_path):
        shutil.copyfile(fc_path, os.path.join(merged_meta_dir, file_name))
        return
    # Interpolate explicitly: ValueError does not apply %-style formatting to
    # extra positional args, so the old call produced a literal '%s' message.
    raise ValueError('Missing file_contexts file from %s: %s' %
                     (source_dir, file_name))

  copy_fc_file(framework_meta_dir, 'framework_file_contexts.bin')
  copy_fc_file(vendor_meta_dir, 'vendor_file_contexts.bin')

  # Replace <image>_selinux_fc values with framework or vendor file_contexts.bin
  # depending on which dictionary the key came from.
  # Only the file basename is required because all selinux_fc properties are
  # replaced with the full path to the file under META/ when misc_info.txt is
  # loaded from target files for repacking. See common.py LoadInfoDict().
  for key in OPTIONS.vendor_misc_info:
    if key.endswith('_selinux_fc'):
      OPTIONS.merged_misc_info[key] = 'vendor_file_contexts.bin'
  for key in OPTIONS.framework_misc_info:
    if key.endswith('_selinux_fc'):
      OPTIONS.merged_misc_info[key] = 'framework_file_contexts.bin'
def UpdateCareMapImageSizeProps(images_dir):
  """Sets <partition>_image_size props in misc_info.

  add_images_to_target_files uses these props to generate META/care_map.pb.
  Regenerated images will have this property set during regeneration.

  However, images copied directly from input partial target files packages
  need this value calculated here.

  Args:
    images_dir: The IMAGES/ directory of the merged target files package.
  """
  for partition in common.PARTITIONS_WITH_CARE_MAP:
    image_path = os.path.join(images_dir, '{}.img'.format(partition))
    if os.path.exists(image_path):
      # Read the partition size from the sparse image header, then ask the
      # verity builder for the maximum usable image size within that partition.
      partition_size = sparse_img.GetImagePartitionSize(image_path)
      image_props = build_image.ImagePropFromGlobalDict(
          OPTIONS.merged_misc_info, partition)
      verity_image_builder = verity_utils.CreateVerityImageBuilder(image_props)
      image_size = verity_image_builder.CalculateMaxImageSize(partition_size)
      OPTIONS.merged_misc_info['{}_image_size'.format(partition)] = image_size

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,187 @@
#!/usr/bin/env python
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
"""Common utility functions shared by merge_* scripts.
Expects items in OPTIONS prepared by merge_target_files.py.
"""
import fnmatch
import logging
import os
import re
import shutil
import zipfile
import common
logger = logging.getLogger(__name__)
OPTIONS = common.OPTIONS
def ExtractItems(input_zip, output_dir, extract_item_list):
  """Extracts items in extract_item_list from a zip to a dir."""
  # Drop any patterns that match nothing in the zip; passing an unmatched
  # pattern to the extraction step would make it fail.
  with zipfile.ZipFile(input_zip, allowZip64=True) as input_zipfile:
    input_namelist = input_zipfile.namelist()

  filtered_patterns = [
      pattern for pattern in extract_item_list
      if fnmatch.filter(input_namelist, pattern)
  ]
  common.UnzipToDir(input_zip, output_dir, filtered_patterns)
def CopyItems(from_dir, to_dir, patterns):
  """Similar to ExtractItems() except uses an input dir instead of zip."""
  # Collect every file path under from_dir, relative to from_dir.
  all_paths = []
  for dirpath, _, filenames in os.walk(from_dir):
    for filename in filenames:
      all_paths.append(
          os.path.relpath(os.path.join(dirpath, filename), start=from_dir))

  # Keep each file that matches at least one of the patterns.
  matched_paths = set()
  for pattern in patterns:
    matched_paths.update(fnmatch.filter(all_paths, pattern))

  for rel_path in matched_paths:
    src_path = os.path.join(from_dir, rel_path)
    dst_path = os.path.join(to_dir, rel_path)
    dst_dir = os.path.dirname(dst_path)
    if not os.path.exists(dst_dir):
      os.makedirs(dst_dir)
    if os.path.islink(src_path):
      # Preserve symlinks as symlinks rather than copying their targets.
      os.symlink(os.readlink(src_path), dst_path)
    else:
      shutil.copyfile(src_path, dst_path)
def WriteSortedData(data, path):
  """Writes the sorted contents of either a list or dict to file.

  This function sorts the contents of the list or dict and then writes the
  resulting sorted contents to a file specified by path.

  Args:
    data: The list or dict to sort and write.
    path: Path to the file to write the sorted values to. The file at path will
      be overridden if it exists.
  """
  is_mapping = isinstance(data, dict)
  with open(path, 'w') as output:
    for entry in sorted(data):
      if is_mapping:
        output.write('{}={}\n'.format(entry, data[entry]))
      else:
        output.write('{}\n'.format(entry))
# The merge config lists should not attempt to extract items from both
# builds for any of the following partitions. The partitions in
# SINGLE_BUILD_PARTITIONS should come entirely from a single build (either
# framework or vendor, but not both).
_SINGLE_BUILD_PARTITIONS = (
    'BOOT/',
    'DATA/',
    'ODM/',
    'PRODUCT/',
    'SYSTEM_EXT/',
    'RADIO/',
    'RECOVERY/',
    'ROOT/',
    'SYSTEM/',
    'SYSTEM_OTHER/',
    'VENDOR/',
    'VENDOR_DLKM/',
    'ODM_DLKM/',
    'SYSTEM_DLKM/',
)


def ValidateConfigLists():
  """Performs validations on the merge config lists.

  Returns:
    False if a validation fails, otherwise true.
  """
  has_error = False

  def provided_by(item_list, partition, image_path):
    # True if the item list pulls any file from the partition, or its image.
    return (any(item.startswith(partition) for item in item_list) or
            image_path in item_list)

  # Check that partitions only come from one input.
  for partition in _SINGLE_BUILD_PARTITIONS:
    image_path = 'IMAGES/{}.img'.format(partition.lower().replace('/', ''))
    in_framework = provided_by(OPTIONS.framework_item_list, partition,
                               image_path)
    in_vendor = provided_by(OPTIONS.vendor_item_list, partition, image_path)
    if in_framework and in_vendor:
      logger.error(
          'Cannot extract items from %s for both the framework and vendor'
          ' builds. Please ensure only one merge config item list'
          ' includes %s.', partition, partition)
      has_error = True

  # Dynamic partition info must come from the vendor build's misc_info.txt.
  if any(key in OPTIONS.framework_misc_info_keys
         for key in ('dynamic_partition_list', 'super_partition_groups')):
    logger.error('Dynamic partition misc info keys should come from '
                 'the vendor instance of META/misc_info.txt.')
    has_error = True

  return not has_error
# In an item list (framework or vendor), we may see entries that select whole
# partitions. Such an entry might look like this 'SYSTEM/*' (e.g., for the
# system partition). The following regex matches this and extracts the
# partition name.
_PARTITION_ITEM_PATTERN = re.compile(r'^([A-Z_]+)/\*$')


def ItemListToPartitionSet(item_list):
  """Converts a target files item list to a partition set.

  The item list contains items that might look like 'SYSTEM/*' or 'VENDOR/*' or
  'OTA/android-info.txt'. Items that end in '/*' are assumed to match entire
  directories where 'SYSTEM' or 'VENDOR' is a directory name that identifies
  the contents of a partition of the same name. Other items in the list, such
  as the 'OTA' example contain metadata. This function iterates such a list,
  returning a set that contains the partition entries.

  Args:
    item_list: A list of items in a target files package.

  Returns:
    A set of partitions extracted from the list of items.
  """
  partitions = set()
  for item in item_list:
    match = _PARTITION_ITEM_PATTERN.search(item.strip())
    if match:
      partitions.add(match.group(1).lower())
  return partitions

View File

@@ -0,0 +1,114 @@
#
# Copyright (C) 2022 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os.path
import shutil
import common
import merge_compatibility_checks
import merge_target_files
import test_utils
class MergeCompatibilityChecksTest(test_utils.ReleaseToolsTestCase):
  """Tests for the checks in merge_compatibility_checks."""

  def setUp(self):
    self.testdata_dir = test_utils.get_testdata_dir()
    self.partition_map = {
        'system': 'system',
        'system_ext': 'system_ext',
        'product': 'product',
        'vendor': 'vendor',
        'odm': 'odm',
    }
    self.OPTIONS = merge_target_files.OPTIONS
    self.OPTIONS.framework_partition_set = {'product', 'system', 'system_ext'}
    self.OPTIONS.vendor_partition_set = {'odm', 'vendor'}

  def test_CheckCombinedSepolicy(self):
    product_out_dir = common.MakeTempDir()

    def write_temp_file(path, data=''):
      # Creates `path` (and any missing parent dirs) under the fake
      # PRODUCT_OUT directory, with the given contents.
      full_path = os.path.join(product_out_dir, path)
      os.makedirs(os.path.dirname(full_path), exist_ok=True)
      with open(full_path, 'w') as f:
        f.write(data)

    write_temp_file(
        'system/etc/vintf/compatibility_matrix.device.xml', """
      <compatibility-matrix>
        <sepolicy>
          <kernel-sepolicy-version>30</kernel-sepolicy-version>
        </sepolicy>
      </compatibility-matrix>""")
    write_temp_file('vendor/etc/selinux/plat_sepolicy_vers.txt', '30.0')
    write_temp_file('system/etc/selinux/plat_sepolicy.cil')
    write_temp_file('system/etc/selinux/mapping/30.0.cil')
    write_temp_file('product/etc/selinux/mapping/30.0.cil')
    write_temp_file('vendor/etc/selinux/vendor_sepolicy.cil')
    write_temp_file('vendor/etc/selinux/plat_pub_versioned.cil')

    cmd = merge_compatibility_checks.CheckCombinedSepolicy(
        product_out_dir, self.partition_map, execute=False)
    expected = ('secilc -m -M true -G -N -c 30 '
                '-o {OTP}/META/combined_sepolicy -f /dev/null '
                '{OTP}/system/etc/selinux/plat_sepolicy.cil '
                '{OTP}/system/etc/selinux/mapping/30.0.cil '
                '{OTP}/vendor/etc/selinux/vendor_sepolicy.cil '
                '{OTP}/vendor/etc/selinux/plat_pub_versioned.cil '
                '{OTP}/product/etc/selinux/mapping/30.0.cil').format(
                    OTP=product_out_dir)
    self.assertEqual(' '.join(cmd), expected)

  def _install_apex(self, source, output_dir, partition):
    """Copies an apex file into <output_dir>/<partition>/apex/."""
    dest = os.path.join(output_dir, partition, 'apex',
                        os.path.basename(source))
    shutil.copy(source, dest)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_CheckApexDuplicatePackages(self):
    output_dir = common.MakeTempDir()
    os.makedirs(os.path.join(output_dir, 'SYSTEM/apex'))
    os.makedirs(os.path.join(output_dir, 'VENDOR/apex'))

    # Different packages in different partitions: no duplicates expected.
    self._install_apex(
        os.path.join(self.testdata_dir, 'has_apk.apex'), output_dir, 'SYSTEM')
    self._install_apex(
        os.path.join(test_utils.get_current_dir(),
                     'com.android.apex.compressed.v1.capex'), output_dir,
        'VENDOR')

    errors = merge_compatibility_checks.CheckApexDuplicatePackages(
        output_dir, self.partition_map)
    self.assertEqual(len(errors), 0)

  @test_utils.SkipIfExternalToolsUnavailable()
  def test_CheckApexDuplicatePackages_RaisesOnPackageInMultiplePartitions(self):
    output_dir = common.MakeTempDir()
    os.makedirs(os.path.join(output_dir, 'SYSTEM/apex'))
    os.makedirs(os.path.join(output_dir, 'VENDOR/apex'))

    # The same package installed in two partitions must be reported.
    same_apex_package = os.path.join(self.testdata_dir, 'has_apk.apex')
    self._install_apex(same_apex_package, output_dir, 'SYSTEM')
    self._install_apex(same_apex_package, output_dir, 'VENDOR')

    errors = merge_compatibility_checks.CheckApexDuplicatePackages(
        output_dir, self.partition_map)
    self.assertEqual(
        errors[0],
        'Duplicate APEX package_names found in multiple partitions: com.android.wifi'
    )

View File

@@ -0,0 +1,110 @@
#
# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os.path
import shutil
import common
import merge_meta
import merge_target_files
import test_utils
class MergeMetaTest(test_utils.ReleaseToolsTestCase):
  """Tests for merge_meta.MergePackageKeys.

  Improvements over the original: the pointless `return` of
  assertEqual's None result is dropped, and the repeated
  symlink-testdata / read-and-compare boilerplate is factored into
  private helpers.
  """

  def setUp(self):
    self.testdata_dir = test_utils.get_testdata_dir()
    self.OPTIONS = merge_target_files.OPTIONS
    self.OPTIONS.framework_partition_set = set(
        ['product', 'system', 'system_ext'])
    self.OPTIONS.vendor_partition_set = set(['odm', 'vendor'])

  def _symlink_testdata(self, testdata_name, meta_dir, link_name):
    """Links a testdata file into a fake META directory as link_name."""
    os.symlink(
        os.path.join(self.testdata_dir, testdata_name),
        os.path.join(meta_dir, link_name))

  def _assert_files_equal(self, expected_path, actual_path):
    """Asserts that two text files have identical lines."""
    with open(expected_path) as f:
      expected_entries = f.read().split('\n')
    with open(actual_path) as f:
      actual_entries = f.read().split('\n')
    self.assertEqual(expected_entries, actual_entries)

  def test_MergePackageKeys_ReturnsTrueIfNoConflicts(self):
    output_meta_dir = common.MakeTempDir()

    framework_meta_dir = common.MakeTempDir()
    self._symlink_testdata('apexkeys_framework.txt', framework_meta_dir,
                           'apexkeys.txt')

    vendor_meta_dir = common.MakeTempDir()
    self._symlink_testdata('apexkeys_vendor.txt', vendor_meta_dir,
                           'apexkeys.txt')

    merge_meta.MergePackageKeys(framework_meta_dir, vendor_meta_dir,
                                output_meta_dir, 'apexkeys.txt')

    # The merged output must match the golden testdata file.
    self._assert_files_equal(
        os.path.join(self.testdata_dir, 'apexkeys_merge.txt'),
        os.path.join(output_meta_dir, 'apexkeys.txt'))

  def test_MergePackageKeys_ReturnsFalseIfConflictsPresent(self):
    output_meta_dir = common.MakeTempDir()

    framework_meta_dir = common.MakeTempDir()
    self._symlink_testdata('apexkeys_framework.txt', framework_meta_dir,
                           'apexkeys.txt')

    conflict_meta_dir = common.MakeTempDir()
    self._symlink_testdata('apexkeys_framework_conflict.txt',
                           conflict_meta_dir, 'apexkeys.txt')

    # Conflicting keys for the same package must raise.
    self.assertRaises(ValueError, merge_meta.MergePackageKeys,
                      framework_meta_dir, conflict_meta_dir, output_meta_dir,
                      'apexkeys.txt')

  def test_MergePackageKeys_HandlesApkCertsSyntax(self):
    output_meta_dir = common.MakeTempDir()

    framework_meta_dir = common.MakeTempDir()
    self._symlink_testdata('apkcerts_framework.txt', framework_meta_dir,
                           'apkcerts.txt')

    vendor_meta_dir = common.MakeTempDir()
    self._symlink_testdata('apkcerts_vendor.txt', vendor_meta_dir,
                           'apkcerts.txt')

    merge_meta.MergePackageKeys(framework_meta_dir, vendor_meta_dir,
                                output_meta_dir, 'apkcerts.txt')

    self._assert_files_equal(
        os.path.join(self.testdata_dir, 'apkcerts_merge.txt'),
        os.path.join(output_meta_dir, 'apkcerts.txt'))

View File

@@ -1,288 +0,0 @@
#
# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os.path
import shutil
import common
import merge_target_files
import test_utils
from merge_target_files import (
validate_config_lists, DEFAULT_FRAMEWORK_ITEM_LIST,
DEFAULT_VENDOR_ITEM_LIST, DEFAULT_FRAMEWORK_MISC_INFO_KEYS, copy_items,
item_list_to_partition_set, merge_package_keys_txt, compile_split_sepolicy,
validate_merged_apex_info)
class MergeTargetFilesTest(test_utils.ReleaseToolsTestCase):
  """Tests for the monolithic merge_target_files module.

  Covers item copying, merge config list validation, package-key merging,
  split-sepolicy compilation and merged-APEX validation.
  """
  def setUp(self):
    self.testdata_dir = test_utils.get_testdata_dir()
    self.OPTIONS = merge_target_files.OPTIONS
    self.OPTIONS.framework_item_list = DEFAULT_FRAMEWORK_ITEM_LIST
    self.OPTIONS.framework_misc_info_keys = DEFAULT_FRAMEWORK_MISC_INFO_KEYS
    self.OPTIONS.vendor_item_list = DEFAULT_VENDOR_ITEM_LIST
    self.OPTIONS.framework_partition_set = set(
        ['product', 'system', 'system_ext'])
    self.OPTIONS.vendor_partition_set = set(['odm', 'vendor'])
  def test_copy_items_CopiesItemsMatchingPatterns(self):
    def createEmptyFile(path):
      # Creates path and any missing parent directories.
      if not os.path.exists(os.path.dirname(path)):
        os.makedirs(os.path.dirname(path))
      open(path, 'a').close()
      return path
    def createSymLink(source, dest):
      os.symlink(source, dest)
      return dest
    def getRelPaths(start, filepaths):
      # Normalizes absolute paths to paths relative to `start` for comparison.
      return set(
          os.path.relpath(path=filepath, start=start) for filepath in filepaths)
    input_dir = common.MakeTempDir()
    output_dir = common.MakeTempDir()
    expected_copied_items = []
    actual_copied_items = []
    patterns = ['*.cpp', 'subdir/*.txt']
    # Create various files that we expect to get copied because they
    # match one of the patterns.
    expected_copied_items.extend([
        createEmptyFile(os.path.join(input_dir, 'a.cpp')),
        createEmptyFile(os.path.join(input_dir, 'b.cpp')),
        createEmptyFile(os.path.join(input_dir, 'subdir', 'c.txt')),
        createEmptyFile(os.path.join(input_dir, 'subdir', 'd.txt')),
        createEmptyFile(
            os.path.join(input_dir, 'subdir', 'subsubdir', 'e.txt')),
        createSymLink('a.cpp', os.path.join(input_dir, 'a_link.cpp')),
    ])
    # Create some more files that we expect to not get copied.
    createEmptyFile(os.path.join(input_dir, 'a.h'))
    createEmptyFile(os.path.join(input_dir, 'b.h'))
    createEmptyFile(os.path.join(input_dir, 'subdir', 'subsubdir', 'f.gif'))
    createSymLink('a.h', os.path.join(input_dir, 'a_link.h'))
    # Copy items.
    copy_items(input_dir, output_dir, patterns)
    # Assert the actual copied items match the ones we expected.
    for dirpath, _, filenames in os.walk(output_dir):
      actual_copied_items.extend(
          os.path.join(dirpath, filename) for filename in filenames)
    self.assertEqual(
        getRelPaths(output_dir, actual_copied_items),
        getRelPaths(input_dir, expected_copied_items))
    self.assertEqual(
        os.readlink(os.path.join(output_dir, 'a_link.cpp')), 'a.cpp')
  def test_validate_config_lists_ReturnsFalseIfMissingDefaultItem(self):
    self.OPTIONS.framework_item_list = list(DEFAULT_FRAMEWORK_ITEM_LIST)
    self.OPTIONS.framework_item_list.remove('SYSTEM/*')
    self.assertFalse(validate_config_lists())
  def test_validate_config_lists_ReturnsTrueIfDefaultItemInDifferentList(self):
    self.OPTIONS.framework_item_list = list(DEFAULT_FRAMEWORK_ITEM_LIST)
    self.OPTIONS.framework_item_list.remove('ROOT/*')
    self.OPTIONS.vendor_item_list = list(DEFAULT_VENDOR_ITEM_LIST)
    self.OPTIONS.vendor_item_list.append('ROOT/*')
    self.assertTrue(validate_config_lists())
  def test_validate_config_lists_ReturnsTrueIfExtraItem(self):
    self.OPTIONS.framework_item_list = list(DEFAULT_FRAMEWORK_ITEM_LIST)
    self.OPTIONS.framework_item_list.append('MY_NEW_PARTITION/*')
    self.assertTrue(validate_config_lists())
  def test_validate_config_lists_ReturnsFalseIfSharedExtractedPartition(self):
    self.OPTIONS.vendor_item_list = list(DEFAULT_VENDOR_ITEM_LIST)
    self.OPTIONS.vendor_item_list.append('SYSTEM/my_system_file')
    self.assertFalse(validate_config_lists())
  def test_validate_config_lists_ReturnsFalseIfSharedExtractedPartitionImage(
      self):
    self.OPTIONS.vendor_item_list = list(DEFAULT_VENDOR_ITEM_LIST)
    self.OPTIONS.vendor_item_list.append('IMAGES/system.img')
    self.assertFalse(validate_config_lists())
  def test_validate_config_lists_ReturnsFalseIfBadSystemMiscInfoKeys(self):
    for bad_key in ['dynamic_partition_list', 'super_partition_groups']:
      self.OPTIONS.framework_misc_info_keys = list(
          DEFAULT_FRAMEWORK_MISC_INFO_KEYS)
      self.OPTIONS.framework_misc_info_keys.append(bad_key)
      self.assertFalse(validate_config_lists())
  def test_merge_package_keys_txt_ReturnsTrueIfNoConflicts(self):
    output_meta_dir = common.MakeTempDir()
    framework_meta_dir = common.MakeTempDir()
    os.symlink(
        os.path.join(self.testdata_dir, 'apexkeys_framework.txt'),
        os.path.join(framework_meta_dir, 'apexkeys.txt'))
    vendor_meta_dir = common.MakeTempDir()
    os.symlink(
        os.path.join(self.testdata_dir, 'apexkeys_vendor.txt'),
        os.path.join(vendor_meta_dir, 'apexkeys.txt'))
    merge_package_keys_txt(framework_meta_dir, vendor_meta_dir, output_meta_dir,
                           'apexkeys.txt')
    merged_entries = []
    merged_path = os.path.join(self.testdata_dir, 'apexkeys_merge.txt')
    with open(merged_path) as f:
      merged_entries = f.read().split('\n')
    output_entries = []
    output_path = os.path.join(output_meta_dir, 'apexkeys.txt')
    with open(output_path) as f:
      output_entries = f.read().split('\n')
    # NOTE(review): returning assertEqual's result (None) has no effect.
    return self.assertEqual(merged_entries, output_entries)
  def test_process_apex_keys_apk_certs_ReturnsFalseIfConflictsPresent(self):
    output_meta_dir = common.MakeTempDir()
    framework_meta_dir = common.MakeTempDir()
    os.symlink(
        os.path.join(self.testdata_dir, 'apexkeys_framework.txt'),
        os.path.join(framework_meta_dir, 'apexkeys.txt'))
    conflict_meta_dir = common.MakeTempDir()
    os.symlink(
        os.path.join(self.testdata_dir, 'apexkeys_framework_conflict.txt'),
        os.path.join(conflict_meta_dir, 'apexkeys.txt'))
    self.assertRaises(ValueError, merge_package_keys_txt, framework_meta_dir,
                      conflict_meta_dir, output_meta_dir, 'apexkeys.txt')
  def test_process_apex_keys_apk_certs_HandlesApkCertsSyntax(self):
    output_meta_dir = common.MakeTempDir()
    framework_meta_dir = common.MakeTempDir()
    os.symlink(
        os.path.join(self.testdata_dir, 'apkcerts_framework.txt'),
        os.path.join(framework_meta_dir, 'apkcerts.txt'))
    vendor_meta_dir = common.MakeTempDir()
    os.symlink(
        os.path.join(self.testdata_dir, 'apkcerts_vendor.txt'),
        os.path.join(vendor_meta_dir, 'apkcerts.txt'))
    merge_package_keys_txt(framework_meta_dir, vendor_meta_dir, output_meta_dir,
                           'apkcerts.txt')
    merged_entries = []
    merged_path = os.path.join(self.testdata_dir, 'apkcerts_merge.txt')
    with open(merged_path) as f:
      merged_entries = f.read().split('\n')
    output_entries = []
    output_path = os.path.join(output_meta_dir, 'apkcerts.txt')
    with open(output_path) as f:
      output_entries = f.read().split('\n')
    # NOTE(review): returning assertEqual's result (None) has no effect.
    return self.assertEqual(merged_entries, output_entries)
  def test_item_list_to_partition_set(self):
    item_list = [
        'META/apexkeys.txt',
        'META/apkcerts.txt',
        'META/filesystem_config.txt',
        'PRODUCT/*',
        'SYSTEM/*',
        'SYSTEM_EXT/*',
    ]
    partition_set = item_list_to_partition_set(item_list)
    self.assertEqual(set(['product', 'system', 'system_ext']), partition_set)
  def test_compile_split_sepolicy(self):
    product_out_dir = common.MakeTempDir()
    def write_temp_file(path, data=''):
      # Creates `path` (and parents) under the fake PRODUCT_OUT directory.
      full_path = os.path.join(product_out_dir, path)
      if not os.path.exists(os.path.dirname(full_path)):
        os.makedirs(os.path.dirname(full_path))
      with open(full_path, 'w') as f:
        f.write(data)
    write_temp_file(
        'system/etc/vintf/compatibility_matrix.device.xml', """
      <compatibility-matrix>
        <sepolicy>
          <kernel-sepolicy-version>30</kernel-sepolicy-version>
        </sepolicy>
      </compatibility-matrix>""")
    write_temp_file('vendor/etc/selinux/plat_sepolicy_vers.txt', '30.0')
    write_temp_file('system/etc/selinux/plat_sepolicy.cil')
    write_temp_file('system/etc/selinux/mapping/30.0.cil')
    write_temp_file('product/etc/selinux/mapping/30.0.cil')
    write_temp_file('vendor/etc/selinux/vendor_sepolicy.cil')
    write_temp_file('vendor/etc/selinux/plat_pub_versioned.cil')
    cmd = compile_split_sepolicy(product_out_dir, {
        'system': 'system',
        'product': 'product',
        'vendor': 'vendor',
    })
    self.assertEqual(' '.join(cmd),
                     ('secilc -m -M true -G -N -c 30 '
                      '-o {OTP}/META/combined_sepolicy -f /dev/null '
                      '{OTP}/system/etc/selinux/plat_sepolicy.cil '
                      '{OTP}/system/etc/selinux/mapping/30.0.cil '
                      '{OTP}/vendor/etc/selinux/vendor_sepolicy.cil '
                      '{OTP}/vendor/etc/selinux/plat_pub_versioned.cil '
                      '{OTP}/product/etc/selinux/mapping/30.0.cil').format(
                          OTP=product_out_dir))
  def _copy_apex(self, source, output_dir, partition):
    # Copies an apex file into <output_dir>/<partition>/apex/.
    shutil.copy(
        source,
        os.path.join(output_dir, partition, 'apex', os.path.basename(source)))
  @test_utils.SkipIfExternalToolsUnavailable()
  def test_validate_merged_apex_info(self):
    output_dir = common.MakeTempDir()
    os.makedirs(os.path.join(output_dir, 'SYSTEM/apex'))
    os.makedirs(os.path.join(output_dir, 'VENDOR/apex'))
    self._copy_apex(
        os.path.join(self.testdata_dir, 'has_apk.apex'), output_dir, 'SYSTEM')
    self._copy_apex(
        os.path.join(test_utils.get_current_dir(),
                     'com.android.apex.compressed.v1.capex'), output_dir,
        'VENDOR')
    validate_merged_apex_info(output_dir, ('system', 'vendor'))
  @test_utils.SkipIfExternalToolsUnavailable()
  def test_validate_merged_apex_info_RaisesOnPackageInMultiplePartitions(self):
    output_dir = common.MakeTempDir()
    os.makedirs(os.path.join(output_dir, 'SYSTEM/apex'))
    os.makedirs(os.path.join(output_dir, 'VENDOR/apex'))
    same_apex_package = os.path.join(self.testdata_dir, 'has_apk.apex')
    self._copy_apex(same_apex_package, output_dir, 'SYSTEM')
    self._copy_apex(same_apex_package, output_dir, 'VENDOR')
    # NOTE(review): assertRaisesRegexp is deprecated; the modern spelling is
    # assertRaisesRegex.
    self.assertRaisesRegexp(
        common.ExternalError,
        'Duplicate APEX packages found in multiple partitions: com.android.wifi',
        validate_merged_apex_info, output_dir, ('system', 'vendor'))

View File

@@ -0,0 +1,118 @@
#
# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os.path
import common
import merge_target_files
import merge_utils
import test_utils
from merge_target_files import (
DEFAULT_FRAMEWORK_ITEM_LIST,
DEFAULT_VENDOR_ITEM_LIST,
DEFAULT_FRAMEWORK_MISC_INFO_KEYS,
)
class MergeUtilsTest(test_utils.ReleaseToolsTestCase):
  """Tests for the item-copying and config-validation helpers in merge_utils."""
  def setUp(self):
    # Start each test from the default merge config lists so per-test
    # mutations do not leak between tests.
    self.OPTIONS = merge_target_files.OPTIONS
    self.OPTIONS.framework_item_list = DEFAULT_FRAMEWORK_ITEM_LIST
    self.OPTIONS.framework_misc_info_keys = DEFAULT_FRAMEWORK_MISC_INFO_KEYS
    self.OPTIONS.vendor_item_list = DEFAULT_VENDOR_ITEM_LIST
def test_CopyItems_CopiesItemsMatchingPatterns(self):
def createEmptyFile(path):
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
open(path, 'a').close()
return path
def createSymLink(source, dest):
os.symlink(source, dest)
return dest
def getRelPaths(start, filepaths):
return set(
os.path.relpath(path=filepath, start=start) for filepath in filepaths)
input_dir = common.MakeTempDir()
output_dir = common.MakeTempDir()
expected_copied_items = []
actual_copied_items = []
patterns = ['*.cpp', 'subdir/*.txt']
# Create various files that we expect to get copied because they
# match one of the patterns.
expected_copied_items.extend([
createEmptyFile(os.path.join(input_dir, 'a.cpp')),
createEmptyFile(os.path.join(input_dir, 'b.cpp')),
createEmptyFile(os.path.join(input_dir, 'subdir', 'c.txt')),
createEmptyFile(os.path.join(input_dir, 'subdir', 'd.txt')),
createEmptyFile(
os.path.join(input_dir, 'subdir', 'subsubdir', 'e.txt')),
createSymLink('a.cpp', os.path.join(input_dir, 'a_link.cpp')),
])
# Create some more files that we expect to not get copied.
createEmptyFile(os.path.join(input_dir, 'a.h'))
createEmptyFile(os.path.join(input_dir, 'b.h'))
createEmptyFile(os.path.join(input_dir, 'subdir', 'subsubdir', 'f.gif'))
createSymLink('a.h', os.path.join(input_dir, 'a_link.h'))
# Copy items.
merge_utils.CopyItems(input_dir, output_dir, patterns)
# Assert the actual copied items match the ones we expected.
for dirpath, _, filenames in os.walk(output_dir):
actual_copied_items.extend(
os.path.join(dirpath, filename) for filename in filenames)
self.assertEqual(
getRelPaths(output_dir, actual_copied_items),
getRelPaths(input_dir, expected_copied_items))
self.assertEqual(
os.readlink(os.path.join(output_dir, 'a_link.cpp')), 'a.cpp')
def test_ValidateConfigLists_ReturnsFalseIfSharedExtractedPartition(self):
self.OPTIONS.vendor_item_list = list(DEFAULT_VENDOR_ITEM_LIST)
self.OPTIONS.vendor_item_list.append('SYSTEM/my_system_file')
self.assertFalse(merge_utils.ValidateConfigLists())
def test_ValidateConfigLists_ReturnsFalseIfSharedExtractedPartitionImage(
self):
self.OPTIONS.vendor_item_list = list(DEFAULT_VENDOR_ITEM_LIST)
self.OPTIONS.vendor_item_list.append('IMAGES/system.img')
self.assertFalse(merge_utils.ValidateConfigLists())
def test_ValidateConfigLists_ReturnsFalseIfBadSystemMiscInfoKeys(self):
for bad_key in ['dynamic_partition_list', 'super_partition_groups']:
self.OPTIONS.framework_misc_info_keys = list(
DEFAULT_FRAMEWORK_MISC_INFO_KEYS)
self.OPTIONS.framework_misc_info_keys.append(bad_key)
self.assertFalse(merge_utils.ValidateConfigLists())
def test_ItemListToPartitionSet(self):
item_list = [
'META/apexkeys.txt',
'META/apkcerts.txt',
'META/filesystem_config.txt',
'PRODUCT/*',
'SYSTEM/*',
'SYSTEM_EXT/*',
]
partition_set = merge_utils.ItemListToPartitionSet(item_list)
self.assertEqual(set(['product', 'system', 'system_ext']), partition_set)