Make change and version bump to AP4A.240914.001

Snap for 12368321 from 4bd215d9e0 to 24Q4-release

Change-Id: I03b356de86d4bd572d894a4de9f26e3479339065
This commit is contained in:
Android Build Coastguard Worker
2024-09-14 01:00:43 +00:00
10 changed files with 557 additions and 60 deletions

View File

@@ -16,12 +16,13 @@
from abc import ABC from abc import ABC
import argparse import argparse
import functools import functools
from build_context import BuildContext
import json import json
import logging import logging
import os import os
from typing import Self import pathlib
import subprocess
from build_context import BuildContext
import test_mapping_module_retriever import test_mapping_module_retriever
@@ -33,6 +34,9 @@ class OptimizedBuildTarget(ABC):
build. build.
""" """
_SOONG_UI_BASH_PATH = 'build/soong/soong_ui.bash'
_PREBUILT_SOONG_ZIP_PATH = 'prebuilts/build-tools/linux-x86/bin/soong_zip'
def __init__( def __init__(
self, self,
target: str, target: str,
@@ -75,6 +79,88 @@ class OptimizedBuildTarget(ABC):
f'get_build_targets_impl not implemented in {type(self).__name__}' f'get_build_targets_impl not implemented in {type(self).__name__}'
) )
def _generate_zip_options_for_items(
self,
prefix: str = '',
relative_root: str = '',
list_files: list[str] | None = None,
files: list[str] | None = None,
directories: list[str] | None = None,
) -> list[str]:
if not list_files and not files and not directories:
raise RuntimeError(
f'No items specified to be added to zip! Prefix: {prefix}, Relative'
f' root: {relative_root}'
)
command_segment = []
# These are all soong_zip options so consult soong_zip --help for specifics.
if prefix:
command_segment.append('-P')
command_segment.append(prefix)
if relative_root:
command_segment.append('-C')
command_segment.append(relative_root)
if list_files:
for list_file in list_files:
command_segment.append('-l')
command_segment.append(list_file)
if files:
for file in files:
command_segment.append('-f')
command_segment.append(file)
if directories:
for directory in directories:
command_segment.append('-D')
command_segment.append(directory)
return command_segment
def _query_soong_vars(
self, src_top: pathlib.Path, soong_vars: list[str]
) -> dict[str, str]:
process_result = subprocess.run(
args=[
f'{src_top / self._SOONG_UI_BASH_PATH}',
'--dumpvar-mode',
'--abs',
soong_vars,
],
env=os.environ,
check=False,
capture_output=True,
)
if not process_result.returncode == 0:
logging.error('soong dumpvars command failed! stderr:')
logging.error(process_result.stderr)
raise RuntimeError('Soong dumpvars failed! See log for stderr.')
if not process_result.stdout:
raise RuntimeError(
'Necessary soong variables ' + soong_vars + ' not found.'
)
try:
return {
line.split('=')[0]: line.split('=')[1].strip("'")
for line in process_result.stdout.split('\n')
}
except IndexError as e:
raise RuntimeError(
'Error parsing soong dumpvars output! See output here:'
f' {process_result.stdout}',
e,
)
def _base_zip_command(
self, src_top: pathlib.Path, dist_dir: pathlib.Path, name: str
) -> list[str]:
return [
f'{src_top / self._PREBUILT_SOONG_ZIP_PATH }',
'-d',
'-o',
f'{dist_dir / name}',
]
class NullOptimizer(OptimizedBuildTarget): class NullOptimizer(OptimizedBuildTarget):
"""No-op target optimizer. """No-op target optimizer.
@@ -121,8 +207,6 @@ class ChangeInfo:
class GeneralTestsOptimizer(OptimizedBuildTarget): class GeneralTestsOptimizer(OptimizedBuildTarget):
"""general-tests optimizer """general-tests optimizer
TODO(b/358215235): Implement
This optimizer reads in the list of changed files from the file located in This optimizer reads in the list of changed files from the file located in
env[CHANGE_INFO] and uses this list alongside the normal TEST MAPPING logic to env[CHANGE_INFO] and uses this list alongside the normal TEST MAPPING logic to
determine what test mapping modules will run for the given changes. It then determine what test mapping modules will run for the given changes. It then
@@ -177,6 +261,208 @@ class GeneralTestsOptimizer(OptimizedBuildTarget):
return modules_to_build return modules_to_build
def get_package_outputs_commands_impl(self):
  """Build the soong_zip commands that package general-tests outputs.

  Queries soong for the relevant output directories, collects the built
  per-module testcase directories and their .config files, and returns
  the commands for general-tests.zip plus the configs/list zips.

  Returns:
    A list of command argument lists, each a soong_zip invocation.
  """
  src_top = pathlib.Path(os.environ.get('TOP', os.getcwd()))
  # NOTE(review): DIST_DIR is assumed to be set; pathlib.Path(None) would
  # raise TypeError if it is missing — confirm callers guarantee it.
  dist_dir = pathlib.Path(os.environ.get('DIST_DIR'))

  soong_vars = self._query_soong_vars(
      src_top,
      [
          'HOST_OUT_TESTCASES',
          'TARGET_OUT_TESTCASES',
          'PRODUCT_OUT',
          'SOONG_HOST_OUT',
          'HOST_OUT',
      ],
  )
  host_out_testcases = pathlib.Path(soong_vars.get('HOST_OUT_TESTCASES'))
  target_out_testcases = pathlib.Path(soong_vars.get('TARGET_OUT_TESTCASES'))
  product_out = pathlib.Path(soong_vars.get('PRODUCT_OUT'))
  soong_host_out = pathlib.Path(soong_vars.get('SOONG_HOST_OUT'))
  host_out = pathlib.Path(soong_vars.get('HOST_OUT'))

  host_paths = []
  target_paths = []
  host_config_files = []
  target_config_files = []
  for module in self.modules_to_build:
    # A module may have host outputs, target outputs, both, or neither.
    host_path = host_out_testcases / module
    if os.path.exists(host_path):
      host_paths.append(host_path)
      self._collect_config_files(src_top, host_path, host_config_files)

    target_path = target_out_testcases / module
    if os.path.exists(target_path):
      target_paths.append(target_path)
      self._collect_config_files(src_top, target_path, target_config_files)

    if not os.path.exists(host_path) and not os.path.exists(target_path):
      logging.info(f'No host or target build outputs found for {module}.')

  zip_commands = []

  # Commands for general-tests_configs.zip and general-tests_list.zip.
  zip_commands.extend(
      self._get_zip_test_configs_zips_commands(
          dist_dir,
          host_out,
          product_out,
          host_config_files,
          target_config_files,
      )
  )

  # NOTE(review): host_out is passed as the src_top argument here, so the
  # soong_zip binary is resolved under HOST_OUT — confirm this is intended.
  zip_command = self._base_zip_command(host_out, dist_dir, 'general-tests.zip')

  # Add host testcases.
  zip_command.extend(
      self._generate_zip_options_for_items(
          prefix='host',
          relative_root=f'{src_top / soong_host_out}',
          directories=host_paths,
      )
  )

  # Add target testcases.
  zip_command.extend(
      self._generate_zip_options_for_items(
          prefix='target',
          relative_root=f'{src_top / product_out}',
          directories=target_paths,
      )
  )

  # TODO(lucafarsi): Push this logic into a general-tests-minimal build command
  # Add necessary tools. These are also hardcoded in general-tests.mk.
  framework_path = soong_host_out / 'framework'

  zip_command.extend(
      self._generate_zip_options_for_items(
          prefix='host/tools',
          relative_root=str(framework_path),
          files=[
              f"{framework_path / 'cts-tradefed.jar'}",
              f"{framework_path / 'compatibility-host-util.jar'}",
              f"{framework_path / 'vts-tradefed.jar'}",
          ],
      )
  )

  zip_commands.append(zip_command)
  return zip_commands
def _collect_config_files(
self,
src_top: pathlib.Path,
root_dir: pathlib.Path,
config_files: list[str],
):
for root, dirs, files in os.walk(src_top / root_dir):
for file in files:
if file.endswith('.config'):
config_files.append(root_dir / file)
def _get_zip_test_configs_zips_commands(
    self,
    dist_dir: pathlib.Path,
    host_out: pathlib.Path,
    product_out: pathlib.Path,
    host_config_files: list[str],
    target_config_files: list[str],
    # Annotation corrected: this returns one list of command argument
    # lists, not a tuple of two lists.
) -> list[list[str]]:
  """Generate general-tests_configs.zip and general-tests_list.zip.

  general-tests_configs.zip contains all of the .config files that were
  built and general-tests_list.zip contains a text file which lists
  all of the .config files that are in general-tests_configs.zip.

  general-tests_configs.zip is organized as follows:
  /
    host/
      testcases/
        test_1.config
        test_2.config
        ...
    target/
      testcases/
        test_1.config
        test_2.config
        ...

  So the process is we write out the paths to all the host config files
  into one file and all the paths to the target config files in another.
  We also write the paths to all the config files into a third file to
  use for general-tests_list.zip.

  Args:
    dist_dir: dist directory.
    host_out: host out directory.
    product_out: product out directory.
    host_config_files: list of all host config files.
    target_config_files: list of all target config files.

  Returns:
    The commands to generate general-tests_configs.zip and
    general-tests_list.zip
  """
  # Write the three list files that the zip commands below reference.
  with open(
      f"{host_out / 'host_general-tests_list'}", 'w'
  ) as host_list_file, open(
      f"{product_out / 'target_general-tests_list'}", 'w'
  ) as target_list_file, open(
      f"{host_out / 'general-tests_list'}", 'w'
  ) as list_file:
    for config_file in host_config_files:
      host_list_file.write(f'{config_file}' + '\n')
      list_file.write('host/' + os.path.relpath(config_file, host_out) + '\n')
    for config_file in target_config_files:
      target_list_file.write(f'{config_file}' + '\n')
      list_file.write(
          'target/' + os.path.relpath(config_file, product_out) + '\n'
      )

  zip_commands = []

  # general-tests_configs.zip: add the host and target configs via the
  # -l list files written above.
  tests_config_zip_command = self._base_zip_command(
      host_out, dist_dir, 'general-tests_configs.zip'
  )
  tests_config_zip_command.extend(
      self._generate_zip_options_for_items(
          prefix='host',
          relative_root=str(host_out),
          list_files=[f"{host_out / 'host_general-tests_list'}"],
      )
  )
  tests_config_zip_command.extend(
      self._generate_zip_options_for_items(
          prefix='target',
          relative_root=str(product_out),
          list_files=[f"{product_out / 'target_general-tests_list'}"],
      ),
  )
  zip_commands.append(tests_config_zip_command)

  # general-tests_list.zip: contains only the combined list file itself.
  tests_list_zip_command = self._base_zip_command(
      host_out, dist_dir, 'general-tests_list.zip'
  )
  tests_list_zip_command.extend(
      self._generate_zip_options_for_items(
          relative_root=str(host_out),
          files=[f"{host_out / 'general-tests_list'}"],
      )
  )
  zip_commands.append(tests_list_zip_command)

  return zip_commands
def get_enabled_flag(self): def get_enabled_flag(self):
return 'general_tests_optimized' return 'general_tests_optimized'

View File

@@ -19,10 +19,12 @@ import logging
import os import os
import pathlib import pathlib
import re import re
import subprocess
import textwrap
import unittest import unittest
from unittest import mock from unittest import mock
import optimized_targets
from build_context import BuildContext from build_context import BuildContext
import optimized_targets
from pyfakefs import fake_filesystem_unittest from pyfakefs import fake_filesystem_unittest
@@ -43,11 +45,68 @@ class GeneralTestsOptimizerTest(fake_filesystem_unittest.TestCase):
def _setup_working_build_env(self):
  """Create the fake filesystem layout and env vars the optimizer expects.

  Makes the soong_ui stub plus the output directories the optimizer
  queries, then points CHANGE_INFO, TOP and DIST_DIR at them via the
  mocked os.environ.
  """
  self.change_info_file = pathlib.Path('/tmp/change_info')
  self._write_soong_ui_file()
  self._host_out_testcases = pathlib.Path('/tmp/top/host_out_testcases')
  self._host_out_testcases.mkdir(parents=True)
  self._target_out_testcases = pathlib.Path('/tmp/top/target_out_testcases')
  self._target_out_testcases.mkdir(parents=True)
  self._product_out = pathlib.Path('/tmp/top/product_out')
  self._product_out.mkdir(parents=True)
  self._soong_host_out = pathlib.Path('/tmp/top/soong_host_out')
  self._soong_host_out.mkdir(parents=True)
  self._host_out = pathlib.Path('/tmp/top/host_out')
  self._host_out.mkdir(parents=True)
  self._dist_dir = pathlib.Path('/tmp/top/out/dist')
  self._dist_dir.mkdir(parents=True)

  self.mock_os_environ.update({
      'CHANGE_INFO': str(self.change_info_file),
      'TOP': '/tmp/top',
      'DIST_DIR': '/tmp/top/out/dist',
  })
def _write_soong_ui_file(self):
  """Write a fake soong_ui.bash that echoes the dumpvars the tests expect.

  subprocess.run is mocked in these tests, so the script is never
  executed, but it is still written as a valid, executable shell script.
  """
  soong_path = pathlib.Path('/tmp/top/build/soong')
  soong_path.mkdir(parents=True)

  soong_ui = os.path.join(soong_path, 'soong_ui.bash')
  with open(soong_ui, 'w') as f:
    # Fixed: shebang was previously '#/bin/bash' (missing '!') and was
    # preceded by a blank line, which would make the kernel ignore it.
    f.write("""#!/bin/bash
echo HOST_OUT_TESTCASES='/tmp/top/host_out_testcases'
echo TARGET_OUT_TESTCASES='/tmp/top/target_out_testcases'
echo PRODUCT_OUT='/tmp/top/product_out'
echo SOONG_HOST_OUT='/tmp/top/soong_host_out'
echo HOST_OUT='/tmp/top/host_out'
""")
  # 0o755 so the script is actually executable (0o666 was not).
  os.chmod(soong_ui, 0o755)
def _write_change_info_file(self):
change_info_contents = {
'changes': [{
'projectPath': '/project/path',
'revisions': [{
'fileInfos': [{
'path': 'file/path/file_name',
}],
}],
}]
}
with open(self.change_info_file, 'w') as f:
json.dump(change_info_contents, f)
def _write_test_mapping_file(self):
  """Write a TEST_MAPPING file naming one module in a test-mapping group."""
  contents = {
      'test-mapping-group': [
          {
              'name': 'test_mapping_module',
          },
      ],
  }
  with open('/project/path/file/path/TEST_MAPPING', 'w') as mapping_file:
    json.dump(contents, mapping_file)
def test_general_tests_optimized(self): def test_general_tests_optimized(self):
optimizer = self._create_general_tests_optimizer() optimizer = self._create_general_tests_optimizer()
@@ -124,36 +183,56 @@ class GeneralTestsOptimizerTest(fake_filesystem_unittest.TestCase):
with self.assertRaises(json.decoder.JSONDecodeError): with self.assertRaises(json.decoder.JSONDecodeError):
build_targets = optimizer.get_build_targets() build_targets = optimizer.get_build_targets()
def _write_change_info_file(self): @mock.patch('subprocess.run')
change_info_contents = { def test_packaging_outputs_success(self, subprocess_run):
'changes': [{ subprocess_run.return_value = self._get_soong_vars_output()
'projectPath': '/project/path', optimizer = self._create_general_tests_optimizer()
'revisions': [{ self._set_up_build_outputs(['test_mapping_module'])
'fileInfos': [{
'path': 'file/path/file_name',
}],
}],
}]
}
with open(self.change_info_file, 'w') as f: targets = optimizer.get_build_targets()
json.dump(change_info_contents, f) package_commands = optimizer.get_package_outputs_commands()
def _write_test_mapping_file(self): self._verify_soong_zip_commands(package_commands, ['test_mapping_module'])
test_mapping_contents = {
'test-mapping-group': [
{
'name': 'test_mapping_module',
},
],
}
with open('/project/path/file/path/TEST_MAPPING', 'w') as f: @mock.patch('subprocess.run')
json.dump(test_mapping_contents, f) def test_get_soong_dumpvars_fails_raises(self, subprocess_run):
subprocess_run.return_value = self._get_soong_vars_output(return_code=-1)
optimizer = self._create_general_tests_optimizer()
self._set_up_build_outputs(['test_mapping_module'])
def _create_general_tests_optimizer( targets = optimizer.get_build_targets()
self, build_context: BuildContext = None
): with self.assertRaisesRegex(RuntimeError, 'Soong dumpvars failed!'):
package_commands = optimizer.get_package_outputs_commands()
@mock.patch('subprocess.run')
def test_get_soong_dumpvars_bad_output_raises(self, subprocess_run):
subprocess_run.return_value = self._get_soong_vars_output(
stdout='This output is bad'
)
optimizer = self._create_general_tests_optimizer()
self._set_up_build_outputs(['test_mapping_module'])
targets = optimizer.get_build_targets()
with self.assertRaisesRegex(
RuntimeError, 'Error parsing soong dumpvars output'
):
package_commands = optimizer.get_package_outputs_commands()
@mock.patch('subprocess.run')
def test_no_build_outputs_packaging_fails(self, subprocess_run):
subprocess_run.return_value = self._get_soong_vars_output()
optimizer = self._create_general_tests_optimizer()
targets = optimizer.get_build_targets()
with self.assertRaisesRegex(
RuntimeError, 'No items specified to be added to zip'
):
package_commands = optimizer.get_package_outputs_commands()
def _create_general_tests_optimizer(self, build_context: BuildContext = None):
if not build_context: if not build_context:
build_context = self._create_build_context() build_context = self._create_build_context()
return optimized_targets.GeneralTestsOptimizer( return optimized_targets.GeneralTestsOptimizer(
@@ -170,7 +249,9 @@ class GeneralTestsOptimizerTest(fake_filesystem_unittest.TestCase):
build_context_dict = {} build_context_dict = {}
build_context_dict['enabledBuildFeatures'] = [{'name': 'optimized_build'}] build_context_dict['enabledBuildFeatures'] = [{'name': 'optimized_build'}]
if general_tests_optimized: if general_tests_optimized:
build_context_dict['enabledBuildFeatures'].append({'name': 'general_tests_optimized'}) build_context_dict['enabledBuildFeatures'].append(
{'name': 'general_tests_optimized'}
)
build_context_dict['testContext'] = test_context build_context_dict['testContext'] = test_context
return BuildContext(build_context_dict) return BuildContext(build_context_dict)
@@ -199,6 +280,81 @@ class GeneralTestsOptimizerTest(fake_filesystem_unittest.TestCase):
], ],
} }
def _get_soong_vars_output(
self, return_code: int = 0, stdout: str = ''
) -> subprocess.CompletedProcess:
return_value = subprocess.CompletedProcess(args=[], returncode=return_code)
if not stdout:
stdout = textwrap.dedent(f"""\
HOST_OUT_TESTCASES='{self._host_out_testcases}'
TARGET_OUT_TESTCASES='{self._target_out_testcases}'
PRODUCT_OUT='{self._product_out}'
SOONG_HOST_OUT='{self._soong_host_out}'
HOST_OUT='{self._host_out}'""")
return_value.stdout = stdout
return return_value
def _set_up_build_outputs(self, targets: list[str]):
for target in targets:
host_dir = self._host_out_testcases / target
host_dir.mkdir()
(host_dir / f'{target}.config').touch()
(host_dir / f'test_file').touch()
target_dir = self._target_out_testcases / target
target_dir.mkdir()
(target_dir / f'{target}.config').touch()
(target_dir / f'test_file').touch()
def _verify_soong_zip_commands(self, commands: list[str], targets: list[str]):
"""Verify the structure of the zip commands.
Zip commands have to start with the soong_zip binary path, then are followed
by a couple of options and the name of the file being zipped. Depending on
which zip we are creating look for a few essential items being added in
those zips.
Args:
commands: list of command lists
targets: list of targets expected to be in general-tests.zip
"""
for command in commands:
self.assertEqual(
'/tmp/top/host_out/prebuilts/build-tools/linux-x86/bin/soong_zip',
command[0],
)
self.assertEqual('-d', command[1])
self.assertEqual('-o', command[2])
match (command[3]):
case '/tmp/top/out/dist/general-tests_configs.zip':
self.assertIn(f'{self._host_out}/host_general-tests_list', command)
self.assertIn(
f'{self._product_out}/target_general-tests_list', command
)
return
case '/tmp/top/out/dist/general-tests_list.zip':
self.assertIn('-f', command)
self.assertIn(f'{self._host_out}/general-tests_list', command)
return
case '/tmp/top/out/dist/general-tests.zip':
for target in targets:
self.assertIn(f'{self._host_out_testcases}/{target}', command)
self.assertIn(f'{self._target_out_testcases}/{target}', command)
self.assertIn(
f'{self._soong_host_out}/framework/cts-tradefed.jar', command
)
self.assertIn(
f'{self._soong_host_out}/framework/compatibility-host-util.jar',
command,
)
self.assertIn(
f'{self._soong_host_out}/framework/vts-tradefed.jar', command
)
return
case _:
self.fail(f'malformed command: {command}')
if __name__ == '__main__': if __name__ == '__main__':
# Setup logging to be silent so unit tests can pass through TF. # Setup logging to be silent so unit tests can pass through TF.

View File

@@ -18,4 +18,4 @@
# (like "CRB01"). It must be a single word, and is # (like "CRB01"). It must be a single word, and is
# capitalized by convention. # capitalized by convention.
BUILD_ID=AP4A.240913.002 BUILD_ID=AP4A.240914.001

View File

@@ -18,7 +18,7 @@
# #
# TODO: Should we do all of the images in $(IMAGES_TO_BUILD)? # TODO: Should we do all of the images in $(IMAGES_TO_BUILD)?
_FLAG_PARTITIONS := product system system_ext vendor _FLAG_PARTITIONS := product system vendor
# ----------------------------------------------------------------- # -----------------------------------------------------------------
@@ -28,7 +28,6 @@ _FLAG_PARTITIONS := product system system_ext vendor
# $(1): built aconfig flags file (out) # $(1): built aconfig flags file (out)
# $(2): installed aconfig flags file (out) # $(2): installed aconfig flags file (out)
# $(3): the partition (in) # $(3): the partition (in)
# $(4): input aconfig files for the partition (in)
define generate-partition-aconfig-flag-file define generate-partition-aconfig-flag-file
$(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1))) $(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1)))
$(eval $(strip $(1)): PRIVATE_IN := $(strip $(4))) $(eval $(strip $(1)): PRIVATE_IN := $(strip $(4)))
@@ -36,12 +35,14 @@ $(strip $(1)): $(ACONFIG) $(strip $(4))
mkdir -p $$(dir $$(PRIVATE_OUT)) mkdir -p $$(dir $$(PRIVATE_OUT))
$$(if $$(PRIVATE_IN), \ $$(if $$(PRIVATE_IN), \
$$(ACONFIG) dump --dedup --format protobuf --out $$(PRIVATE_OUT) \ $$(ACONFIG) dump --dedup --format protobuf --out $$(PRIVATE_OUT) \
--filter container:$$(strip $(3)) $$(addprefix --cache ,$$(PRIVATE_IN)), \ --filter container:$(strip $(3)) \
$$(addprefix --cache ,$$(PRIVATE_IN)), \
echo -n > $$(PRIVATE_OUT) \ echo -n > $$(PRIVATE_OUT) \
) )
$(call copy-one-file, $(1), $(2)) $(call copy-one-file, $(1), $(2))
endef endef
# Create a summary file of build flags for each partition # Create a summary file of build flags for each partition
# $(1): built aconfig flags file (out) # $(1): built aconfig flags file (out)
# $(2): installed aconfig flags file (out) # $(2): installed aconfig flags file (out)
@@ -59,16 +60,22 @@ $(strip $(1)): $(ACONFIG) $(strip $(3))
$(call copy-one-file, $(1), $(2)) $(call copy-one-file, $(1), $(2))
endef endef
$(foreach partition, $(_FLAG_PARTITIONS), \ $(foreach partition, $(_FLAG_PARTITIONS), \
$(eval aconfig_flag_summaries_protobuf.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig_flags.pb) \ $(eval aconfig_flag_summaries_protobuf.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig_flags.pb) \
$(eval $(call generate-partition-aconfig-flag-file, \ $(eval $(call generate-partition-aconfig-flag-file, \
$(TARGET_OUT_FLAGS)/$(partition)/aconfig_flags.pb, \ $(TARGET_OUT_FLAGS)/$(partition)/aconfig_flags.pb, \
$(aconfig_flag_summaries_protobuf.$(partition)), \ $(aconfig_flag_summaries_protobuf.$(partition)), \
$(partition), \ $(partition), \
$(sort $(foreach m,$(call register-names-for-partition, $(partition)), \ $(sort \
$(foreach m, $(call register-names-for-partition, $(partition)), \
$(ALL_MODULES.$(m).ACONFIG_FILES) \ $(ALL_MODULES.$(m).ACONFIG_FILES) \
)), \ ) \
$(if $(filter system, $(partition)), \
$(foreach m, $(call register-names-for-partition, system_ext), \
$(ALL_MODULES.$(m).ACONFIG_FILES) \
) \
) \
) \
)) \ )) \
) )
@@ -175,4 +182,3 @@ $(foreach partition, $(_FLAG_PARTITIONS), \
$(eval aconfig_storage_flag_map.$(partition):=) \ $(eval aconfig_storage_flag_map.$(partition):=) \
$(eval aconfig_storage_flag_val.$(partition):=) \ $(eval aconfig_storage_flag_val.$(partition):=) \
) )

View File

@@ -79,8 +79,18 @@ pub fn parse_flags(
.read_to_string(&mut contents) .read_to_string(&mut contents)
.with_context(|| format!("failed to read {}", input.source))?; .with_context(|| format!("failed to read {}", input.source))?;
let flag_declarations = aconfig_protos::flag_declarations::try_from_text_proto(&contents) let mut flag_declarations =
.with_context(|| input.error_context())?; aconfig_protos::flag_declarations::try_from_text_proto(&contents)
.with_context(|| input.error_context())?;
// system_ext flags should be treated as system flags as we are combining /system_ext
// and /system as one container
// TODO: remove this logic when we start enforcing that system_ext cannot be set as
// container in aconfig declaration files.
if flag_declarations.container() == "system_ext" {
flag_declarations.set_container(String::from("system"));
}
ensure!( ensure!(
package == flag_declarations.package(), package == flag_declarations.package(),
"failed to parse {}: expected package {}, got {}", "failed to parse {}: expected package {}, got {}",

View File

@@ -51,4 +51,8 @@ java_library {
static_libs: [ static_libs: [
"libaconfig_java_proto_nano", "libaconfig_java_proto_nano",
], ],
sdk_version: "core_platform",
apex_available: [
"//apex_available:platform",
],
} }

View File

@@ -29,7 +29,7 @@ import java.util.List;
* @hide * @hide
*/ */
public class DeviceProtos { public class DeviceProtos {
static final String[] PATHS = { public static final String[] PATHS = {
TEMPLATE TEMPLATE
}; };
@@ -50,10 +50,11 @@ public class DeviceProtos {
ArrayList<parsed_flag> result = new ArrayList(); ArrayList<parsed_flag> result = new ArrayList();
for (String path : parsedFlagsProtoPaths()) { for (String path : parsedFlagsProtoPaths()) {
FileInputStream inputStream = new FileInputStream(path); try (FileInputStream inputStream = new FileInputStream(path)) {
parsed_flags parsedFlags = parsed_flags.parseFrom(inputStream.readAllBytes()); parsed_flags parsedFlags = parsed_flags.parseFrom(inputStream.readAllBytes());
for (parsed_flag flag : parsedFlags.parsedFlag) { for (parsed_flag flag : parsedFlags.parsedFlag) {
result.add(flag); result.add(flag);
}
} }
} }
@@ -64,7 +65,7 @@ public class DeviceProtos {
* Returns the list of all on-device aconfig protos paths. * Returns the list of all on-device aconfig protos paths.
* @hide * @hide
*/ */
private static List<String> parsedFlagsProtoPaths() { public static List<String> parsedFlagsProtoPaths() {
ArrayList<String> paths = new ArrayList(Arrays.asList(PATHS)); ArrayList<String> paths = new ArrayList(Arrays.asList(PATHS));
File apexDirectory = new File(APEX_DIR); File apexDirectory = new File(APEX_DIR);

View File

@@ -1,3 +1,4 @@
use crate::load_protos;
use crate::{Flag, FlagSource}; use crate::{Flag, FlagSource};
use crate::{FlagPermission, FlagValue, ValuePickedFrom}; use crate::{FlagPermission, FlagValue, ValuePickedFrom};
use aconfigd_protos::{ use aconfigd_protos::{
@@ -9,13 +10,18 @@ use anyhow::anyhow;
use anyhow::Result; use anyhow::Result;
use protobuf::Message; use protobuf::Message;
use protobuf::SpecialFields; use protobuf::SpecialFields;
use std::collections::HashMap;
use std::io::{Read, Write}; use std::io::{Read, Write};
use std::net::Shutdown; use std::net::Shutdown;
use std::os::unix::net::UnixStream; use std::os::unix::net::UnixStream;
pub struct AconfigStorageSource {} pub struct AconfigStorageSource {}
fn convert(msg: ProtoFlagQueryReturnMessage) -> Result<Flag> { fn load_flag_to_container() -> Result<HashMap<String, String>> {
Ok(load_protos::load()?.into_iter().map(|p| (p.qualified_name(), p.container)).collect())
}
fn convert(msg: ProtoFlagQueryReturnMessage, containers: &HashMap<String, String>) -> Result<Flag> {
let (value, value_picked_from) = match ( let (value, value_picked_from) = match (
&msg.boot_flag_value, &msg.boot_flag_value,
msg.default_flag_value, msg.default_flag_value,
@@ -55,15 +61,21 @@ fn convert(msg: ProtoFlagQueryReturnMessage) -> Result<Flag> {
None => return Err(anyhow!("missing permission")), None => return Err(anyhow!("missing permission")),
}; };
let name = msg.flag_name.ok_or(anyhow!("missing flag name"))?;
let package = msg.package_name.ok_or(anyhow!("missing package name"))?;
let qualified_name = format!("{package}.{name}");
Ok(Flag { Ok(Flag {
name: msg.flag_name.ok_or(anyhow!("missing flag name"))?, name,
package: msg.package_name.ok_or(anyhow!("missing package name"))?, package,
value, value,
permission, permission,
value_picked_from, value_picked_from,
staged_value, staged_value,
container: "-".to_string(), container: containers
.get(&qualified_name)
.cloned()
.unwrap_or_else(|| "<no container>".to_string())
.to_string(),
// TODO: remove once DeviceConfig is not in the CLI. // TODO: remove once DeviceConfig is not in the CLI.
namespace: "-".to_string(), namespace: "-".to_string(),
}) })
@@ -114,9 +126,13 @@ fn read_from_socket() -> Result<Vec<ProtoFlagQueryReturnMessage>> {
impl FlagSource for AconfigStorageSource { impl FlagSource for AconfigStorageSource {
fn list_flags() -> Result<Vec<Flag>> { fn list_flags() -> Result<Vec<Flag>> {
let containers = load_flag_to_container()?;
read_from_socket() read_from_socket()
.map(|query_messages| { .map(|query_messages| {
query_messages.iter().map(|message| convert(message.clone())).collect::<Vec<_>>() query_messages
.iter()
.map(|message| convert(message.clone(), &containers))
.collect::<Vec<_>>()
})? })?
.into_iter() .into_iter()
.collect() .collect()

View File

@@ -264,6 +264,10 @@ A/B OTA specific options
--compression_factor --compression_factor
Specify the maximum block size to be compressed at once during OTA. supported options: 4k, 8k, 16k, 32k, 64k, 128k, 256k Specify the maximum block size to be compressed at once during OTA. supported options: 4k, 8k, 16k, 32k, 64k, 128k, 256k
--full_ota_partitions
Specify list of partitions should be updated in full OTA fashion, even if
an incremental OTA is about to be generated
""" """
from __future__ import print_function from __future__ import print_function
@@ -283,7 +287,7 @@ import common
import ota_utils import ota_utils
import payload_signer import payload_signer
from ota_utils import (VABC_COMPRESSION_PARAM_SUPPORT, FinalizeMetadata, GetPackageMetadata, from ota_utils import (VABC_COMPRESSION_PARAM_SUPPORT, FinalizeMetadata, GetPackageMetadata,
PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, ExtractTargetFiles, CopyTargetFilesDir) PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, ExtractTargetFiles, CopyTargetFilesDir, TARGET_FILES_IMAGES_SUBDIR)
from common import DoesInputFileContain, IsSparseImage from common import DoesInputFileContain, IsSparseImage
import target_files_diff import target_files_diff
from non_ab_ota import GenerateNonAbOtaPackage from non_ab_ota import GenerateNonAbOtaPackage
@@ -337,6 +341,7 @@ OPTIONS.security_patch_level = None
OPTIONS.max_threads = None OPTIONS.max_threads = None
OPTIONS.vabc_cow_version = None OPTIONS.vabc_cow_version = None
OPTIONS.compression_factor = None OPTIONS.compression_factor = None
OPTIONS.full_ota_partitions = None
POSTINSTALL_CONFIG = 'META/postinstall_config.txt' POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
@@ -892,6 +897,14 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None):
if source_file is not None: if source_file is not None:
source_file = ExtractTargetFiles(source_file) source_file = ExtractTargetFiles(source_file)
if OPTIONS.full_ota_partitions:
for partition in OPTIONS.full_ota_partitions:
for subdir in TARGET_FILES_IMAGES_SUBDIR:
image_path = os.path.join(source_file, subdir, partition + ".img")
if os.path.exists(image_path):
logger.info(
"Ignoring source image %s for partition %s because it is configured to use full OTA", image_path, partition)
os.remove(image_path)
assert "ab_partitions" in OPTIONS.source_info_dict, \ assert "ab_partitions" in OPTIONS.source_info_dict, \
"META/ab_partitions.txt is required for ab_update." "META/ab_partitions.txt is required for ab_update."
assert "ab_partitions" in OPTIONS.target_info_dict, \ assert "ab_partitions" in OPTIONS.target_info_dict, \
@@ -1193,7 +1206,7 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None):
def main(argv): def main(argv):
def option_handler(o, a): def option_handler(o, a: str):
if o in ("-i", "--incremental_from"): if o in ("-i", "--incremental_from"):
OPTIONS.incremental_source = a OPTIONS.incremental_source = a
elif o == "--full_radio": elif o == "--full_radio":
@@ -1320,6 +1333,9 @@ def main(argv):
else: else:
raise ValueError("Cannot parse value %r for option %r - only " raise ValueError("Cannot parse value %r for option %r - only "
"integers are allowed." % (a, o)) "integers are allowed." % (a, o))
elif o == "--full_ota_partitions":
OPTIONS.full_ota_partitions = set(
a.strip().strip("\"").strip("'").split(","))
else: else:
return False return False
return True return True
@@ -1370,6 +1386,7 @@ def main(argv):
"max_threads=", "max_threads=",
"vabc_cow_version=", "vabc_cow_version=",
"compression_factor=", "compression_factor=",
"full_ota_partitions=",
], extra_option_handler=[option_handler, payload_signer.signer_options]) ], extra_option_handler=[option_handler, payload_signer.signer_options])
common.InitLogging() common.InitLogging()

View File

@@ -184,6 +184,7 @@ import re
import shutil import shutil
import stat import stat
import sys import sys
import shlex
import tempfile import tempfile
import zipfile import zipfile
from xml.etree import ElementTree from xml.etree import ElementTree
@@ -589,7 +590,7 @@ def GetOtaSigningArgs():
if OPTIONS.payload_signer: if OPTIONS.payload_signer:
args.extend(["--payload_signer=" + OPTIONS.payload_signer]) args.extend(["--payload_signer=" + OPTIONS.payload_signer])
if OPTIONS.payload_signer_args: if OPTIONS.payload_signer_args:
args.extend(["--payload_signer_args=" + OPTIONS.payload_signer_args]) args.extend(["--payload_signer_args=" + shlex.join(OPTIONS.payload_signer_args)])
if OPTIONS.search_path: if OPTIONS.search_path:
args.extend(["--search_path", OPTIONS.search_path]) args.extend(["--search_path", OPTIONS.search_path])
if OPTIONS.payload_signer_maximum_signature_size: if OPTIONS.payload_signer_maximum_signature_size: