Merge changes from topic "hiddenapi_additional_annotations" am: ece454400d

Original change: https://android-review.googlesource.com/c/platform/build/soong/+/1590193

MUST ONLY BE SUBMITTED BY AUTOMERGER

Change-Id: Ie3dc56d6f0ba033a3d80655707c4acf81bccf8f2
This commit is contained in:
Paul Duffin
2021-02-24 00:49:28 +00:00
committed by Automerger Merge Worker
3 changed files with 33 additions and 4 deletions

View File

@@ -254,6 +254,7 @@ func (h *hiddenAPI) hiddenAPIExtractInformation(ctx android.ModuleContext, dexJa
rule.Command().
    BuiltTool("merge_csv").
    Flag("--zip_input").
    Flag("--key_field signature").
    FlagWithOutput("--output=", indexCSV).
    Inputs(classesJars)
rule.Build("merged-hiddenapi-index", "Merged Hidden API index")

View File

@@ -424,6 +424,7 @@ func metadataRule(ctx android.SingletonContext) android.Path {
rule.Command().
    BuiltTool("merge_csv").
    Flag("--key_field signature").
    FlagWithOutput("--output=", outputPath).
    Inputs(metadataCSV)
@@ -535,6 +536,7 @@ func (h *hiddenAPIIndexSingleton) GenerateBuildActions(ctx android.SingletonCont
rule := android.NewRuleBuilder(pctx, ctx)
rule.Command().
    BuiltTool("merge_csv").
    Flag("--key_field signature").
    FlagWithArg("--header=", "signature,file,startline,startcol,endline,endcol,properties").
    FlagWithOutput("--output=", hiddenAPISingletonPaths(ctx).index).
    Inputs(indexes)

View File

@@ -20,6 +20,9 @@ Merge multiple CSV files, possibly with different columns.
import argparse
import csv
import io
import heapq
import itertools
import operator
from zipfile import ZipFile
@@ -28,6 +31,10 @@ args_parser.add_argument('--header', help='Comma separated field names; '
                         'if missing determines the header from input files.')
args_parser.add_argument('--zip_input', help='Treat files as ZIP archives containing CSV files to merge.',
                         action="store_true")
args_parser.add_argument('--key_field', help='The name of the field by which the rows should be sorted. '
                         'Must be in the field names. '
                         'Will be the first field in the output. '
                         'All input files must be sorted by that field.')
args_parser.add_argument('--output', help='Output file for merged CSV.',
                         default='-', type=argparse.FileType('w'))
args_parser.add_argument('files', nargs=argparse.REMAINDER)
@@ -57,10 +64,29 @@ else:
        headers = headers.union(reader.fieldnames)
    fieldnames = sorted(headers)

# By default chain the csv readers together so that the resulting output is
# the concatenation of the rows from each of them:
all_rows = itertools.chain.from_iterable(csv_readers)

if len(csv_readers) > 0:
    keyField = args.key_field
    if keyField:
        assert keyField in fieldnames, (
            "--key_field {} not found, must be one of {}\n").format(
                keyField, ",".join(fieldnames))
        # Make the key field the first field in the output
        keyFieldIndex = fieldnames.index(args.key_field)
        fieldnames.insert(0, fieldnames.pop(keyFieldIndex))
        # Create an iterable that performs a lazy merge sort on the csv readers
        # sorting the rows by the key field.
        all_rows = heapq.merge(*csv_readers, key=operator.itemgetter(keyField))

# Write all rows from the input files to the output:
writer = csv.DictWriter(args.output, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL,
                        dialect='unix', fieldnames=fieldnames)
writer.writeheader()

# Read all the rows from the input and write them to the output in the correct
# order:
for row in all_rows:
    writer.writerow(row)