Make releasetools pylint clean.

This caught a few bugs/syntax errors (a few character classes were not
escaped properly in regex patterns, some indentation was illegal,
etc).

Change-Id: I50637607524e68c4fb9cad7167f58a46b8d26b2c
This commit is contained in:
Dan Albert
2015-03-23 19:13:21 -07:00
parent 5d60719e42
commit 8b72aefb5a
17 changed files with 3284 additions and 2842 deletions

View File

@@ -55,14 +55,14 @@ def AddSystem(output_zip, prefix="IMAGES/", recovery_img=None, boot_img=None):
return return
def output_sink(fn, data): def output_sink(fn, data):
ofile = open(os.path.join(OPTIONS.input_tmp,"SYSTEM",fn), "w") ofile = open(os.path.join(OPTIONS.input_tmp, "SYSTEM", fn), "w")
ofile.write(data) ofile.write(data)
ofile.close() ofile.close()
if OPTIONS.rebuild_recovery: if OPTIONS.rebuild_recovery:
print("Building new recovery patch") print "Building new recovery patch"
common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink, recovery_img, boot_img, common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink, recovery_img,
info_dict=OPTIONS.info_dict) boot_img, info_dict=OPTIONS.info_dict)
block_list = common.MakeTempFile(prefix="system-blocklist-", suffix=".map") block_list = common.MakeTempFile(prefix="system-blocklist-", suffix=".map")
imgname = BuildSystem(OPTIONS.input_tmp, OPTIONS.info_dict, imgname = BuildSystem(OPTIONS.input_tmp, OPTIONS.info_dict,
@@ -88,7 +88,7 @@ def AddVendor(output_zip, prefix="IMAGES/"):
block_list = common.MakeTempFile(prefix="vendor-blocklist-", suffix=".map") block_list = common.MakeTempFile(prefix="vendor-blocklist-", suffix=".map")
imgname = BuildVendor(OPTIONS.input_tmp, OPTIONS.info_dict, imgname = BuildVendor(OPTIONS.input_tmp, OPTIONS.info_dict,
block_list=block_list) block_list=block_list)
common.ZipWrite(output_zip, imgname, prefix + "vendor.img") common.ZipWrite(output_zip, imgname, prefix + "vendor.img")
common.ZipWrite(output_zip, block_list, prefix + "vendor.map") common.ZipWrite(output_zip, block_list, prefix + "vendor.map")
@@ -110,18 +110,18 @@ def CreateImage(input_dir, info_dict, what, block_list=None):
try: try:
os.symlink(os.path.join(input_dir, what.upper()), os.symlink(os.path.join(input_dir, what.upper()),
os.path.join(input_dir, what)) os.path.join(input_dir, what))
except OSError, e: except OSError as e:
# bogus error on my mac version? # bogus error on my mac version?
# File "./build/tools/releasetools/img_from_target_files", line 86, in AddSystem # File "./build/tools/releasetools/img_from_target_files"
# os.path.join(OPTIONS.input_tmp, "system")) # os.path.join(OPTIONS.input_tmp, "system"))
# OSError: [Errno 17] File exists # OSError: [Errno 17] File exists
if (e.errno == errno.EEXIST): if e.errno == errno.EEXIST:
pass pass
image_props = build_image.ImagePropFromGlobalDict(info_dict, what) image_props = build_image.ImagePropFromGlobalDict(info_dict, what)
fstab = info_dict["fstab"] fstab = info_dict["fstab"]
if fstab: if fstab:
image_props["fs_type" ] = fstab["/" + what].fs_type image_props["fs_type"] = fstab["/" + what].fs_type
if what == "system": if what == "system":
fs_config_prefix = "" fs_config_prefix = ""
@@ -130,10 +130,12 @@ def CreateImage(input_dir, info_dict, what, block_list=None):
fs_config = os.path.join( fs_config = os.path.join(
input_dir, "META/" + fs_config_prefix + "filesystem_config.txt") input_dir, "META/" + fs_config_prefix + "filesystem_config.txt")
if not os.path.exists(fs_config): fs_config = None if not os.path.exists(fs_config):
fs_config = None
fc_config = os.path.join(input_dir, "BOOT/RAMDISK/file_contexts") fc_config = os.path.join(input_dir, "BOOT/RAMDISK/file_contexts")
if not os.path.exists(fc_config): fc_config = None if not os.path.exists(fc_config):
fc_config = None
succ = build_image.BuildImage(os.path.join(input_dir, what), succ = build_image.BuildImage(os.path.join(input_dir, what),
image_props, img, image_props, img,
@@ -173,7 +175,7 @@ def AddUserdata(output_zip, prefix="IMAGES/"):
fstab = OPTIONS.info_dict["fstab"] fstab = OPTIONS.info_dict["fstab"]
if fstab: if fstab:
image_props["fs_type" ] = fstab["/data"].fs_type image_props["fs_type"] = fstab["/data"].fs_type
succ = build_image.BuildImage(user_dir, image_props, img.name) succ = build_image.BuildImage(user_dir, image_props, img.name)
assert succ, "build userdata.img image failed" assert succ, "build userdata.img image failed"
@@ -210,7 +212,7 @@ def AddCache(output_zip, prefix="IMAGES/"):
fstab = OPTIONS.info_dict["fstab"] fstab = OPTIONS.info_dict["fstab"]
if fstab: if fstab:
image_props["fs_type" ] = fstab["/cache"].fs_type image_props["fs_type"] = fstab["/cache"].fs_type
succ = build_image.BuildImage(user_dir, image_props, img.name) succ = build_image.BuildImage(user_dir, image_props, img.name)
assert succ, "build cache.img image failed" assert succ, "build cache.img image failed"
@@ -289,7 +291,7 @@ def AddImagesToTargetFiles(filename):
output_zip.close() output_zip.close()
def main(argv): def main(argv):
def option_handler(o, a): def option_handler(o, _):
if o in ("-a", "--add_missing"): if o in ("-a", "--add_missing"):
OPTIONS.add_missing = True OPTIONS.add_missing = True
elif o in ("-r", "--rebuild_recovery",): elif o in ("-r", "--rebuild_recovery",):
@@ -298,12 +300,10 @@ def main(argv):
return False return False
return True return True
args = common.ParseOptions(argv, __doc__, args = common.ParseOptions(
extra_opts="ar", argv, __doc__, extra_opts="ar",
extra_long_opts=["add_missing", extra_long_opts=["add_missing", "rebuild_recovery"],
"rebuild_recovery", extra_option_handler=option_handler)
],
extra_option_handler=option_handler)
if len(args) != 1: if len(args) != 1:
@@ -317,7 +317,7 @@ if __name__ == '__main__':
try: try:
common.CloseInheritedPipes() common.CloseInheritedPipes()
main(sys.argv[1:]) main(sys.argv[1:])
except common.ExternalError, e: except common.ExternalError as e:
print print
print " ERROR: %s" % (e,) print " ERROR: %s" % (e,)
print print

View File

@@ -20,17 +20,17 @@ import heapq
import itertools import itertools
import multiprocessing import multiprocessing
import os import os
import pprint
import re import re
import subprocess import subprocess
import sys
import threading import threading
import tempfile import tempfile
from rangelib import * from rangelib import RangeSet
__all__ = ["EmptyImage", "DataImage", "BlockImageDiff"] __all__ = ["EmptyImage", "DataImage", "BlockImageDiff"]
def compute_patch(src, tgt, imgdiff=False): def compute_patch(src, tgt, imgdiff=False):
srcfd, srcfile = tempfile.mkstemp(prefix="src-") srcfd, srcfile = tempfile.mkstemp(prefix="src-")
tgtfd, tgtfile = tempfile.mkstemp(prefix="tgt-") tgtfd, tgtfile = tempfile.mkstemp(prefix="tgt-")
@@ -69,7 +69,16 @@ def compute_patch(src, tgt, imgdiff=False):
except OSError: except OSError:
pass pass
class EmptyImage(object):
class Image(object):
def ReadRangeSet(self, ranges):
raise NotImplementedError
def TotalSha1(self):
raise NotImplementedError
class EmptyImage(Image):
"""A zero-length image.""" """A zero-length image."""
blocksize = 4096 blocksize = 4096
care_map = RangeSet() care_map = RangeSet()
@@ -81,7 +90,7 @@ class EmptyImage(object):
return sha1().hexdigest() return sha1().hexdigest()
class DataImage(object): class DataImage(Image):
"""An image wrapped around a single string of data.""" """An image wrapped around a single string of data."""
def __init__(self, data, trim=False, pad=False): def __init__(self, data, trim=False, pad=False):
@@ -126,9 +135,7 @@ class DataImage(object):
return [self.data[s*self.blocksize:e*self.blocksize] for (s, e) in ranges] return [self.data[s*self.blocksize:e*self.blocksize] for (s, e) in ranges]
def TotalSha1(self): def TotalSha1(self):
if not hasattr(self, "sha1"): return sha1(self.data).hexdigest()
self.sha1 = sha1(self.data).hexdigest()
return self.sha1
class Transfer(object): class Transfer(object):
@@ -196,9 +203,13 @@ class BlockImageDiff(object):
def __init__(self, tgt, src=None, threads=None, version=3): def __init__(self, tgt, src=None, threads=None, version=3):
if threads is None: if threads is None:
threads = multiprocessing.cpu_count() // 2 threads = multiprocessing.cpu_count() // 2
if threads == 0: threads = 1 if threads == 0:
threads = 1
self.threads = threads self.threads = threads
self.version = version self.version = version
self.transfers = []
self.src_basenames = {}
self.src_numpatterns = {}
assert version in (1, 2, 3) assert version in (1, 2, 3)
@@ -247,7 +258,7 @@ class BlockImageDiff(object):
self.ComputePatches(prefix) self.ComputePatches(prefix)
self.WriteTransfers(prefix) self.WriteTransfers(prefix)
def HashBlocks(self, source, ranges): def HashBlocks(self, source, ranges): # pylint: disable=no-self-use
data = source.ReadRangeSet(ranges) data = source.ReadRangeSet(ranges)
ctx = sha1() ctx = sha1()
@@ -300,7 +311,7 @@ class BlockImageDiff(object):
free_string = [] free_string = []
if self.version == 1: if self.version == 1:
src_string = xf.src_ranges.to_string_raw() src_str = xf.src_ranges.to_string_raw()
elif self.version >= 2: elif self.version >= 2:
# <# blocks> <src ranges> # <# blocks> <src ranges>
@@ -310,7 +321,7 @@ class BlockImageDiff(object):
# <# blocks> - <stash refs...> # <# blocks> - <stash refs...>
size = xf.src_ranges.size() size = xf.src_ranges.size()
src_string = [str(size)] src_str = [str(size)]
unstashed_src_ranges = xf.src_ranges unstashed_src_ranges = xf.src_ranges
mapped_stashes = [] mapped_stashes = []
@@ -322,10 +333,10 @@ class BlockImageDiff(object):
sr = xf.src_ranges.map_within(sr) sr = xf.src_ranges.map_within(sr)
mapped_stashes.append(sr) mapped_stashes.append(sr)
if self.version == 2: if self.version == 2:
src_string.append("%d:%s" % (sid, sr.to_string_raw())) src_str.append("%d:%s" % (sid, sr.to_string_raw()))
else: else:
assert sh in stashes assert sh in stashes
src_string.append("%s:%s" % (sh, sr.to_string_raw())) src_str.append("%s:%s" % (sh, sr.to_string_raw()))
stashes[sh] -= 1 stashes[sh] -= 1
if stashes[sh] == 0: if stashes[sh] == 0:
free_string.append("free %s\n" % (sh)) free_string.append("free %s\n" % (sh))
@@ -333,17 +344,17 @@ class BlockImageDiff(object):
heapq.heappush(free_stash_ids, sid) heapq.heappush(free_stash_ids, sid)
if unstashed_src_ranges: if unstashed_src_ranges:
src_string.insert(1, unstashed_src_ranges.to_string_raw()) src_str.insert(1, unstashed_src_ranges.to_string_raw())
if xf.use_stash: if xf.use_stash:
mapped_unstashed = xf.src_ranges.map_within(unstashed_src_ranges) mapped_unstashed = xf.src_ranges.map_within(unstashed_src_ranges)
src_string.insert(2, mapped_unstashed.to_string_raw()) src_str.insert(2, mapped_unstashed.to_string_raw())
mapped_stashes.append(mapped_unstashed) mapped_stashes.append(mapped_unstashed)
self.AssertPartition(RangeSet(data=(0, size)), mapped_stashes) self.AssertPartition(RangeSet(data=(0, size)), mapped_stashes)
else: else:
src_string.insert(1, "-") src_str.insert(1, "-")
self.AssertPartition(RangeSet(data=(0, size)), mapped_stashes) self.AssertPartition(RangeSet(data=(0, size)), mapped_stashes)
src_string = " ".join(src_string) src_str = " ".join(src_str)
# all versions: # all versions:
# zero <rangeset> # zero <rangeset>
@@ -356,14 +367,14 @@ class BlockImageDiff(object):
# move <src rangeset> <tgt rangeset> # move <src rangeset> <tgt rangeset>
# #
# version 2: # version 2:
# bsdiff patchstart patchlen <tgt rangeset> <src_string> # bsdiff patchstart patchlen <tgt rangeset> <src_str>
# imgdiff patchstart patchlen <tgt rangeset> <src_string> # imgdiff patchstart patchlen <tgt rangeset> <src_str>
# move <tgt rangeset> <src_string> # move <tgt rangeset> <src_str>
# #
# version 3: # version 3:
# bsdiff patchstart patchlen srchash tgthash <tgt rangeset> <src_string> # bsdiff patchstart patchlen srchash tgthash <tgt rangeset> <src_str>
# imgdiff patchstart patchlen srchash tgthash <tgt rangeset> <src_string> # imgdiff patchstart patchlen srchash tgthash <tgt rangeset> <src_str>
# move hash <tgt rangeset> <src_string> # move hash <tgt rangeset> <src_str>
tgt_size = xf.tgt_ranges.size() tgt_size = xf.tgt_ranges.size()
@@ -383,12 +394,12 @@ class BlockImageDiff(object):
elif self.version == 2: elif self.version == 2:
out.append("%s %s %s\n" % ( out.append("%s %s %s\n" % (
xf.style, xf.style,
xf.tgt_ranges.to_string_raw(), src_string)) xf.tgt_ranges.to_string_raw(), src_str))
elif self.version >= 3: elif self.version >= 3:
out.append("%s %s %s %s\n" % ( out.append("%s %s %s %s\n" % (
xf.style, xf.style,
self.HashBlocks(self.tgt, xf.tgt_ranges), self.HashBlocks(self.tgt, xf.tgt_ranges),
xf.tgt_ranges.to_string_raw(), src_string)) xf.tgt_ranges.to_string_raw(), src_str))
total += tgt_size total += tgt_size
elif xf.style in ("bsdiff", "imgdiff"): elif xf.style in ("bsdiff", "imgdiff"):
performs_read = True performs_read = True
@@ -401,14 +412,14 @@ class BlockImageDiff(object):
elif self.version == 2: elif self.version == 2:
out.append("%s %d %d %s %s\n" % ( out.append("%s %d %d %s %s\n" % (
xf.style, xf.patch_start, xf.patch_len, xf.style, xf.patch_start, xf.patch_len,
xf.tgt_ranges.to_string_raw(), src_string)) xf.tgt_ranges.to_string_raw(), src_str))
elif self.version >= 3: elif self.version >= 3:
out.append("%s %d %d %s %s %s %s\n" % ( out.append("%s %d %d %s %s %s %s\n" % (
xf.style, xf.style,
xf.patch_start, xf.patch_len, xf.patch_start, xf.patch_len,
self.HashBlocks(self.src, xf.src_ranges), self.HashBlocks(self.src, xf.src_ranges),
self.HashBlocks(self.tgt, xf.tgt_ranges), self.HashBlocks(self.tgt, xf.tgt_ranges),
xf.tgt_ranges.to_string_raw(), src_string)) xf.tgt_ranges.to_string_raw(), src_str))
total += tgt_size total += tgt_size
elif xf.style == "zero": elif xf.style == "zero":
assert xf.tgt_ranges assert xf.tgt_ranges
@@ -417,7 +428,7 @@ class BlockImageDiff(object):
out.append("%s %s\n" % (xf.style, to_zero.to_string_raw())) out.append("%s %s\n" % (xf.style, to_zero.to_string_raw()))
total += to_zero.size() total += to_zero.size()
else: else:
raise ValueError, "unknown transfer style '%s'\n" % (xf.style,) raise ValueError("unknown transfer style '%s'\n" % xf.style)
if free_string: if free_string:
out.append("".join(free_string)) out.append("".join(free_string))
@@ -527,11 +538,13 @@ class BlockImageDiff(object):
patches = [None] * patch_num patches = [None] * patch_num
# TODO: Rewrite with multiprocessing.ThreadPool?
lock = threading.Lock() lock = threading.Lock()
def diff_worker(): def diff_worker():
while True: while True:
with lock: with lock:
if not diff_q: return if not diff_q:
return
tgt_size, src, tgt, xf, patchnum = diff_q.pop() tgt_size, src, tgt, xf, patchnum = diff_q.pop()
patch = compute_patch(src, tgt, imgdiff=(xf.style == "imgdiff")) patch = compute_patch(src, tgt, imgdiff=(xf.style == "imgdiff"))
size = len(patch) size = len(patch)
@@ -543,7 +556,7 @@ class BlockImageDiff(object):
xf.tgt_name + " (from " + xf.src_name + ")"))) xf.tgt_name + " (from " + xf.src_name + ")")))
threads = [threading.Thread(target=diff_worker) threads = [threading.Thread(target=diff_worker)
for i in range(self.threads)] for _ in range(self.threads)]
for th in threads: for th in threads:
th.start() th.start()
while threads: while threads:
@@ -670,8 +683,6 @@ class BlockImageDiff(object):
stash_size = 0 stash_size = 0
for xf in self.transfers: for xf in self.transfers:
lost = 0
size = xf.src_ranges.size()
for u in xf.goes_before.copy(): for u in xf.goes_before.copy():
# xf should go before u # xf should go before u
if xf.order < u.order: if xf.order < u.order:
@@ -737,7 +748,8 @@ class BlockImageDiff(object):
# Put all sinks at the end of the sequence. # Put all sinks at the end of the sequence.
while True: while True:
sinks = [u for u in G if not u.outgoing] sinks = [u for u in G if not u.outgoing]
if not sinks: break if not sinks:
break
for u in sinks: for u in sinks:
s2.appendleft(u) s2.appendleft(u)
del G[u] del G[u]
@@ -747,14 +759,16 @@ class BlockImageDiff(object):
# Put all the sources at the beginning of the sequence. # Put all the sources at the beginning of the sequence.
while True: while True:
sources = [u for u in G if not u.incoming] sources = [u for u in G if not u.incoming]
if not sources: break if not sources:
break
for u in sources: for u in sources:
s1.append(u) s1.append(u)
del G[u] del G[u]
for iu in u.outgoing: for iu in u.outgoing:
del iu.incoming[u] del iu.incoming[u]
if not G: break if not G:
break
# Find the "best" vertex to put next. "Best" is the one that # Find the "best" vertex to put next. "Best" is the one that
# maximizes the net difference in source blocks saved we get by # maximizes the net difference in source blocks saved we get by
@@ -792,7 +806,8 @@ class BlockImageDiff(object):
print("Generating digraph...") print("Generating digraph...")
for a in self.transfers: for a in self.transfers:
for b in self.transfers: for b in self.transfers:
if a is b: continue if a is b:
continue
# If the blocks written by A are read by B, then B needs to go before A. # If the blocks written by A are read by B, then B needs to go before A.
i = a.tgt_ranges.intersect(b.src_ranges) i = a.tgt_ranges.intersect(b.src_ranges)
@@ -807,7 +822,6 @@ class BlockImageDiff(object):
a.goes_after[b] = size a.goes_after[b] = size
def FindTransfers(self): def FindTransfers(self):
self.transfers = []
empty = RangeSet() empty = RangeSet()
for tgt_fn, tgt_ranges in self.tgt.file_map.items(): for tgt_fn, tgt_ranges in self.tgt.file_map.items():
if tgt_fn == "__ZERO": if tgt_fn == "__ZERO":
@@ -847,9 +861,6 @@ class BlockImageDiff(object):
Transfer(tgt_fn, None, tgt_ranges, empty, "new", self.transfers) Transfer(tgt_fn, None, tgt_ranges, empty, "new", self.transfers)
def AbbreviateSourceNames(self): def AbbreviateSourceNames(self):
self.src_basenames = {}
self.src_numpatterns = {}
for k in self.src.file_map.keys(): for k in self.src.file_map.keys():
b = os.path.basename(k) b = os.path.basename(k)
self.src_basenames[b] = k self.src_basenames[b] = k

View File

@@ -72,14 +72,15 @@ def AdjustPartitionSizeForVerity(partition_size):
""" """
success, verity_tree_size = GetVerityTreeSize(partition_size) success, verity_tree_size = GetVerityTreeSize(partition_size)
if not success: if not success:
return 0; return 0
success, verity_metadata_size = GetVerityMetadataSize(partition_size) success, verity_metadata_size = GetVerityMetadataSize(partition_size)
if not success: if not success:
return 0 return 0
return partition_size - verity_tree_size - verity_metadata_size return partition_size - verity_tree_size - verity_metadata_size
def BuildVerityTree(sparse_image_path, verity_image_path, prop_dict): def BuildVerityTree(sparse_image_path, verity_image_path, prop_dict):
cmd = ("build_verity_tree -A %s %s %s" % (FIXED_SALT, sparse_image_path, verity_image_path)) cmd = "build_verity_tree -A %s %s %s" % (
FIXED_SALT, sparse_image_path, verity_image_path)
print cmd print cmd
status, output = commands.getstatusoutput(cmd) status, output = commands.getstatusoutput(cmd)
if status: if status:
@@ -92,14 +93,10 @@ def BuildVerityTree(sparse_image_path, verity_image_path, prop_dict):
def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt, def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
block_device, signer_path, key): block_device, signer_path, key):
cmd = ("system/extras/verity/build_verity_metadata.py %s %s %s %s %s %s %s" % cmd_template = (
(image_size, "system/extras/verity/build_verity_metadata.py %s %s %s %s %s %s %s")
verity_metadata_path, cmd = cmd_template % (image_size, verity_metadata_path, root_hash, salt,
root_hash, block_device, signer_path, key)
salt,
block_device,
signer_path,
key))
print cmd print cmd
status, output = commands.getstatusoutput(cmd) status, output = commands.getstatusoutput(cmd)
if status: if status:
@@ -125,10 +122,13 @@ def Append2Simg(sparse_image_path, unsparse_image_path, error_message):
return False return False
return True return True
def BuildVerifiedImage(data_image_path, verity_image_path, verity_metadata_path): def BuildVerifiedImage(data_image_path, verity_image_path,
if not Append2Simg(data_image_path, verity_metadata_path, "Could not append verity metadata!"): verity_metadata_path):
if not Append2Simg(data_image_path, verity_metadata_path,
"Could not append verity metadata!"):
return False return False
if not Append2Simg(data_image_path, verity_image_path, "Could not append verity tree!"): if not Append2Simg(data_image_path, verity_image_path,
"Could not append verity tree!"):
return False return False
return True return True
@@ -153,7 +153,8 @@ def MakeVerityEnabledImage(out_file, prop_dict):
Args: Args:
out_file: the location to write the verifiable image at out_file: the location to write the verifiable image at
prop_dict: a dictionary of properties required for image creation and verification prop_dict: a dictionary of properties required for image creation and
verification
Returns: Returns:
True on success, False otherwise. True on success, False otherwise.
""" """
@@ -178,13 +179,8 @@ def MakeVerityEnabledImage(out_file, prop_dict):
# build the metadata blocks # build the metadata blocks
root_hash = prop_dict["verity_root_hash"] root_hash = prop_dict["verity_root_hash"]
salt = prop_dict["verity_salt"] salt = prop_dict["verity_salt"]
if not BuildVerityMetadata(image_size, if not BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
verity_metadata_path, block_dev, signer_path, signer_key):
root_hash,
salt,
block_dev,
signer_path,
signer_key):
shutil.rmtree(tempdir_name, ignore_errors=True) shutil.rmtree(tempdir_name, ignore_errors=True)
return False return False
@@ -223,7 +219,8 @@ def BuildImage(in_dir, prop_dict, out_file,
is_verity_partition = "verity_block_device" in prop_dict is_verity_partition = "verity_block_device" in prop_dict
verity_supported = prop_dict.get("verity") == "true" verity_supported = prop_dict.get("verity") == "true"
# adjust the partition size to make room for the hashes if this is to be verified # adjust the partition size to make room for the hashes if this is to be
# verified
if verity_supported and is_verity_partition: if verity_supported and is_verity_partition:
partition_size = int(prop_dict.get("partition_size")) partition_size = int(prop_dict.get("partition_size"))
adjusted_size = AdjustPartitionSizeForVerity(partition_size) adjusted_size = AdjustPartitionSizeForVerity(partition_size)
@@ -329,7 +326,8 @@ def ImagePropFromGlobalDict(glob_dict, mount_point):
d["mount_point"] = mount_point d["mount_point"] = mount_point
if mount_point == "system": if mount_point == "system":
copy_prop("fs_type", "fs_type") copy_prop("fs_type", "fs_type")
# Copy the generic sysetem fs type first, override with specific one if available. # Copy the generic sysetem fs type first, override with specific one if
# available.
copy_prop("system_fs_type", "fs_type") copy_prop("system_fs_type", "fs_type")
copy_prop("system_size", "partition_size") copy_prop("system_size", "partition_size")
copy_prop("system_journal_size", "journal_size") copy_prop("system_journal_size", "journal_size")
@@ -397,7 +395,8 @@ def main(argv):
image_properties = ImagePropFromGlobalDict(glob_dict, mount_point) image_properties = ImagePropFromGlobalDict(glob_dict, mount_point)
if not BuildImage(in_dir, image_properties, out_file): if not BuildImage(in_dir, image_properties, out_file):
print >> sys.stderr, "error: failed to build %s from %s" % (out_file, in_dir) print >> sys.stderr, "error: failed to build %s from %s" % (out_file,
in_dir)
exit(1) exit(1)

View File

@@ -1,441 +0,0 @@
#!/usr/bin/env python
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Check the signatures of all APKs in a target_files .zip file. With
-c, compare the signatures of each package to the ones in a separate
target_files (usually a previously distributed build for the same
device) and flag any changes.
Usage: check_target_file_signatures [flags] target_files
-c (--compare_with) <other_target_files>
Look for compatibility problems between the two sets of target
files (eg., packages whose keys have changed).
-l (--local_cert_dirs) <dir,dir,...>
Comma-separated list of top-level directories to scan for
.x509.pem files. Defaults to "vendor,build". Where cert files
can be found that match APK signatures, the filename will be
printed as the cert name, otherwise a hash of the cert plus its
subject string will be printed instead.
-t (--text)
Dump the certificate information for both packages in comparison
mode (this output is normally suppressed).
"""
import sys

# Fail fast on interpreters older than 2.7; the rest of this script
# relies on 2.7-era stdlib behavior.
if sys.hexversion < 0x02070000:
  print >> sys.stderr, "Python 2.7 or newer is required."
  sys.exit(1)
import os
import re
import shutil
import subprocess
import tempfile
import zipfile
try:
from hashlib import sha1 as sha1
except ImportError:
from sha import sha as sha1
import common
# Work around a bug in python's zipfile module that prevents opening
# of zipfiles if any entry has an extra field of between 1 and 3 bytes
# (which is common with zipaligned APKs). This overrides the
# ZipInfo._decodeExtra() method (which contains the bug) with an empty
# version (since we don't need to decode the extra field anyway).
class MyZipInfo(zipfile.ZipInfo):
  """ZipInfo subclass that skips the buggy extra-field parsing (see the
  workaround comment above)."""
  def _decodeExtra(self):
    # Deliberately a no-op: we never need the extra field, and decoding it
    # trips a zipfile bug on 1-3 byte fields (common in zipaligned APKs).
    pass
# Install the workaround globally so every ZipFile opened by this process
# (including ones opened inside common/zipfile helpers) benefits.
zipfile.ZipInfo = MyZipInfo
OPTIONS = common.OPTIONS

# Command-line flags (filled in by option_handler in main()).
OPTIONS.text = False
OPTIONS.compare_with = None
OPTIONS.local_cert_dirs = ("vendor", "build")

# Accumulated problem messages; each is prefixed with the context stack
# that was active when it was recorded (see AddProblem/Push/Pop).
PROBLEMS = []
PROBLEM_PREFIX = []
def AddProblem(msg):
  """Record a problem message, prefixed with the current context stack."""
  prefix = " ".join(PROBLEM_PREFIX)
  PROBLEMS.append(prefix + " " + msg)
def Push(msg):
  # Enter a new problem context: subsequent AddProblem() calls are
  # prefixed with msg until the matching Pop().
  PROBLEM_PREFIX.append(msg)
def Pop():
  # Leave the problem context most recently entered via Push().
  PROBLEM_PREFIX.pop()
def Banner(msg):
  # Print msg framed by 70-character rules, as a section header.
  print "-" * 70
  print "  ", msg
  print "-" * 70
def GetCertSubject(cert):
  """Return the subject string of a DER-encoded certificate.

  Shells out to `openssl x509 -text` and scans the dump for the
  "Subject:" line.  Returns a placeholder string if the cert can't be
  read or no subject line is found.
  """
  p = common.Run(["openssl", "x509", "-inform", "DER", "-text"],
                 stdin=subprocess.PIPE,
                 stdout=subprocess.PIPE)
  out, err = p.communicate(cert)
  # NOTE(review): this flags whitespace-only stderr as an error but ignores
  # stderr with real content -- the condition looks inverted; confirm intent.
  if err and not err.strip():
    return "(error reading cert subject)"
  for line in out.split("\n"):
    line = line.strip()
    if line.startswith("Subject:"):
      # Strip the "Subject:" prefix (8 chars) and surrounding whitespace.
      return line[8:].strip()
  return "(unknown cert subject)"
class CertDB(object):
  """Registry mapping DER certificate blobs to human-readable names."""

  def __init__(self):
    # Maps cert (DER bytes) -> display name; names are comma-joined when
    # the same cert is registered under several names.
    self.certs = {}

  def Add(self, cert, name=None):
    """Register cert under name, merging names for an already-known cert."""
    if cert in self.certs:
      if name:
        self.certs[cert] = self.certs[cert] + "," + name
    else:
      if name is None:
        # Synthesize a name from a hash prefix plus the cert subject.
        # NOTE(review): uses common.sha1 rather than the sha1 imported in
        # this file -- presumably common re-exports it; confirm.
        name = "unknown cert %s (%s)" % (common.sha1(cert).hexdigest()[:12],
                                         GetCertSubject(cert))
      self.certs[cert] = name

  def Get(self, cert):
    """Return the name for a given cert, or None if unknown."""
    return self.certs.get(cert, None)

  def FindLocalCerts(self):
    """Scan OPTIONS.local_cert_dirs for .x509.pem files and register them."""
    to_load = []
    for top in OPTIONS.local_cert_dirs:
      for dirpath, dirnames, filenames in os.walk(top):
        certs = [os.path.join(dirpath, i)
                 for i in filenames if i.endswith(".x509.pem")]
        if certs:
          to_load.extend(certs)
    for i in to_load:
      f = open(i)
      cert = common.ParseCertificate(f.read())
      f.close()
      # Strip both extensions (".x509" and ".pem") to get the cert name.
      name, _ = os.path.splitext(i)
      name, _ = os.path.splitext(name)
      self.Add(cert, name)
# Global registry of every cert seen, shared by all APK/TargetFiles objects.
ALL_CERTS = CertDB()
def CertFromPKCS7(data, filename):
  """Read the cert out of a PKCS#7-format file (which is what is
  stored in a signed .apk).

  Returns the PEM certificate string, or None on failure (in which case
  a problem is recorded via AddProblem under the filename context).
  """
  Push(filename + ":")
  try:
    p = common.Run(["openssl", "pkcs7",
                    "-inform", "DER",
                    "-outform", "PEM",
                    "-print_certs"],
                   stdin=subprocess.PIPE,
                   stdout=subprocess.PIPE)
    out, err = p.communicate(data)
    # NOTE(review): flags whitespace-only stderr as an error but ignores
    # stderr with real content -- looks inverted; confirm intent.
    if err and not err.strip():
      AddProblem("error reading cert:\n" + err)
      return None
    cert = common.ParseCertificate(out)
    if not cert:
      AddProblem("error parsing cert output")
      return None
    return cert
  finally:
    Pop()
class APK(object):
  """Signing certs and manifest data extracted from one APK file."""

  def __init__(self, full_filename, filename):
    """Parse the APK at full_filename; filename is the display name."""
    self.filename = filename
    self.certs = None
    self.shared_uid = None
    self.package = None
    Push(filename+":")
    try:
      self.RecordCerts(full_filename)
      self.ReadManifest(full_filename)
    finally:
      Pop()

  def RecordCerts(self, full_filename):
    """Collect the set of signing certs into self.certs (a frozenset)."""
    out = set()
    # Open outside the try block so the finally clause can't hit an
    # unbound `f` if the open itself fails.
    f = open(full_filename)
    try:
      apk = zipfile.ZipFile(f, "r")
      pkcs7 = None
      for info in apk.infolist():
        if info.filename.startswith("META-INF/") and \
           (info.filename.endswith(".DSA") or info.filename.endswith(".RSA")):
          pkcs7 = apk.read(info.filename)
          cert = CertFromPKCS7(pkcs7, info.filename)
          # CertFromPKCS7 returns None (and records a problem) on failure;
          # don't register a None cert.
          if cert:
            out.add(cert)
            ALL_CERTS.Add(cert)
      if not pkcs7:
        AddProblem("no signature")
    finally:
      f.close()
    self.certs = frozenset(out)

  def ReadManifest(self, full_filename):
    """Extract package name and sharedUserId from AndroidManifest.xml."""
    p = common.Run(["aapt", "dump", "xmltree", full_filename,
                    "AndroidManifest.xml"],
                   stdout=subprocess.PIPE)
    manifest, err = p.communicate()
    if err:
      AddProblem("failed to read manifest")
      return
    self.shared_uid = None
    self.package = None
    for line in manifest.split("\n"):
      line = line.strip()
      # Raw string so \S and \( are real regex escapes rather than
      # (deprecated) string-literal escapes.
      m = re.search(r'A: (\S*?)(?:\(0x[0-9a-f]+\))?="(.*?)" \(Raw', line)
      if m:
        name = m.group(1)
        if name == "android:sharedUserId":
          if self.shared_uid is not None:
            AddProblem("multiple sharedUserId declarations")
          self.shared_uid = m.group(2)
        elif name == "package":
          if self.package is not None:
            AddProblem("multiple package declarations")
          self.package = m.group(2)
    if self.package is None:
      AddProblem("no package declaration")
class TargetFiles(object):
  """All the APKs found in one target_files zip, indexed by package name."""

  def __init__(self):
    # Minimum column widths for the report tables; grown as APKs load.
    self.max_pkg_len = 30
    self.max_fn_len = 20

  def LoadZipFile(self, filename):
    """Unzip every *.apk entry from filename and parse each one."""
    d, z = common.UnzipTemp(filename, '*.apk')
    try:
      self.apks = {}
      self.apks_by_basename = {}
      for dirpath, dirnames, filenames in os.walk(d):
        for fn in filenames:
          if fn.endswith(".apk"):
            fullname = os.path.join(dirpath, fn)
            # Display name is the path relative to the unzip root.
            displayname = fullname[len(d)+1:]
            apk = APK(fullname, displayname)
            self.apks[apk.package] = apk
            self.apks_by_basename[os.path.basename(apk.filename)] = apk
            self.max_pkg_len = max(self.max_pkg_len, len(apk.package))
            self.max_fn_len = max(self.max_fn_len, len(apk.filename))
    finally:
      shutil.rmtree(d)
    self.certmap = common.ReadApkCerts(z)
    z.close()

  def CheckSharedUids(self):
    """Look for any instances where packages signed with different
    certs request the same sharedUserId."""
    apks_by_uid = {}
    for apk in self.apks.itervalues():
      if apk.shared_uid:
        apks_by_uid.setdefault(apk.shared_uid, []).append(apk)
    for uid in sorted(apks_by_uid.keys()):
      apks = apks_by_uid[uid]
      # for/else: the else runs only if no mismatched cert set was found.
      for apk in apks[1:]:
        if apk.certs != apks[0].certs:
          break
      else:
        # all packages have the same set of certs; this uid is fine.
        continue
      AddProblem("different cert sets for packages with uid %s" % (uid,))
      print "uid %s is shared by packages with different cert sets:" % (uid,)
      for apk in apks:
        print "%-*s [%s]" % (self.max_pkg_len, apk.package, apk.filename)
        for cert in apk.certs:
          print "   ", ALL_CERTS.Get(cert)
      print

  def CheckExternalSignatures(self):
    """Flag EXTERNAL-marked apps still signed with an in-tree cert."""
    for apk_filename, certname in self.certmap.iteritems():
      if certname == "EXTERNAL":
        # Apps marked EXTERNAL should be signed with the test key
        # during development, then manually re-signed after
        # predexopting. Consider it an error if this app is now
        # signed with any key that is present in our tree.
        apk = self.apks_by_basename[apk_filename]
        # NOTE(review): APK objects define `certs` (plural); `apk.cert`
        # looks like it would raise AttributeError here -- confirm.
        name = ALL_CERTS.Get(apk.cert)
        if not name.startswith("unknown "):
          Push(apk.filename)
          AddProblem("hasn't been signed with EXTERNAL cert")
          Pop()

  def PrintCerts(self):
    """Display a table of packages grouped by cert."""
    by_cert = {}
    for apk in self.apks.itervalues():
      for cert in apk.certs:
        by_cert.setdefault(cert, []).append((apk.package, apk))
    # Sort so the most widely-used certs are printed first.
    order = [(-len(v), k) for (k, v) in by_cert.iteritems()]
    order.sort()
    for _, cert in order:
      print "%s:" % (ALL_CERTS.Get(cert),)
      apks = by_cert[cert]
      apks.sort()
      for _, apk in apks:
        if apk.shared_uid:
          print "  %-*s  %-*s  [%s]" % (self.max_fn_len, apk.filename,
                                        self.max_pkg_len, apk.package,
                                        apk.shared_uid)
        else:
          print "  %-*s  %-*s" % (self.max_fn_len, apk.filename,
                                  self.max_pkg_len, apk.package)
      print

  def CompareWith(self, other):
    """Look for instances where a given package that exists in both
    self and other have different certs."""
    # NOTE(review): `all` shadows the builtin of the same name.
    all = set(self.apks.keys())
    all.update(other.apks.keys())
    max_pkg_len = max(self.max_pkg_len, other.max_pkg_len)
    by_certpair = {}
    for i in all:
      if i in self.apks:
        if i in other.apks:
          # in both; should have same set of certs
          if self.apks[i].certs != other.apks[i].certs:
            by_certpair.setdefault((other.apks[i].certs,
                                    self.apks[i].certs), []).append(i)
        else:
          print "%s [%s]: new APK (not in comparison target_files)" % (
              i, self.apks[i].filename)
      else:
        if i in other.apks:
          print "%s [%s]: removed APK (only in comparison target_files)" % (
              i, other.apks[i].filename)
    if by_certpair:
      AddProblem("some APKs changed certs")
      Banner("APK signing differences")
      for (old, new), packages in sorted(by_certpair.items()):
        for i, o in enumerate(old):
          if i == 0:
            print "was", ALL_CERTS.Get(o)
          else:
            print "   ", ALL_CERTS.Get(o)
        for i, n in enumerate(new):
          if i == 0:
            print "now", ALL_CERTS.Get(n)
          else:
            print "   ", ALL_CERTS.Get(n)
        for i in sorted(packages):
          old_fn = other.apks[i].filename
          new_fn = self.apks[i].filename
          if old_fn == new_fn:
            print "  %-*s  [%s]" % (max_pkg_len, i, old_fn)
          else:
            print "  %-*s  [was: %s; now: %s]" % (max_pkg_len, i,
                                                  old_fn, new_fn)
      print
def main(argv):
def option_handler(o, a):
if o in ("-c", "--compare_with"):
OPTIONS.compare_with = a
elif o in ("-l", "--local_cert_dirs"):
OPTIONS.local_cert_dirs = [i.strip() for i in a.split(",")]
elif o in ("-t", "--text"):
OPTIONS.text = True
else:
return False
return True
args = common.ParseOptions(argv, __doc__,
extra_opts="c:l:t",
extra_long_opts=["compare_with=",
"local_cert_dirs="],
extra_option_handler=option_handler)
if len(args) != 1:
common.Usage(__doc__)
sys.exit(1)
ALL_CERTS.FindLocalCerts()
Push("input target_files:")
try:
target_files = TargetFiles()
target_files.LoadZipFile(args[0])
finally:
Pop()
compare_files = None
if OPTIONS.compare_with:
Push("comparison target_files:")
try:
compare_files = TargetFiles()
compare_files.LoadZipFile(OPTIONS.compare_with)
finally:
Pop()
if OPTIONS.text or not compare_files:
Banner("target files")
target_files.PrintCerts()
target_files.CheckSharedUids()
target_files.CheckExternalSignatures()
if compare_files:
if OPTIONS.text:
Banner("comparison files")
compare_files.PrintCerts()
target_files.CompareWith(compare_files)
if PROBLEMS:
print "%d problem(s) found:\n" % (len(PROBLEMS),)
for p in PROBLEMS:
print p
return 1
return 0
if __name__ == '__main__':
try:
r = main(sys.argv[1:])
sys.exit(r)
except common.ExternalError, e:
print
print " ERROR: %s" % (e,)
print
sys.exit(1)

View File

@@ -0,0 +1 @@
check_target_files_signatures.py

View File

@@ -0,0 +1,442 @@
#!/usr/bin/env python
#
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Check the signatures of all APKs in a target_files .zip file. With
-c, compare the signatures of each package to the ones in a separate
target_files (usually a previously distributed build for the same
device) and flag any changes.
Usage: check_target_file_signatures [flags] target_files
-c (--compare_with) <other_target_files>
Look for compatibility problems between the two sets of target
files (eg., packages whose keys have changed).
-l (--local_cert_dirs) <dir,dir,...>
Comma-separated list of top-level directories to scan for
.x509.pem files. Defaults to "vendor,build". Where cert files
can be found that match APK signatures, the filename will be
printed as the cert name, otherwise a hash of the cert plus its
subject string will be printed instead.
-t (--text)
Dump the certificate information for both packages in comparison
mode (this output is normally suppressed).
"""
import sys
if sys.hexversion < 0x02070000:
print >> sys.stderr, "Python 2.7 or newer is required."
sys.exit(1)
import os
import re
import shutil
import subprocess
import zipfile
import common
# Work around a bug in python's zipfile module that prevents opening
# of zipfiles if any entry has an extra field of between 1 and 3 bytes
# (which is common with zipaligned APKs). This overrides the
# ZipInfo._decodeExtra() method (which contains the bug) with an empty
# version (since we don't need to decode the extra field anyway).
class MyZipInfo(zipfile.ZipInfo):
  """ZipInfo that skips parsing of the (possibly malformed) extra field."""
  def _decodeExtra(self):
    pass
# Monkey-patch the module-level class so every ZipFile opened by this
# tool tolerates zipaligned APKs (see comment above).
zipfile.ZipInfo = MyZipInfo
# Global option state, shared with the common module; defaults below
# correspond to the flags documented in the module docstring.
OPTIONS = common.OPTIONS
OPTIONS.text = False
OPTIONS.compare_with = None
OPTIONS.local_cert_dirs = ("vendor", "build")
# Accumulated problem messages, and the stack of context strings that
# prefixes each recorded problem (see Push/Pop/AddProblem).
PROBLEMS = []
PROBLEM_PREFIX = []
def AddProblem(msg):
  """Record a problem, tagged with the current context-prefix stack."""
  context = " ".join(PROBLEM_PREFIX)
  PROBLEMS.append(context + " " + msg)
def Push(msg):
  # Push a context string onto the problem-prefix stack.
  PROBLEM_PREFIX.append(msg)
def Pop():
  # Pop the most recent context string; must pair with a prior Push().
  PROBLEM_PREFIX.pop()
def Banner(msg):
print "-" * 70
print " ", msg
print "-" * 70
def GetCertSubject(cert):
  """Return the subject string of a DER cert by shelling out to openssl."""
  p = common.Run(["openssl", "x509", "-inform", "DER", "-text"],
                 stdin=subprocess.PIPE,
                 stdout=subprocess.PIPE)
  out, err = p.communicate(cert)
  # NOTE(review): stderr is not piped, so 'err' is always None here and
  # this branch can never fire; the condition also looks inverted
  # ("err nonempty but all whitespace") -- TODO confirm intent.
  if err and not err.strip():
    return "(error reading cert subject)"
  for line in out.split("\n"):
    line = line.strip()
    if line.startswith("Subject:"):
      # Drop the "Subject:" prefix and surrounding whitespace.
      return line[8:].strip()
  return "(unknown cert subject)"
class CertDB(object):
  """Maps certificate data to human-readable display names."""
  def __init__(self):
    # cert (DER bytes) -> display name
    self.certs = {}
  def Add(self, cert, name=None):
    """Record 'cert'; append 'name' as an alias if the cert is already
    known, or synthesize a name from its hash/subject if none is given."""
    if cert in self.certs:
      if name:
        self.certs[cert] = self.certs[cert] + "," + name
    else:
      if name is None:
        name = "unknown cert %s (%s)" % (common.sha1(cert).hexdigest()[:12],
                                         GetCertSubject(cert))
      self.certs[cert] = name
  def Get(self, cert):
    """Return the name for a given cert."""
    return self.certs.get(cert, None)
  def FindLocalCerts(self):
    """Walk OPTIONS.local_cert_dirs for .x509.pem files and add each,
    named after its path with both extensions stripped."""
    to_load = []
    for top in OPTIONS.local_cert_dirs:
      for dirpath, _, filenames in os.walk(top):
        certs = [os.path.join(dirpath, i)
                 for i in filenames if i.endswith(".x509.pem")]
        if certs:
          to_load.extend(certs)
    for i in to_load:
      f = open(i)
      cert = common.ParseCertificate(f.read())
      f.close()
      # splitext twice: "foo.x509.pem" -> "foo.x509" -> "foo"
      name, _ = os.path.splitext(i)
      name, _ = os.path.splitext(name)
      self.Add(cert, name)
# Global database of every cert seen, both from local .pem files and
# from the APKs themselves.
ALL_CERTS = CertDB()
def CertFromPKCS7(data, filename):
  """Read the cert out of a PKCS#7-format file (which is what is
  stored in a signed .apk).  Returns the PEM cert, or None on error
  (recording the problem)."""
  Push(filename + ":")
  try:
    p = common.Run(["openssl", "pkcs7",
                    "-inform", "DER",
                    "-outform", "PEM",
                    "-print_certs"],
                   stdin=subprocess.PIPE,
                   stdout=subprocess.PIPE)
    out, err = p.communicate(data)
    # NOTE(review): stderr is not piped, so 'err' is always None and
    # this check can never trigger -- TODO confirm intended behavior.
    if err and not err.strip():
      AddProblem("error reading cert:\n" + err)
      return None
    cert = common.ParseCertificate(out)
    if not cert:
      AddProblem("error parsing cert output")
      return None
    return cert
  finally:
    Pop()
class APK(object):
  """The signing certs and manifest data of one APK in a target_files."""

  def __init__(self, full_filename, filename):
    """Load an APK.

    Args:
      full_filename: path on disk to the extracted .apk.
      filename: path of the .apk relative to the target_files root
          (used for display and problem reports).
    """
    self.filename = filename
    self.certs = None
    self.shared_uid = None
    self.package = None
    Push(filename+":")
    try:
      self.RecordCerts(full_filename)
      self.ReadManifest(full_filename)
    finally:
      Pop()

  def RecordCerts(self, full_filename):
    """Collect this APK's signing certs from META-INF/*.DSA|*.RSA into
    self.certs (a frozenset)."""
    out = set()
    # Open before entering the try block: the old code opened inside
    # it, so a failed open() reached the close() in the finally clause
    # with 'f' unbound and raised NameError instead of the real IOError.
    f = open(full_filename)
    try:
      apk = zipfile.ZipFile(f, "r")
      pkcs7 = None
      for info in apk.infolist():
        if info.filename.startswith("META-INF/") and \
           (info.filename.endswith(".DSA") or info.filename.endswith(".RSA")):
          pkcs7 = apk.read(info.filename)
          cert = CertFromPKCS7(pkcs7, info.filename)
          out.add(cert)
          ALL_CERTS.Add(cert)
      if not pkcs7:
        AddProblem("no signature")
    finally:
      f.close()
    self.certs = frozenset(out)

  def ReadManifest(self, full_filename):
    """Parse the package name and sharedUserId out of
    AndroidManifest.xml via 'aapt dump xmltree'."""
    p = common.Run(["aapt", "dump", "xmltree", full_filename,
                    "AndroidManifest.xml"],
                   stdout=subprocess.PIPE)
    manifest, err = p.communicate()
    # NOTE(review): stderr is not piped, so 'err' is always None and
    # aapt failures are not actually detected here -- TODO confirm.
    if err:
      AddProblem("failed to read manifest")
      return
    self.shared_uid = None
    self.package = None
    for line in manifest.split("\n"):
      line = line.strip()
      # Matches aapt attribute lines such as:
      #   A: android:sharedUserId(0x0101000b)="android.uid.system" (Raw...
      m = re.search(r'A: (\S*?)(?:\(0x[0-9a-f]+\))?="(.*?)" \(Raw', line)
      if m:
        name = m.group(1)
        if name == "android:sharedUserId":
          if self.shared_uid is not None:
            AddProblem("multiple sharedUserId declarations")
          self.shared_uid = m.group(2)
        elif name == "package":
          if self.package is not None:
            AddProblem("multiple package declarations")
          self.package = m.group(2)
    if self.package is None:
      AddProblem("no package declaration")
class TargetFiles(object):
def __init__(self):
self.max_pkg_len = 30
self.max_fn_len = 20
self.apks = None
self.apks_by_basename = None
self.certmap = None
def LoadZipFile(self, filename):
d, z = common.UnzipTemp(filename, '*.apk')
try:
self.apks = {}
self.apks_by_basename = {}
for dirpath, _, filenames in os.walk(d):
for fn in filenames:
if fn.endswith(".apk"):
fullname = os.path.join(dirpath, fn)
displayname = fullname[len(d)+1:]
apk = APK(fullname, displayname)
self.apks[apk.package] = apk
self.apks_by_basename[os.path.basename(apk.filename)] = apk
self.max_pkg_len = max(self.max_pkg_len, len(apk.package))
self.max_fn_len = max(self.max_fn_len, len(apk.filename))
finally:
shutil.rmtree(d)
self.certmap = common.ReadApkCerts(z)
z.close()
def CheckSharedUids(self):
"""Look for any instances where packages signed with different
certs request the same sharedUserId."""
apks_by_uid = {}
for apk in self.apks.itervalues():
if apk.shared_uid:
apks_by_uid.setdefault(apk.shared_uid, []).append(apk)
for uid in sorted(apks_by_uid.keys()):
apks = apks_by_uid[uid]
for apk in apks[1:]:
if apk.certs != apks[0].certs:
break
else:
# all packages have the same set of certs; this uid is fine.
continue
AddProblem("different cert sets for packages with uid %s" % (uid,))
print "uid %s is shared by packages with different cert sets:" % (uid,)
for apk in apks:
print "%-*s [%s]" % (self.max_pkg_len, apk.package, apk.filename)
for cert in apk.certs:
print " ", ALL_CERTS.Get(cert)
print
def CheckExternalSignatures(self):
for apk_filename, certname in self.certmap.iteritems():
if certname == "EXTERNAL":
# Apps marked EXTERNAL should be signed with the test key
# during development, then manually re-signed after
# predexopting. Consider it an error if this app is now
# signed with any key that is present in our tree.
apk = self.apks_by_basename[apk_filename]
name = ALL_CERTS.Get(apk.cert)
if not name.startswith("unknown "):
Push(apk.filename)
AddProblem("hasn't been signed with EXTERNAL cert")
Pop()
def PrintCerts(self):
"""Display a table of packages grouped by cert."""
by_cert = {}
for apk in self.apks.itervalues():
for cert in apk.certs:
by_cert.setdefault(cert, []).append((apk.package, apk))
order = [(-len(v), k) for (k, v) in by_cert.iteritems()]
order.sort()
for _, cert in order:
print "%s:" % (ALL_CERTS.Get(cert),)
apks = by_cert[cert]
apks.sort()
for _, apk in apks:
if apk.shared_uid:
print " %-*s %-*s [%s]" % (self.max_fn_len, apk.filename,
self.max_pkg_len, apk.package,
apk.shared_uid)
else:
print " %-*s %-*s" % (self.max_fn_len, apk.filename,
self.max_pkg_len, apk.package)
print
def CompareWith(self, other):
"""Look for instances where a given package that exists in both
self and other have different certs."""
all_apks = set(self.apks.keys())
all_apks.update(other.apks.keys())
max_pkg_len = max(self.max_pkg_len, other.max_pkg_len)
by_certpair = {}
for i in all:
if i in self.apks:
if i in other.apks:
# in both; should have same set of certs
if self.apks[i].certs != other.apks[i].certs:
by_certpair.setdefault((other.apks[i].certs,
self.apks[i].certs), []).append(i)
else:
print "%s [%s]: new APK (not in comparison target_files)" % (
i, self.apks[i].filename)
else:
if i in other.apks:
print "%s [%s]: removed APK (only in comparison target_files)" % (
i, other.apks[i].filename)
if by_certpair:
AddProblem("some APKs changed certs")
Banner("APK signing differences")
for (old, new), packages in sorted(by_certpair.items()):
for i, o in enumerate(old):
if i == 0:
print "was", ALL_CERTS.Get(o)
else:
print " ", ALL_CERTS.Get(o)
for i, n in enumerate(new):
if i == 0:
print "now", ALL_CERTS.Get(n)
else:
print " ", ALL_CERTS.Get(n)
for i in sorted(packages):
old_fn = other.apks[i].filename
new_fn = self.apks[i].filename
if old_fn == new_fn:
print " %-*s [%s]" % (max_pkg_len, i, old_fn)
else:
print " %-*s [was: %s; now: %s]" % (max_pkg_len, i,
old_fn, new_fn)
print
def main(argv):
def option_handler(o, a):
if o in ("-c", "--compare_with"):
OPTIONS.compare_with = a
elif o in ("-l", "--local_cert_dirs"):
OPTIONS.local_cert_dirs = [i.strip() for i in a.split(",")]
elif o in ("-t", "--text"):
OPTIONS.text = True
else:
return False
return True
args = common.ParseOptions(argv, __doc__,
extra_opts="c:l:t",
extra_long_opts=["compare_with=",
"local_cert_dirs="],
extra_option_handler=option_handler)
if len(args) != 1:
common.Usage(__doc__)
sys.exit(1)
ALL_CERTS.FindLocalCerts()
Push("input target_files:")
try:
target_files = TargetFiles()
target_files.LoadZipFile(args[0])
finally:
Pop()
compare_files = None
if OPTIONS.compare_with:
Push("comparison target_files:")
try:
compare_files = TargetFiles()
compare_files.LoadZipFile(OPTIONS.compare_with)
finally:
Pop()
if OPTIONS.text or not compare_files:
Banner("target files")
target_files.PrintCerts()
target_files.CheckSharedUids()
target_files.CheckExternalSignatures()
if compare_files:
if OPTIONS.text:
Banner("comparison files")
compare_files.PrintCerts()
target_files.CompareWith(compare_files)
if PROBLEMS:
print "%d problem(s) found:\n" % (len(PROBLEMS),)
for p in PROBLEMS:
print p
return 1
return 0
if __name__ == '__main__':
  try:
    r = main(sys.argv[1:])
    sys.exit(r)
  except common.ExternalError as e:
    # Report tool failures (e.g. missing openssl/aapt) and exit nonzero.
    print
    print " ERROR: %s" % (e,)
    print
    sys.exit(1)

View File

@@ -30,44 +30,45 @@ import time
import zipfile import zipfile
import blockimgdiff import blockimgdiff
from rangelib import * import rangelib
try: try:
from hashlib import sha1 as sha1 from hashlib import sha1 as sha1
except ImportError: except ImportError:
from sha import sha as sha1 from sha import sha as sha1
# missing in Python 2.4 and before
if not hasattr(os, "SEEK_SET"):
os.SEEK_SET = 0
class Options(object): pass class Options(object):
OPTIONS = Options() def __init__(self):
platform_search_path = {
DEFAULT_SEARCH_PATH_BY_PLATFORM = { "linux2": "out/host/linux-x86",
"linux2": "out/host/linux-x86", "darwin": "out/host/darwin-x86",
"darwin": "out/host/darwin-x86",
} }
OPTIONS.search_path = DEFAULT_SEARCH_PATH_BY_PLATFORM.get(sys.platform, None)
OPTIONS.signapk_path = "framework/signapk.jar" # Relative to search_path self.search_path = platform_search_path.get(sys.platform, None)
OPTIONS.extra_signapk_args = [] self.signapk_path = "framework/signapk.jar" # Relative to search_path
OPTIONS.java_path = "java" # Use the one on the path by default. self.extra_signapk_args = []
OPTIONS.java_args = "-Xmx2048m" # JVM Args self.java_path = "java" # Use the one on the path by default.
OPTIONS.public_key_suffix = ".x509.pem" self.java_args = "-Xmx2048m" # JVM Args
OPTIONS.private_key_suffix = ".pk8" self.public_key_suffix = ".x509.pem"
OPTIONS.verbose = False self.private_key_suffix = ".pk8"
OPTIONS.tempfiles = [] self.verbose = False
OPTIONS.device_specific = None self.tempfiles = []
OPTIONS.extras = {} self.device_specific = None
OPTIONS.info_dict = None self.extras = {}
self.info_dict = None
self.worker_threads = None
OPTIONS = Options()
# Values for "certificate" in apkcerts that mean special things. # Values for "certificate" in apkcerts that mean special things.
SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL") SPECIAL_CERT_STRINGS = ("PRESIGNED", "EXTERNAL")
class ExternalError(RuntimeError): pass class ExternalError(RuntimeError):
pass
def Run(args, **kwargs): def Run(args, **kwargs):
@@ -94,19 +95,19 @@ def CloseInheritedPipes():
pass pass
def LoadInfoDict(input): def LoadInfoDict(input_file):
"""Read and parse the META/misc_info.txt key/value pairs from the """Read and parse the META/misc_info.txt key/value pairs from the
input target files and return a dict.""" input target files and return a dict."""
def read_helper(fn): def read_helper(fn):
if isinstance(input, zipfile.ZipFile): if isinstance(input_file, zipfile.ZipFile):
return input.read(fn) return input_file.read(fn)
else: else:
path = os.path.join(input, *fn.split("/")) path = os.path.join(input_file, *fn.split("/"))
try: try:
with open(path) as f: with open(path) as f:
return f.read() return f.read()
except IOError, e: except IOError as e:
if e.errno == errno.ENOENT: if e.errno == errno.ENOENT:
raise KeyError(fn) raise KeyError(fn)
d = {} d = {}
@@ -122,14 +123,16 @@ def LoadInfoDict(input):
if "mkyaffs2_extra_flags" not in d: if "mkyaffs2_extra_flags" not in d:
try: try:
d["mkyaffs2_extra_flags"] = read_helper("META/mkyaffs2-extra-flags.txt").strip() d["mkyaffs2_extra_flags"] = read_helper(
"META/mkyaffs2-extra-flags.txt").strip()
except KeyError: except KeyError:
# ok if flags don't exist # ok if flags don't exist
pass pass
if "recovery_api_version" not in d: if "recovery_api_version" not in d:
try: try:
d["recovery_api_version"] = read_helper("META/recovery-api-version.txt").strip() d["recovery_api_version"] = read_helper(
"META/recovery-api-version.txt").strip()
except KeyError: except KeyError:
raise ValueError("can't find recovery API version in input target-files") raise ValueError("can't find recovery API version in input target-files")
@@ -146,9 +149,11 @@ def LoadInfoDict(input):
try: try:
data = read_helper("META/imagesizes.txt") data = read_helper("META/imagesizes.txt")
for line in data.split("\n"): for line in data.split("\n"):
if not line: continue if not line:
continue
name, value = line.split(" ", 1) name, value = line.split(" ", 1)
if not value: continue if not value:
continue
if name == "blocksize": if name == "blocksize":
d[name] = value d[name] = value
else: else:
@@ -186,7 +191,8 @@ def LoadDictionaryFromLines(lines):
d = {} d = {}
for line in lines: for line in lines:
line = line.strip() line = line.strip()
if not line or line.startswith("#"): continue if not line or line.startswith("#"):
continue
if "=" in line: if "=" in line:
name, value = line.split("=", 1) name, value = line.split("=", 1)
d[name] = value d[name] = value
@@ -194,7 +200,12 @@ def LoadDictionaryFromLines(lines):
def LoadRecoveryFSTab(read_helper, fstab_version): def LoadRecoveryFSTab(read_helper, fstab_version):
class Partition(object): class Partition(object):
pass def __init__(self, mount_point, fs_type, device, length, device2):
self.mount_point = mount_point
self.fs_type = fs_type
self.device = device
self.length = length
self.device2 = device2
try: try:
data = read_helper("RECOVERY/RAMDISK/etc/recovery.fstab") data = read_helper("RECOVERY/RAMDISK/etc/recovery.fstab")
@@ -206,68 +217,65 @@ def LoadRecoveryFSTab(read_helper, fstab_version):
d = {} d = {}
for line in data.split("\n"): for line in data.split("\n"):
line = line.strip() line = line.strip()
if not line or line.startswith("#"): continue if not line or line.startswith("#"):
continue
pieces = line.split() pieces = line.split()
if not (3 <= len(pieces) <= 4): if not 3 <= len(pieces) <= 4:
raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,)) raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
p = Partition()
p.mount_point = pieces[0]
p.fs_type = pieces[1]
p.device = pieces[2]
p.length = 0
options = None options = None
if len(pieces) >= 4: if len(pieces) >= 4:
if pieces[3].startswith("/"): if pieces[3].startswith("/"):
p.device2 = pieces[3] device2 = pieces[3]
if len(pieces) >= 5: if len(pieces) >= 5:
options = pieces[4] options = pieces[4]
else: else:
p.device2 = None device2 = None
options = pieces[3] options = pieces[3]
else: else:
p.device2 = None device2 = None
mount_point = pieces[0]
length = 0
if options: if options:
options = options.split(",") options = options.split(",")
for i in options: for i in options:
if i.startswith("length="): if i.startswith("length="):
p.length = int(i[7:]) length = int(i[7:])
else: else:
print "%s: unknown option \"%s\"" % (p.mount_point, i) print "%s: unknown option \"%s\"" % (mount_point, i)
d[p.mount_point] = p d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[1],
device=pieces[2], length=length,
device2=device2)
elif fstab_version == 2: elif fstab_version == 2:
d = {} d = {}
for line in data.split("\n"): for line in data.split("\n"):
line = line.strip() line = line.strip()
if not line or line.startswith("#"): continue if not line or line.startswith("#"):
continue
pieces = line.split() pieces = line.split()
if len(pieces) != 5: if len(pieces) != 5:
raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,)) raise ValueError("malformed recovery.fstab line: \"%s\"" % (line,))
# Ignore entries that are managed by vold # Ignore entries that are managed by vold
options = pieces[4] options = pieces[4]
if "voldmanaged=" in options: continue if "voldmanaged=" in options:
continue
# It's a good line, parse it # It's a good line, parse it
p = Partition() length = 0
p.device = pieces[0]
p.mount_point = pieces[1]
p.fs_type = pieces[2]
p.device2 = None
p.length = 0
options = options.split(",") options = options.split(",")
for i in options: for i in options:
if i.startswith("length="): if i.startswith("length="):
p.length = int(i[7:]) length = int(i[7:])
else: else:
# Ignore all unknown options in the unified fstab # Ignore all unknown options in the unified fstab
continue continue
d[p.mount_point] = p mount_point = pieces[1]
d[mount_point] = Partition(mount_point=mount_point, fs_type=pieces[2],
device=pieces[0], length=length, device2=None)
else: else:
raise ValueError("Unknown fstab_version: \"%d\"" % (fstab_version,)) raise ValueError("Unknown fstab_version: \"%d\"" % (fstab_version,))
@@ -279,6 +287,7 @@ def DumpInfoDict(d):
for k, v in sorted(d.items()): for k, v in sorted(d.items()):
print "%-25s = (%s) %s" % (k, type(v).__name__, v) print "%-25s = (%s) %s" % (k, type(v).__name__, v)
def BuildBootableImage(sourcedir, fs_config_file, info_dict=None): def BuildBootableImage(sourcedir, fs_config_file, info_dict=None):
"""Take a kernel, cmdline, and ramdisk directory from the input (in """Take a kernel, cmdline, and ramdisk directory from the input (in
'sourcedir'), and turn them into a boot image. Return the image 'sourcedir'), and turn them into a boot image. Return the image
@@ -305,8 +314,8 @@ def BuildBootableImage(sourcedir, fs_config_file, info_dict=None):
p2.wait() p2.wait()
p1.wait() p1.wait()
assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (targetname,) assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (sourcedir,)
assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (targetname,) assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (sourcedir,)
# use MKBOOTIMG from environ, or "mkbootimg" if empty or not set # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg" mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg"
@@ -347,7 +356,8 @@ def BuildBootableImage(sourcedir, fs_config_file, info_dict=None):
if info_dict.get("verity_key", None): if info_dict.get("verity_key", None):
path = "/" + os.path.basename(sourcedir).lower() path = "/" + os.path.basename(sourcedir).lower()
cmd = ["boot_signer", path, img.name, info_dict["verity_key"] + ".pk8", info_dict["verity_key"] + ".x509.pem", img.name] cmd = ["boot_signer", path, img.name, info_dict["verity_key"] + ".pk8",
info_dict["verity_key"] + ".x509.pem", img.name]
p = Run(cmd, stdout=subprocess.PIPE) p = Run(cmd, stdout=subprocess.PIPE)
p.communicate() p.communicate()
assert p.returncode == 0, "boot_signer of %s image failed" % path assert p.returncode == 0, "boot_signer of %s image failed" % path
@@ -453,7 +463,7 @@ def GetKeyPasswords(keylist):
stdin=devnull.fileno(), stdin=devnull.fileno(),
stdout=devnull.fileno(), stdout=devnull.fileno(),
stderr=subprocess.PIPE) stderr=subprocess.PIPE)
stdout, stderr = p.communicate() _, stderr = p.communicate()
if p.returncode == 0: if p.returncode == 0:
# Encrypted key with empty string as password. # Encrypted key with empty string as password.
key_passwords[k] = '' key_passwords[k] = ''
@@ -524,20 +534,23 @@ def CheckSize(data, target, info_dict):
any, for the given target. Raise exception if the data is too big. any, for the given target. Raise exception if the data is too big.
Print a warning if the data is nearing the maximum size.""" Print a warning if the data is nearing the maximum size."""
if target.endswith(".img"): target = target[:-4] if target.endswith(".img"):
target = target[:-4]
mount_point = "/" + target mount_point = "/" + target
fs_type = None fs_type = None
limit = None limit = None
if info_dict["fstab"]: if info_dict["fstab"]:
if mount_point == "/userdata": mount_point = "/data" if mount_point == "/userdata":
mount_point = "/data"
p = info_dict["fstab"][mount_point] p = info_dict["fstab"][mount_point]
fs_type = p.fs_type fs_type = p.fs_type
device = p.device device = p.device
if "/" in device: if "/" in device:
device = device[device.rfind("/")+1:] device = device[device.rfind("/")+1:]
limit = info_dict.get(device + "_size", None) limit = info_dict.get(device + "_size", None)
if not fs_type or not limit: return if not fs_type or not limit:
return
if fs_type == "yaffs2": if fs_type == "yaffs2":
# image size should be increased by 1/64th to account for the # image size should be increased by 1/64th to account for the
@@ -562,7 +575,8 @@ def ReadApkCerts(tf_zip):
certmap = {} certmap = {}
for line in tf_zip.read("META/apkcerts.txt").split("\n"): for line in tf_zip.read("META/apkcerts.txt").split("\n"):
line = line.strip() line = line.strip()
if not line: continue if not line:
continue
m = re.match(r'^name="(.*)"\s+certificate="(.*)"\s+' m = re.match(r'^name="(.*)"\s+certificate="(.*)"\s+'
r'private_key="(.*)"$', line) r'private_key="(.*)"$', line)
if m: if m:
@@ -622,13 +636,11 @@ def ParseOptions(argv,
"java_path=", "java_args=", "public_key_suffix=", "java_path=", "java_args=", "public_key_suffix=",
"private_key_suffix=", "device_specific=", "extra="] + "private_key_suffix=", "device_specific=", "extra="] +
list(extra_long_opts)) list(extra_long_opts))
except getopt.GetoptError, err: except getopt.GetoptError as err:
Usage(docstring) Usage(docstring)
print "**", str(err), "**" print "**", str(err), "**"
sys.exit(2) sys.exit(2)
path_specified = False
for o, a in opts: for o, a in opts:
if o in ("-h", "--help"): if o in ("-h", "--help"):
Usage(docstring) Usage(docstring)
@@ -707,7 +719,8 @@ class PasswordManager(object):
if i not in current or not current[i]: if i not in current or not current[i]:
missing.append(i) missing.append(i)
# Are all the passwords already in the file? # Are all the passwords already in the file?
if not missing: return current if not missing:
return current
for i in missing: for i in missing:
current[i] = "" current[i] = ""
@@ -721,7 +734,7 @@ class PasswordManager(object):
current = self.UpdateAndReadFile(current) current = self.UpdateAndReadFile(current)
def PromptResult(self, current): def PromptResult(self, current): # pylint: disable=no-self-use
"""Prompt the user to enter a value (password) for each key in """Prompt the user to enter a value (password) for each key in
'current' whose value is fales. Returns a new dict with all the 'current' whose value is fales. Returns a new dict with all the
values. values.
@@ -732,9 +745,10 @@ class PasswordManager(object):
result[k] = v result[k] = v
else: else:
while True: while True:
result[k] = getpass.getpass("Enter password for %s key> " result[k] = getpass.getpass(
% (k,)).strip() "Enter password for %s key> " % k).strip()
if result[k]: break if result[k]:
break
return result return result
def UpdateAndReadFile(self, current): def UpdateAndReadFile(self, current):
@@ -742,14 +756,13 @@ class PasswordManager(object):
return self.PromptResult(current) return self.PromptResult(current)
f = open(self.pwfile, "w") f = open(self.pwfile, "w")
os.chmod(self.pwfile, 0600) os.chmod(self.pwfile, 0o600)
f.write("# Enter key passwords between the [[[ ]]] brackets.\n") f.write("# Enter key passwords between the [[[ ]]] brackets.\n")
f.write("# (Additional spaces are harmless.)\n\n") f.write("# (Additional spaces are harmless.)\n\n")
first_line = None first_line = None
sorted = [(not v, k, v) for (k, v) in current.iteritems()] sorted_list = sorted([(not v, k, v) for (k, v) in current.iteritems()])
sorted.sort() for i, (_, k, v) in enumerate(sorted_list):
for i, (_, k, v) in enumerate(sorted):
f.write("[[[ %s ]]] %s\n" % (v, k)) f.write("[[[ %s ]]] %s\n" % (v, k))
if not v and first_line is None: if not v and first_line is None:
# position cursor on first line with no password. # position cursor on first line with no password.
@@ -763,19 +776,21 @@ class PasswordManager(object):
def ReadFile(self): def ReadFile(self):
result = {} result = {}
if self.pwfile is None: return result if self.pwfile is None:
return result
try: try:
f = open(self.pwfile, "r") f = open(self.pwfile, "r")
for line in f: for line in f:
line = line.strip() line = line.strip()
if not line or line[0] == '#': continue if not line or line[0] == '#':
continue
m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line) m = re.match(r"^\[\[\[\s*(.*?)\s*\]\]\]\s*(\S+)$", line)
if not m: if not m:
print "failed to parse password file: ", line print "failed to parse password file: ", line
else: else:
result[m.group(2)] = m.group(1) result[m.group(2)] = m.group(1)
f.close() f.close()
except IOError, e: except IOError as e:
if e.errno != errno.ENOENT: if e.errno != errno.ENOENT:
print "error reading password file: ", str(e) print "error reading password file: ", str(e)
return result return result
@@ -821,16 +836,16 @@ def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
zipfile.ZIP64_LIMIT = saved_zip64_limit zipfile.ZIP64_LIMIT = saved_zip64_limit
def ZipWriteStr(zip, filename, data, perms=0644, compression=None): def ZipWriteStr(zip_file, filename, data, perms=0o644, compression=None):
# use a fixed timestamp so the output is repeatable. # use a fixed timestamp so the output is repeatable.
zinfo = zipfile.ZipInfo(filename=filename, zinfo = zipfile.ZipInfo(filename=filename,
date_time=(2009, 1, 1, 0, 0, 0)) date_time=(2009, 1, 1, 0, 0, 0))
if compression is None: if compression is None:
zinfo.compress_type = zip.compression zinfo.compress_type = zip_file.compression
else: else:
zinfo.compress_type = compression zinfo.compress_type = compression
zinfo.external_attr = perms << 16 zinfo.external_attr = perms << 16
zip.writestr(zinfo, data) zip_file.writestr(zinfo, data)
class DeviceSpecificParams(object): class DeviceSpecificParams(object):
@@ -845,7 +860,8 @@ class DeviceSpecificParams(object):
if self.module is None: if self.module is None:
path = OPTIONS.device_specific path = OPTIONS.device_specific
if not path: return if not path:
return
try: try:
if os.path.isdir(path): if os.path.isdir(path):
info = imp.find_module("releasetools", [path]) info = imp.find_module("releasetools", [path])
@@ -983,7 +999,8 @@ class Difference(object):
err = [] err = []
def run(): def run():
_, e = p.communicate() _, e = p.communicate()
if e: err.append(e) if e:
err.append(e)
th = threading.Thread(target=run) th = threading.Thread(target=run)
th.start() th.start()
th.join(timeout=300) # 5 mins th.join(timeout=300) # 5 mins
@@ -1050,7 +1067,7 @@ def ComputeDifferences(diffs):
print "%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % ( print "%8.2f sec %8d / %8d bytes (%6.2f%%) %s" % (
dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name) dur, len(patch), tf.size, 100.0 * len(patch) / tf.size, name)
lock.release() lock.release()
except Exception, e: except Exception as e:
print e print e
raise raise
@@ -1063,8 +1080,9 @@ def ComputeDifferences(diffs):
threads.pop().join() threads.pop().join()
class BlockDifference: class BlockDifference(object):
def __init__(self, partition, tgt, src=None, check_first_block=False, version=None): def __init__(self, partition, tgt, src=None, check_first_block=False,
version=None):
self.tgt = tgt self.tgt = tgt
self.src = src self.src = src
self.partition = partition self.partition = partition
@@ -1094,7 +1112,8 @@ class BlockDifference:
else: else:
script.Print("Patching %s image after verification." % (self.partition,)) script.Print("Patching %s image after verification." % (self.partition,))
if progress: script.ShowProgress(progress, 0) if progress:
script.ShowProgress(progress, 0)
self._WriteUpdate(script, output_zip) self._WriteUpdate(script, output_zip)
def WriteVerifyScript(self, script): def WriteVerifyScript(self, script):
@@ -1108,11 +1127,11 @@ class BlockDifference:
'"%s.new.dat", "%s.patch.dat") then') % '"%s.new.dat", "%s.patch.dat") then') %
(self.device, partition, partition, partition)) (self.device, partition, partition, partition))
else: else:
script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
(self.device, self.src.care_map.to_string_raw(), self.device, self.src.care_map.to_string_raw(),
self.src.TotalSha1())) self.src.TotalSha1()))
script.Print('Verified %s image...' % (partition,)) script.Print('Verified %s image...' % (partition,))
script.AppendExtra('else'); script.AppendExtra('else')
# When generating incrementals for the system and vendor partitions, # When generating incrementals for the system and vendor partitions,
# explicitly check the first block (which contains the superblock) of # explicitly check the first block (which contains the superblock) of
@@ -1147,9 +1166,9 @@ class BlockDifference:
'package_extract_file("{partition}.transfer.list"), ' 'package_extract_file("{partition}.transfer.list"), '
'"{partition}.new.dat", "{partition}.patch.dat");\n'.format( '"{partition}.new.dat", "{partition}.patch.dat");\n'.format(
device=self.device, partition=self.partition)) device=self.device, partition=self.partition))
script.AppendExtra(script._WordWrap(call)) script.AppendExtra(script.WordWrap(call))
def _HashBlocks(self, source, ranges): def _HashBlocks(self, source, ranges): # pylint: disable=no-self-use
data = source.ReadRangeSet(ranges) data = source.ReadRangeSet(ranges)
ctx = sha1() ctx = sha1()
@@ -1159,8 +1178,8 @@ class BlockDifference:
return ctx.hexdigest() return ctx.hexdigest()
def _CheckFirstBlock(self, script): def _CheckFirstBlock(self, script):
r = RangeSet((0, 1)) r = rangelib.RangeSet((0, 1))
srchash = self._HashBlocks(self.src, r); srchash = self._HashBlocks(self.src, r)
script.AppendExtra(('(range_sha1("%s", "%s") == "%s") || ' script.AppendExtra(('(range_sha1("%s", "%s") == "%s") || '
'abort("%s has been remounted R/W; ' 'abort("%s has been remounted R/W; '
@@ -1172,16 +1191,21 @@ DataImage = blockimgdiff.DataImage
# map recovery.fstab's fs_types to mount/format "partition types" # map recovery.fstab's fs_types to mount/format "partition types"
PARTITION_TYPES = { "yaffs2": "MTD", "mtd": "MTD", PARTITION_TYPES = {
"ext4": "EMMC", "emmc": "EMMC", "yaffs2": "MTD",
"f2fs": "EMMC" } "mtd": "MTD",
"ext4": "EMMC",
"emmc": "EMMC",
"f2fs": "EMMC"
}
def GetTypeAndDevice(mount_point, info): def GetTypeAndDevice(mount_point, info):
fstab = info["fstab"] fstab = info["fstab"]
if fstab: if fstab:
return PARTITION_TYPES[fstab[mount_point].fs_type], fstab[mount_point].device return (PARTITION_TYPES[fstab[mount_point].fs_type],
fstab[mount_point].device)
else: else:
return None raise KeyError
def ParseCertificate(data): def ParseCertificate(data):
@@ -1243,16 +1267,15 @@ if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(rec
else else
log -t recovery "Recovery image already installed" log -t recovery "Recovery image already installed"
fi fi
""" % { 'boot_size': boot_img.size, """ % {'boot_size': boot_img.size,
'boot_sha1': boot_img.sha1, 'boot_sha1': boot_img.sha1,
'recovery_size': recovery_img.size, 'recovery_size': recovery_img.size,
'recovery_sha1': recovery_img.sha1, 'recovery_sha1': recovery_img.sha1,
'boot_type': boot_type, 'boot_type': boot_type,
'boot_device': boot_device, 'boot_device': boot_device,
'recovery_type': recovery_type, 'recovery_type': recovery_type,
'recovery_device': recovery_device, 'recovery_device': recovery_device,
'bonus_args': bonus_args, 'bonus_args': bonus_args}
}
# The install script location moved from /system/etc to /system/bin # The install script location moved from /system/etc to /system/bin
# in the L release. Parse the init.rc file to find out where the # in the L release. Parse the init.rc file to find out where the
@@ -1261,12 +1284,12 @@ fi
try: try:
with open(os.path.join(input_dir, "BOOT", "RAMDISK", "init.rc")) as f: with open(os.path.join(input_dir, "BOOT", "RAMDISK", "init.rc")) as f:
for line in f: for line in f:
m = re.match("^service flash_recovery /system/(\S+)\s*$", line) m = re.match(r"^service flash_recovery /system/(\S+)\s*$", line)
if m: if m:
sh_location = m.group(1) sh_location = m.group(1)
print "putting script in", sh_location print "putting script in", sh_location
break break
except (OSError, IOError), e: except (OSError, IOError) as e:
print "failed to read init.rc: %s" % (e,) print "failed to read init.rc: %s" % (e,)
output_sink(sh_location, sh) output_sink(sh_location, sh)

View File

@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import os
import re import re
import common import common
@@ -36,7 +35,7 @@ class EdifyGenerator(object):
return x return x
@staticmethod @staticmethod
def _WordWrap(cmd, linelen=80): def WordWrap(cmd, linelen=80):
"""'cmd' should be a function call with null characters after each """'cmd' should be a function call with null characters after each
parameter (eg, "somefun(foo,\0bar,\0baz)"). This function wraps cmd parameter (eg, "somefun(foo,\0bar,\0baz)"). This function wraps cmd
to a given line length, replacing nulls with spaces and/or newlines to a given line length, replacing nulls with spaces and/or newlines
@@ -77,32 +76,30 @@ class EdifyGenerator(object):
cmd = ('file_getprop("/oem/oem.prop", "{name}") == "{value}" || ' cmd = ('file_getprop("/oem/oem.prop", "{name}") == "{value}" || '
'abort("This package expects the value \\"{value}\\" for ' 'abort("This package expects the value \\"{value}\\" for '
'\\"{name}\\" on the OEM partition; this has value \\"" + ' '\\"{name}\\" on the OEM partition; this has value \\"" + '
'file_getprop("/oem/oem.prop", "{name}") + "\\".");' 'file_getprop("/oem/oem.prop", "{name}") + "\\".");').format(
).format(name=name, value=value) name=name, value=value)
self.script.append(cmd) self.script.append(cmd)
def AssertSomeFingerprint(self, *fp): def AssertSomeFingerprint(self, *fp):
"""Assert that the current recovery build fingerprint is one of *fp.""" """Assert that the current recovery build fingerprint is one of *fp."""
if not fp: if not fp:
raise ValueError("must specify some fingerprints") raise ValueError("must specify some fingerprints")
cmd = ( cmd = (' ||\n '.join([('getprop("ro.build.fingerprint") == "%s"') % i
' ||\n '.join([('getprop("ro.build.fingerprint") == "%s"') for i in fp]) +
% i for i in fp]) +
' ||\n abort("Package expects build fingerprint of %s; this ' ' ||\n abort("Package expects build fingerprint of %s; this '
'device has " + getprop("ro.build.fingerprint") + ".");' 'device has " + getprop("ro.build.fingerprint") + ".");') % (
) % (" or ".join(fp),) " or ".join(fp))
self.script.append(cmd) self.script.append(cmd)
def AssertSomeThumbprint(self, *fp): def AssertSomeThumbprint(self, *fp):
"""Assert that the current recovery build thumbprint is one of *fp.""" """Assert that the current recovery build thumbprint is one of *fp."""
if not fp: if not fp:
raise ValueError("must specify some thumbprints") raise ValueError("must specify some thumbprints")
cmd = ( cmd = (' ||\n '.join([('getprop("ro.build.thumbprint") == "%s"') % i
' ||\n '.join([('getprop("ro.build.thumbprint") == "%s"') for i in fp]) +
% i for i in fp]) +
' ||\n abort("Package expects build thumbprint of %s; this ' ' ||\n abort("Package expects build thumbprint of %s; this '
'device has " + getprop("ro.build.thumbprint") + ".");' 'device has " + getprop("ro.build.thumbprint") + ".");') % (
) % (" or ".join(fp),) " or ".join(fp))
self.script.append(cmd) self.script.append(cmd)
def AssertOlderBuild(self, timestamp, timestamp_text): def AssertOlderBuild(self, timestamp, timestamp_text):
@@ -111,15 +108,15 @@ class EdifyGenerator(object):
self.script.append( self.script.append(
('(!less_than_int(%s, getprop("ro.build.date.utc"))) || ' ('(!less_than_int(%s, getprop("ro.build.date.utc"))) || '
'abort("Can\'t install this package (%s) over newer ' 'abort("Can\'t install this package (%s) over newer '
'build (" + getprop("ro.build.date") + ").");' 'build (" + getprop("ro.build.date") + ").");') % (timestamp,
) % (timestamp, timestamp_text)) timestamp_text))
def AssertDevice(self, device): def AssertDevice(self, device):
"""Assert that the device identifier is the given string.""" """Assert that the device identifier is the given string."""
cmd = ('getprop("ro.product.device") == "%s" || ' cmd = ('getprop("ro.product.device") == "%s" || '
'abort("This package is for \\"%s\\" devices; ' 'abort("This package is for \\"%s\\" devices; '
'this is a \\"" + getprop("ro.product.device") + "\\".");' 'this is a \\"" + getprop("ro.product.device") + "\\".");') % (
) % (device, device) device, device)
self.script.append(cmd) self.script.append(cmd)
def AssertSomeBootloader(self, *bootloaders): def AssertSomeBootloader(self, *bootloaders):
@@ -128,7 +125,7 @@ class EdifyGenerator(object):
" ||\0".join(['getprop("ro.bootloader") == "%s"' % (b,) " ||\0".join(['getprop("ro.bootloader") == "%s"' % (b,)
for b in bootloaders]) + for b in bootloaders]) +
");") ");")
self.script.append(self._WordWrap(cmd)) self.script.append(self.WordWrap(cmd))
def ShowProgress(self, frac, dur): def ShowProgress(self, frac, dur):
"""Update the progress bar, advancing it over 'frac' over the next """Update the progress bar, advancing it over 'frac' over the next
@@ -180,9 +177,9 @@ class EdifyGenerator(object):
if "=" in option: if "=" in option:
key, value = option.split("=", 1) key, value = option.split("=", 1)
mount_dict[key] = value mount_dict[key] = value
self.script.append('mount("%s", "%s", "%s", "%s", "%s");' % self.script.append('mount("%s", "%s", "%s", "%s", "%s");' % (
(p.fs_type, common.PARTITION_TYPES[p.fs_type], p.fs_type, common.PARTITION_TYPES[p.fs_type], p.device,
p.device, p.mount_point, mount_dict.get(p.fs_type, ""))) p.mount_point, mount_dict.get(p.fs_type, "")))
self.mounts.add(p.mount_point) self.mounts.add(p.mount_point)
def UnpackPackageDir(self, src, dst): def UnpackPackageDir(self, src, dst):
@@ -205,18 +202,17 @@ class EdifyGenerator(object):
fstab = self.info.get("fstab", None) fstab = self.info.get("fstab", None)
if fstab: if fstab:
p = fstab[partition] p = fstab[partition]
if (p.fs_type not in ( "ext2", "ext3", "ext4")): if p.fs_type not in ("ext2", "ext3", "ext4"):
raise ValueError("Partition %s cannot be tuned\n" % (partition,)) raise ValueError("Partition %s cannot be tuned\n" % (partition,))
self.script.append('tune2fs(' + self.script.append(
"".join(['"%s", ' % (i,) for i in options]) + 'tune2fs(' + "".join(['"%s", ' % (i,) for i in options]) +
'"%s") || abort("Failed to tune partition %s");' '"%s") || abort("Failed to tune partition %s");' % (
% ( p.device,partition)); p.device, partition))
def FormatPartition(self, partition): def FormatPartition(self, partition):
"""Format the given partition, specified by its mount point (eg, """Format the given partition, specified by its mount point (eg,
"/system").""" "/system")."""
reserve_size = 0
fstab = self.info.get("fstab", None) fstab = self.info.get("fstab", None)
if fstab: if fstab:
p = fstab[partition] p = fstab[partition]
@@ -235,9 +231,10 @@ class EdifyGenerator(object):
def DeleteFiles(self, file_list): def DeleteFiles(self, file_list):
"""Delete all files in file_list.""" """Delete all files in file_list."""
if not file_list: return if not file_list:
return
cmd = "delete(" + ",\0".join(['"%s"' % (i,) for i in file_list]) + ");" cmd = "delete(" + ",\0".join(['"%s"' % (i,) for i in file_list]) + ");"
self.script.append(self._WordWrap(cmd)) self.script.append(self.WordWrap(cmd))
def RenameFile(self, srcfile, tgtfile): def RenameFile(self, srcfile, tgtfile):
"""Moves a file from one location to another.""" """Moves a file from one location to another."""
@@ -251,7 +248,7 @@ class EdifyGenerator(object):
skip the action if the file exists. Used when a patch skip the action if the file exists. Used when a patch
is later renamed.""" is later renamed."""
cmd = ('sha1_check(read_file("%s"), %s) || ' % (tgtfile, tgtsha1)) cmd = ('sha1_check(read_file("%s"), %s) || ' % (tgtfile, tgtsha1))
self.script.append(self._WordWrap(cmd)) self.script.append(self.WordWrap(cmd))
def ApplyPatch(self, srcfile, tgtfile, tgtsize, tgtsha1, *patchpairs): def ApplyPatch(self, srcfile, tgtfile, tgtsize, tgtsha1, *patchpairs):
"""Apply binary patches (in *patchpairs) to the given srcfile to """Apply binary patches (in *patchpairs) to the given srcfile to
@@ -265,7 +262,7 @@ class EdifyGenerator(object):
cmd.append(',\0%s, package_extract_file("%s")' % patchpairs[i:i+2]) cmd.append(',\0%s, package_extract_file("%s")' % patchpairs[i:i+2])
cmd.append(');') cmd.append(');')
cmd = "".join(cmd) cmd = "".join(cmd)
self.script.append(self._WordWrap(cmd)) self.script.append(self.WordWrap(cmd))
def WriteRawImage(self, mount_point, fn, mapfn=None): def WriteRawImage(self, mount_point, fn, mapfn=None):
"""Write the given package file into the partition for the given """Write the given package file into the partition for the given
@@ -289,33 +286,37 @@ class EdifyGenerator(object):
self.script.append( self.script.append(
'package_extract_file("%(fn)s", "%(device)s");' % args) 'package_extract_file("%(fn)s", "%(device)s");' % args)
else: else:
raise ValueError("don't know how to write \"%s\" partitions" % (p.fs_type,)) raise ValueError(
"don't know how to write \"%s\" partitions" % p.fs_type)
def SetPermissions(self, fn, uid, gid, mode, selabel, capabilities): def SetPermissions(self, fn, uid, gid, mode, selabel, capabilities):
"""Set file ownership and permissions.""" """Set file ownership and permissions."""
if not self.info.get("use_set_metadata", False): if not self.info.get("use_set_metadata", False):
self.script.append('set_perm(%d, %d, 0%o, "%s");' % (uid, gid, mode, fn)) self.script.append('set_perm(%d, %d, 0%o, "%s");' % (uid, gid, mode, fn))
else: else:
if capabilities is None: capabilities = "0x0" if capabilities is None:
capabilities = "0x0"
cmd = 'set_metadata("%s", "uid", %d, "gid", %d, "mode", 0%o, ' \ cmd = 'set_metadata("%s", "uid", %d, "gid", %d, "mode", 0%o, ' \
'"capabilities", %s' % (fn, uid, gid, mode, capabilities) '"capabilities", %s' % (fn, uid, gid, mode, capabilities)
if selabel is not None: if selabel is not None:
cmd += ', "selabel", "%s"' % ( selabel ) cmd += ', "selabel", "%s"' % selabel
cmd += ');' cmd += ');'
self.script.append(cmd) self.script.append(cmd)
def SetPermissionsRecursive(self, fn, uid, gid, dmode, fmode, selabel, capabilities): def SetPermissionsRecursive(self, fn, uid, gid, dmode, fmode, selabel,
capabilities):
"""Recursively set path ownership and permissions.""" """Recursively set path ownership and permissions."""
if not self.info.get("use_set_metadata", False): if not self.info.get("use_set_metadata", False):
self.script.append('set_perm_recursive(%d, %d, 0%o, 0%o, "%s");' self.script.append('set_perm_recursive(%d, %d, 0%o, 0%o, "%s");'
% (uid, gid, dmode, fmode, fn)) % (uid, gid, dmode, fmode, fn))
else: else:
if capabilities is None: capabilities = "0x0" if capabilities is None:
capabilities = "0x0"
cmd = 'set_metadata_recursive("%s", "uid", %d, "gid", %d, ' \ cmd = 'set_metadata_recursive("%s", "uid", %d, "gid", %d, ' \
'"dmode", 0%o, "fmode", 0%o, "capabilities", %s' \ '"dmode", 0%o, "fmode", 0%o, "capabilities", %s' \
% (fn, uid, gid, dmode, fmode, capabilities) % (fn, uid, gid, dmode, fmode, capabilities)
if selabel is not None: if selabel is not None:
cmd += ', "selabel", "%s"' % ( selabel ) cmd += ', "selabel", "%s"' % selabel
cmd += ');' cmd += ');'
self.script.append(cmd) self.script.append(cmd)
@@ -328,15 +329,15 @@ class EdifyGenerator(object):
for dest, links in sorted(by_dest.iteritems()): for dest, links in sorted(by_dest.iteritems()):
cmd = ('symlink("%s", ' % (dest,) + cmd = ('symlink("%s", ' % (dest,) +
",\0".join(['"' + i + '"' for i in sorted(links)]) + ");") ",\0".join(['"' + i + '"' for i in sorted(links)]) + ");")
self.script.append(self._WordWrap(cmd)) self.script.append(self.WordWrap(cmd))
def AppendExtra(self, extra): def AppendExtra(self, extra):
"""Append text verbatim to the output script.""" """Append text verbatim to the output script."""
self.script.append(extra) self.script.append(extra)
def Unmount(self, mount_point): def Unmount(self, mount_point):
self.script.append('unmount("%s");' % (mount_point,)) self.script.append('unmount("%s");' % mount_point)
self.mounts.remove(mount_point); self.mounts.remove(mount_point)
def UnmountAll(self): def UnmountAll(self):
for p in sorted(self.mounts): for p in sorted(self.mounts):
@@ -359,4 +360,4 @@ class EdifyGenerator(object):
else: else:
data = open(input_path, "rb").read() data = open(input_path, "rb").read()
common.ZipWriteStr(output_zip, "META-INF/com/google/android/update-binary", common.ZipWriteStr(output_zip, "META-INF/com/google/android/update-binary",
data, perms=0755) data, perms=0o755)

View File

@@ -32,18 +32,10 @@ if sys.hexversion < 0x02070000:
print >> sys.stderr, "Python 2.7 or newer is required." print >> sys.stderr, "Python 2.7 or newer is required."
sys.exit(1) sys.exit(1)
import errno
import os import os
import re
import shutil import shutil
import subprocess
import tempfile
import zipfile import zipfile
# missing in Python 2.4 and before
if not hasattr(os, "SEEK_SET"):
os.SEEK_SET = 0
import common import common
OPTIONS = common.OPTIONS OPTIONS = common.OPTIONS
@@ -58,7 +50,7 @@ def CopyInfo(output_zip):
def main(argv): def main(argv):
bootable_only = [False] bootable_only = [False]
def option_handler(o, a): def option_handler(o, _):
if o in ("-z", "--bootable_zip"): if o in ("-z", "--bootable_zip"):
bootable_only[0] = True bootable_only[0] = True
else: else:
@@ -116,7 +108,7 @@ def main(argv):
boot_image = common.GetBootableImage( boot_image = common.GetBootableImage(
"boot.img", "boot.img", OPTIONS.input_tmp, "BOOT") "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
if boot_image: if boot_image:
boot_image.AddToZip(output_zip) boot_image.AddToZip(output_zip)
recovery_image = common.GetBootableImage( recovery_image = common.GetBootableImage(
"recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY") "recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
if recovery_image: if recovery_image:
@@ -157,7 +149,7 @@ if __name__ == '__main__':
try: try:
common.CloseInheritedPipes() common.CloseInheritedPipes()
main(sys.argv[1:]) main(sys.argv[1:])
except common.ExternalError, e: except common.ExternalError as e:
print print
print " ERROR: %s" % (e,) print " ERROR: %s" % (e,)
print print

View File

@@ -1,53 +0,0 @@
#!/usr/bin/env python
#
# Copyright (C) 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
if sys.hexversion < 0x02070000:
print >> sys.stderr, "Python 2.7 or newer is required."
sys.exit(1)
import os
import common
OPTIONS = common.OPTIONS
def main(argv):
# def option_handler(o, a):
# return False
args = common.ParseOptions(argv, __doc__)
input_dir, output_dir = args
OPTIONS.info_dict = common.LoadInfoDict(input_dir)
recovery_img = common.GetBootableImage("recovery.img", "recovery.img",
input_dir, "RECOVERY")
boot_img = common.GetBootableImage("boot.img", "boot.img",
input_dir, "BOOT")
if not recovery_img or not boot_img:
sys.exit(0)
def output_sink(fn, data):
with open(os.path.join(output_dir, "SYSTEM", *fn.split("/")), "wb") as f:
f.write(data)
common.MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img)
if __name__ == '__main__':
main(sys.argv[1:])

View File

@@ -0,0 +1 @@
make_recovery_patch.py

View File

@@ -0,0 +1,53 @@
#!/usr/bin/env python
#
# Copyright (C) 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
if sys.hexversion < 0x02070000:
print >> sys.stderr, "Python 2.7 or newer is required."
sys.exit(1)
import os
import common
OPTIONS = common.OPTIONS
def main(argv):
# def option_handler(o, a):
# return False
args = common.ParseOptions(argv, __doc__)
input_dir, output_dir = args
OPTIONS.info_dict = common.LoadInfoDict(input_dir)
recovery_img = common.GetBootableImage("recovery.img", "recovery.img",
input_dir, "RECOVERY")
boot_img = common.GetBootableImage("boot.img", "boot.img",
input_dir, "BOOT")
if not recovery_img or not boot_img:
sys.exit(0)
def output_sink(fn, data):
with open(os.path.join(output_dir, "SYSTEM", *fn.split("/")), "wb") as f:
f.write(data)
common.MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img)
if __name__ == '__main__':
main(sys.argv[1:])

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1 @@
ota_from_target_files.py

File diff suppressed because it is too large Load Diff

382
tools/releasetools/pylintrc Normal file
View File

@@ -0,0 +1,382 @@
[MASTER]
# Specify a configuration file.
#rcfile=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Profiled execution.
profile=no
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
# Pickle collected data for later comparisons.
persistent=yes
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
# Use multiple processes to speed up Pylint.
jobs=1
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code
extension-pkg-whitelist=
# Allow optimization of some AST trees. This will activate a peephole AST
# optimizer, which will apply various small optimizations. For instance, it can
# be used to obtain the result of joining multiple strings with the addition
# operator. Joining a lot of strings can lead to a maximum recursion error in
# Pylint and this flag can prevent that. It has one side effect, the resulting
# AST will be different than the one from reality.
optimize-ast=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time. See also the "--disable" option for examples.
#enable=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
disable=invalid-name,missing-docstring,too-many-branches,too-many-locals,too-many-arguments,too-many-statements,duplicate-code,too-few-public-methods,too-many-instance-attributes,too-many-lines,too-many-public-methods,locally-disabled,fixme
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html. You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text
# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]".
files-output=no
# Tells whether to display a full report or only the messages
reports=yes
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Add a comment according to your evaluation note. This is used by the global
# evaluation report (RP0004).
comment=no
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=4
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
[TYPECHECK]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis
ignored-modules=
# List of classes names for which member attributes should not be checked
# (useful for classes with attributes dynamically set).
ignored-classes=SQLObject
# When zope mode is activated, add a predefined set of Zope acquired attributes
# to generated-members.
zope=no
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E0201 when accessed. Python regular
# expressions are accepted.
generated-members=REQUEST,acl_users,aq_parent
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO
[BASIC]
# Required attributes for module, separated by a comma
required-attributes=
# List of builtins function names that should not be used, separated by a comma
bad-functions=map,filter,input
# Good variable names which should always be accepted, separated by a comma
good-names=i,j,k,ex,Run,_
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Include a hint for the correct naming format with invalid-name
include-naming-hint=no
# Regular expression matching correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for function names
function-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct variable names
variable-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for variable names
variable-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct constant names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Naming hint for constant names
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Regular expression matching correct attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for attribute names
attr-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for argument names
argument-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct class attribute names
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Naming hint for class attribute names
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Regular expression matching correct inline iteration names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Naming hint for inline iteration names
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
# Regular expression matching correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Naming hint for class names
class-name-hint=[A-Z_][a-zA-Z0-9]+$
# Regular expression matching correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Naming hint for module names
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression matching correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for method names
method-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=__.*__
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
[SPELLING]
# Spelling dictionary name. Available dictionaries: none. To make it working
# install python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words=no
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=80
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
# List of optional constructs for which whitespace checking is disabled
no-space-check=trailing-comma,dict-separator
# Maximum number of lines in a module
max-module-lines=1000
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=LF
[LOGGING]
# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=_$|dummy
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,_cb
[DESIGN]
# Maximum number of arguments for function / method
max-args=5
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of branch for function / method body
max-branches=12
# Maximum number of statements in function / method body
max-statements=50
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,TERMIOS,Bastion,rexec
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
[CLASSES]
# List of interface methods to ignore, separated by a comma. This is used for
# instance to not check methods defines in Zope's Interface base class.
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,_fields,_replace,_source,_make
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception

View File

@@ -24,6 +24,7 @@ class RangeSet(object):
lots of runs.""" lots of runs."""
def __init__(self, data=None): def __init__(self, data=None):
self.monotonic = False
if isinstance(data, str): if isinstance(data, str):
self._parse_internal(data) self._parse_internal(data)
elif data: elif data:
@@ -185,7 +186,7 @@ class RangeSet(object):
# This is like intersect, but we can stop as soon as we discover the # This is like intersect, but we can stop as soon as we discover the
# output is going to be nonempty. # output is going to be nonempty.
z = 0 z = 0
for p, d in heapq.merge(zip(self.data, itertools.cycle((+1, -1))), for _, d in heapq.merge(zip(self.data, itertools.cycle((+1, -1))),
zip(other.data, itertools.cycle((+1, -1)))): zip(other.data, itertools.cycle((+1, -1)))):
if (z == 1 and d == 1) or (z == 2 and d == -1): if (z == 1 and d == 1) or (z == 2 and d == -1):
return True return True

View File

@@ -1,502 +0,0 @@
#!/usr/bin/env python
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Signs all the APK files in a target-files zipfile, producing a new
target-files zip.
Usage: sign_target_files_apks [flags] input_target_files output_target_files
-e (--extra_apks) <name,name,...=key>
Add extra APK name/key pairs as though they appeared in
apkcerts.txt (so mappings specified by -k and -d are applied).
Keys specified in -e override any value for that app contained
in the apkcerts.txt file. Option may be repeated to give
multiple extra packages.
-k (--key_mapping) <src_key=dest_key>
Add a mapping from the key name as specified in apkcerts.txt (the
src_key) to the real key you wish to sign the package with
(dest_key). Option may be repeated to give multiple key
mappings.
-d (--default_key_mappings) <dir>
Set up the following key mappings:
$devkey/devkey ==> $dir/releasekey
$devkey/testkey ==> $dir/releasekey
$devkey/media ==> $dir/media
$devkey/shared ==> $dir/shared
$devkey/platform ==> $dir/platform
where $devkey is the directory part of the value of
default_system_dev_certificate from the input target-files's
META/misc_info.txt. (Defaulting to "build/target/product/security"
if the value is not present in misc_info.
-d and -k options are added to the set of mappings in the order
in which they appear on the command line.
-o (--replace_ota_keys)
Replace the certificate (public key) used by OTA package
verification with the one specified in the input target_files
zip (in the META/otakeys.txt file). Key remapping (-k and -d)
is performed on this key.
-t (--tag_changes) <+tag>,<-tag>,...
Comma-separated list of changes to make to the set of tags (in
the last component of the build fingerprint). Prefix each with
'+' or '-' to indicate whether that tag should be added or
removed. Changes are processed in the order they appear.
Default value is "-test-keys,-dev-keys,+release-keys".
"""
import sys
if sys.hexversion < 0x02070000:
print >> sys.stderr, "Python 2.7 or newer is required."
sys.exit(1)
import base64
import cStringIO
import copy
import errno
import os
import re
import shutil
import subprocess
import tempfile
import zipfile
import add_img_to_target_files
import common
OPTIONS = common.OPTIONS
OPTIONS.extra_apks = {}
OPTIONS.key_map = {}
OPTIONS.replace_ota_keys = False
OPTIONS.replace_verity_public_key = False
OPTIONS.replace_verity_private_key = False
OPTIONS.tag_changes = ("-test-keys", "-dev-keys", "+release-keys")
def GetApkCerts(tf_zip):
certmap = common.ReadApkCerts(tf_zip)
# apply the key remapping to the contents of the file
for apk, cert in certmap.iteritems():
certmap[apk] = OPTIONS.key_map.get(cert, cert)
# apply all the -e options, overriding anything in the file
for apk, cert in OPTIONS.extra_apks.iteritems():
if not cert:
cert = "PRESIGNED"
certmap[apk] = OPTIONS.key_map.get(cert, cert)
return certmap
def CheckAllApksSigned(input_tf_zip, apk_key_map):
"""Check that all the APKs we want to sign have keys specified, and
error out if they don't."""
unknown_apks = []
for info in input_tf_zip.infolist():
if info.filename.endswith(".apk"):
name = os.path.basename(info.filename)
if name not in apk_key_map:
unknown_apks.append(name)
if unknown_apks:
print "ERROR: no key specified for:\n\n ",
print "\n ".join(unknown_apks)
print "\nUse '-e <apkname>=' to specify a key (which may be an"
print "empty string to not sign this apk)."
sys.exit(1)
def SignApk(data, keyname, pw):
unsigned = tempfile.NamedTemporaryFile()
unsigned.write(data)
unsigned.flush()
signed = tempfile.NamedTemporaryFile()
common.SignFile(unsigned.name, signed.name, keyname, pw, align=4)
data = signed.read()
unsigned.close()
signed.close()
return data
def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
apk_key_map, key_passwords):
maxsize = max([len(os.path.basename(i.filename))
for i in input_tf_zip.infolist()
if i.filename.endswith('.apk')])
rebuild_recovery = False
tmpdir = tempfile.mkdtemp()
def write_to_temp(fn, attr, data):
fn = os.path.join(tmpdir, fn)
if fn.endswith("/"):
fn = os.path.join(tmpdir, fn)
os.mkdir(fn)
else:
d = os.path.dirname(fn)
if d and not os.path.exists(d):
os.makedirs(d)
if attr >> 16 == 0xa1ff:
os.symlink(data, fn)
else:
with open(fn, "wb") as f:
f.write(data)
for info in input_tf_zip.infolist():
if info.filename.startswith("IMAGES/"): continue
data = input_tf_zip.read(info.filename)
out_info = copy.copy(info)
if (info.filename == "META/misc_info.txt" and
OPTIONS.replace_verity_private_key):
ReplaceVerityPrivateKey(input_tf_zip, output_tf_zip, misc_info, OPTIONS.replace_verity_private_key[1])
elif (info.filename == "BOOT/RAMDISK/verity_key" and
OPTIONS.replace_verity_public_key):
new_data = ReplaceVerityPublicKey(output_tf_zip, OPTIONS.replace_verity_public_key[1])
write_to_temp(info.filename, info.external_attr, new_data)
elif (info.filename.startswith("BOOT/") or
info.filename.startswith("RECOVERY/") or
info.filename.startswith("META/") or
info.filename == "SYSTEM/etc/recovery-resource.dat"):
write_to_temp(info.filename, info.external_attr, data)
if info.filename.endswith(".apk"):
name = os.path.basename(info.filename)
key = apk_key_map[name]
if key not in common.SPECIAL_CERT_STRINGS:
print " signing: %-*s (%s)" % (maxsize, name, key)
signed_data = SignApk(data, key, key_passwords[key])
output_tf_zip.writestr(out_info, signed_data)
else:
# an APK we're not supposed to sign.
print "NOT signing: %s" % (name,)
output_tf_zip.writestr(out_info, data)
elif info.filename in ("SYSTEM/build.prop",
"VENDOR/build.prop",
"RECOVERY/RAMDISK/default.prop"):
print "rewriting %s:" % (info.filename,)
new_data = RewriteProps(data, misc_info)
output_tf_zip.writestr(out_info, new_data)
if info.filename == "RECOVERY/RAMDISK/default.prop":
write_to_temp(info.filename, info.external_attr, new_data)
elif info.filename.endswith("mac_permissions.xml"):
print "rewriting %s with new keys." % (info.filename,)
new_data = ReplaceCerts(data)
output_tf_zip.writestr(out_info, new_data)
elif info.filename in ("SYSTEM/recovery-from-boot.p",
"SYSTEM/bin/install-recovery.sh"):
rebuild_recovery = True
elif (OPTIONS.replace_ota_keys and
info.filename in ("RECOVERY/RAMDISK/res/keys",
"SYSTEM/etc/security/otacerts.zip")):
# don't copy these files if we're regenerating them below
pass
elif (OPTIONS.replace_verity_private_key and
info.filename == "META/misc_info.txt"):
pass
elif (OPTIONS.replace_verity_public_key and
info.filename == "BOOT/RAMDISK/verity_key"):
pass
else:
# a non-APK file; copy it verbatim
output_tf_zip.writestr(out_info, data)
if OPTIONS.replace_ota_keys:
new_recovery_keys = ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info)
if new_recovery_keys:
write_to_temp("RECOVERY/RAMDISK/res/keys", 0755 << 16, new_recovery_keys)
if rebuild_recovery:
recovery_img = common.GetBootableImage(
"recovery.img", "recovery.img", tmpdir, "RECOVERY", info_dict=misc_info)
boot_img = common.GetBootableImage(
"boot.img", "boot.img", tmpdir, "BOOT", info_dict=misc_info)
def output_sink(fn, data):
output_tf_zip.writestr("SYSTEM/"+fn, data)
common.MakeRecoveryPatch(tmpdir, output_sink, recovery_img, boot_img,
info_dict=misc_info)
shutil.rmtree(tmpdir)
def ReplaceCerts(data):
"""Given a string of data, replace all occurences of a set
of X509 certs with a newer set of X509 certs and return
the updated data string."""
for old, new in OPTIONS.key_map.iteritems():
try:
if OPTIONS.verbose:
print " Replacing %s.x509.pem with %s.x509.pem" % (old, new)
f = open(old + ".x509.pem")
old_cert16 = base64.b16encode(common.ParseCertificate(f.read())).lower()
f.close()
f = open(new + ".x509.pem")
new_cert16 = base64.b16encode(common.ParseCertificate(f.read())).lower()
f.close()
# Only match entire certs.
pattern = "\\b"+old_cert16+"\\b"
(data, num) = re.subn(pattern, new_cert16, data, flags=re.IGNORECASE)
if OPTIONS.verbose:
print " Replaced %d occurence(s) of %s.x509.pem with " \
"%s.x509.pem" % (num, old, new)
except IOError, e:
if (e.errno == errno.ENOENT and not OPTIONS.verbose):
continue
print " Error accessing %s. %s. Skip replacing %s.x509.pem " \
"with %s.x509.pem." % (e.filename, e.strerror, old, new)
return data
def EditTags(tags):
"""Given a string containing comma-separated tags, apply the edits
specified in OPTIONS.tag_changes and return the updated string."""
tags = set(tags.split(","))
for ch in OPTIONS.tag_changes:
if ch[0] == "-":
tags.discard(ch[1:])
elif ch[0] == "+":
tags.add(ch[1:])
return ",".join(sorted(tags))
def RewriteProps(data, misc_info):
output = []
for line in data.split("\n"):
line = line.strip()
original_line = line
if line and line[0] != '#' and "=" in line:
key, value = line.split("=", 1)
if (key in ("ro.build.fingerprint", "ro.vendor.build.fingerprint")
and misc_info.get("oem_fingerprint_properties") is None):
pieces = value.split("/")
pieces[-1] = EditTags(pieces[-1])
value = "/".join(pieces)
elif (key in ("ro.build.thumbprint", "ro.vendor.build.thumbprint")
and misc_info.get("oem_fingerprint_properties") is not None):
pieces = value.split("/")
pieces[-1] = EditTags(pieces[-1])
value = "/".join(pieces)
elif key == "ro.build.description":
pieces = value.split(" ")
assert len(pieces) == 5
pieces[-1] = EditTags(pieces[-1])
value = " ".join(pieces)
elif key == "ro.build.tags":
value = EditTags(value)
elif key == "ro.build.display.id":
# change, eg, "JWR66N dev-keys" to "JWR66N"
value = value.split()
if len(value) > 1 and value[-1].endswith("-keys"):
value.pop()
value = " ".join(value)
line = key + "=" + value
if line != original_line:
print " replace: ", original_line
print " with: ", line
output.append(line)
return "\n".join(output) + "\n"
def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info):
try:
keylist = input_tf_zip.read("META/otakeys.txt").split()
except KeyError:
raise common.ExternalError("can't read META/otakeys.txt from input")
extra_recovery_keys = misc_info.get("extra_recovery_keys", None)
if extra_recovery_keys:
extra_recovery_keys = [OPTIONS.key_map.get(k, k) + ".x509.pem"
for k in extra_recovery_keys.split()]
if extra_recovery_keys:
print "extra recovery-only key(s): " + ", ".join(extra_recovery_keys)
else:
extra_recovery_keys = []
mapped_keys = []
for k in keylist:
m = re.match(r"^(.*)\.x509\.pem$", k)
if not m:
raise common.ExternalError(
"can't parse \"%s\" from META/otakeys.txt" % (k,))
k = m.group(1)
mapped_keys.append(OPTIONS.key_map.get(k, k) + ".x509.pem")
if mapped_keys:
print "using:\n ", "\n ".join(mapped_keys)
print "for OTA package verification"
else:
devkey = misc_info.get("default_system_dev_certificate",
"build/target/product/security/testkey")
mapped_keys.append(
OPTIONS.key_map.get(devkey, devkey) + ".x509.pem")
print "META/otakeys.txt has no keys; using", mapped_keys[0]
# recovery uses a version of the key that has been slightly
# predigested (by DumpPublicKey.java) and put in res/keys.
# extra_recovery_keys are used only in recovery.
p = common.Run(["java", "-jar",
os.path.join(OPTIONS.search_path, "framework", "dumpkey.jar")]
+ mapped_keys + extra_recovery_keys,
stdout=subprocess.PIPE)
new_recovery_keys, _ = p.communicate()
if p.returncode != 0:
raise common.ExternalError("failed to run dumpkeys")
common.ZipWriteStr(output_tf_zip, "RECOVERY/RAMDISK/res/keys",
new_recovery_keys)
# SystemUpdateActivity uses the x509.pem version of the keys, but
# put into a zipfile system/etc/security/otacerts.zip.
# We DO NOT include the extra_recovery_keys (if any) here.
tempfile = cStringIO.StringIO()
certs_zip = zipfile.ZipFile(tempfile, "w")
for k in mapped_keys:
certs_zip.write(k)
certs_zip.close()
common.ZipWriteStr(output_tf_zip, "SYSTEM/etc/security/otacerts.zip",
tempfile.getvalue())
return new_recovery_keys
def ReplaceVerityPublicKey(targetfile_zip, key_path):
print "Replacing verity public key with %s" % key_path
with open(key_path) as f:
data = f.read()
common.ZipWriteStr(targetfile_zip, "BOOT/RAMDISK/verity_key", data)
return data
def ReplaceVerityPrivateKey(targetfile_input_zip, targetfile_output_zip, misc_info, key_path):
print "Replacing verity private key with %s" % key_path
current_key = misc_info["verity_key"]
original_misc_info = targetfile_input_zip.read("META/misc_info.txt")
new_misc_info = original_misc_info.replace(current_key, key_path)
common.ZipWriteStr(targetfile_output_zip, "META/misc_info.txt", new_misc_info)
misc_info["verity_key"] = key_path
def BuildKeyMap(misc_info, key_mapping_options):
for s, d in key_mapping_options:
if s is None: # -d option
devkey = misc_info.get("default_system_dev_certificate",
"build/target/product/security/testkey")
devkeydir = os.path.dirname(devkey)
OPTIONS.key_map.update({
devkeydir + "/testkey": d + "/releasekey",
devkeydir + "/devkey": d + "/releasekey",
devkeydir + "/media": d + "/media",
devkeydir + "/shared": d + "/shared",
devkeydir + "/platform": d + "/platform",
})
else:
OPTIONS.key_map[s] = d
def main(argv):
key_mapping_options = []
def option_handler(o, a):
if o in ("-e", "--extra_apks"):
names, key = a.split("=")
names = names.split(",")
for n in names:
OPTIONS.extra_apks[n] = key
elif o in ("-d", "--default_key_mappings"):
key_mapping_options.append((None, a))
elif o in ("-k", "--key_mapping"):
key_mapping_options.append(a.split("=", 1))
elif o in ("-o", "--replace_ota_keys"):
OPTIONS.replace_ota_keys = True
elif o in ("-t", "--tag_changes"):
new = []
for i in a.split(","):
i = i.strip()
if not i or i[0] not in "-+":
raise ValueError("Bad tag change '%s'" % (i,))
new.append(i[0] + i[1:].strip())
OPTIONS.tag_changes = tuple(new)
elif o == "--replace_verity_public_key":
OPTIONS.replace_verity_public_key = (True, a)
elif o == "--replace_verity_private_key":
OPTIONS.replace_verity_private_key = (True, a)
else:
return False
return True
args = common.ParseOptions(argv, __doc__,
extra_opts="e:d:k:ot:",
extra_long_opts=["extra_apks=",
"default_key_mappings=",
"key_mapping=",
"replace_ota_keys",
"tag_changes=",
"replace_verity_public_key=",
"replace_verity_private_key="],
extra_option_handler=option_handler)
if len(args) != 2:
common.Usage(__doc__)
sys.exit(1)
input_zip = zipfile.ZipFile(args[0], "r")
output_zip = zipfile.ZipFile(args[1], "w")
misc_info = common.LoadInfoDict(input_zip)
BuildKeyMap(misc_info, key_mapping_options)
apk_key_map = GetApkCerts(input_zip)
CheckAllApksSigned(input_zip, apk_key_map)
key_passwords = common.GetKeyPasswords(set(apk_key_map.values()))
ProcessTargetFiles(input_zip, output_zip, misc_info,
apk_key_map, key_passwords)
input_zip.close()
output_zip.close()
add_img_to_target_files.AddImagesToTargetFiles(args[1])
print "done."
if __name__ == '__main__':
try:
main(sys.argv[1:])
except common.ExternalError, e:
print
print " ERROR: %s" % (e,)
print
sys.exit(1)

View File

@@ -0,0 +1 @@
sign_target_files_apks.py

View File

@@ -0,0 +1,506 @@
#!/usr/bin/env python
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Signs all the APK files in a target-files zipfile, producing a new
target-files zip.
Usage: sign_target_files_apks [flags] input_target_files output_target_files
-e (--extra_apks) <name,name,...=key>
Add extra APK name/key pairs as though they appeared in
apkcerts.txt (so mappings specified by -k and -d are applied).
Keys specified in -e override any value for that app contained
in the apkcerts.txt file. Option may be repeated to give
multiple extra packages.
-k (--key_mapping) <src_key=dest_key>
Add a mapping from the key name as specified in apkcerts.txt (the
src_key) to the real key you wish to sign the package with
(dest_key). Option may be repeated to give multiple key
mappings.
-d (--default_key_mappings) <dir>
Set up the following key mappings:
$devkey/devkey ==> $dir/releasekey
$devkey/testkey ==> $dir/releasekey
$devkey/media ==> $dir/media
$devkey/shared ==> $dir/shared
$devkey/platform ==> $dir/platform
where $devkey is the directory part of the value of
default_system_dev_certificate from the input target-files's
META/misc_info.txt. (Defaulting to "build/target/product/security"
if the value is not present in misc_info.)
-d and -k options are added to the set of mappings in the order
in which they appear on the command line.
-o (--replace_ota_keys)
Replace the certificate (public key) used by OTA package
verification with the one specified in the input target_files
zip (in the META/otakeys.txt file). Key remapping (-k and -d)
is performed on this key.
-t (--tag_changes) <+tag>,<-tag>,...
Comma-separated list of changes to make to the set of tags (in
the last component of the build fingerprint). Prefix each with
'+' or '-' to indicate whether that tag should be added or
removed. Changes are processed in the order they appear.
Default value is "-test-keys,-dev-keys,+release-keys".
"""
import sys
if sys.hexversion < 0x02070000:
print >> sys.stderr, "Python 2.7 or newer is required."
sys.exit(1)
import base64
import cStringIO
import copy
import errno
import os
import re
import shutil
import subprocess
import tempfile
import zipfile
import add_img_to_target_files
import common
# Shared option state (populated by common.ParseOptions); the attributes
# below are this script's defaults until main() overrides them from flags.
OPTIONS = common.OPTIONS
# -e: extra APK-name -> key mappings, overriding apkcerts.txt.
OPTIONS.extra_apks = {}
# -k/-d: remapping from source key names to the keys actually used to sign.
OPTIONS.key_map = {}
# -o: regenerate the OTA verification keys from META/otakeys.txt.
OPTIONS.replace_ota_keys = False
# --replace_verity_public_key: False, or (True, path-to-new-key).
OPTIONS.replace_verity_public_key = False
# --replace_verity_private_key: False, or (True, path-to-new-key).
OPTIONS.replace_verity_private_key = False
# -t: edits applied to the tag component of build fingerprints.
OPTIONS.tag_changes = ("-test-keys", "-dev-keys", "+release-keys")
def GetApkCerts(tf_zip):
  """Return a dict mapping APK name -> signing key for tf_zip.

  The base mapping comes from the zip's apkcerts.txt; every cert is
  remapped through OPTIONS.key_map.  Entries given with -e
  (OPTIONS.extra_apks) take precedence over the file, with an empty
  key standing for "PRESIGNED".
  """
  certmap = common.ReadApkCerts(tf_zip)
  # Remap the certs read from the file.
  for apk in certmap:
    certmap[apk] = OPTIONS.key_map.get(certmap[apk], certmap[apk])
  # -e options win over anything in the file.
  for apk, cert in OPTIONS.extra_apks.iteritems():
    key = cert if cert else "PRESIGNED"
    certmap[apk] = OPTIONS.key_map.get(key, key)
  return certmap
def CheckAllApksSigned(input_tf_zip, apk_key_map):
"""Check that all the APKs we want to sign have keys specified, and
error out if they don't."""
unknown_apks = []
for info in input_tf_zip.infolist():
if info.filename.endswith(".apk"):
name = os.path.basename(info.filename)
if name not in apk_key_map:
unknown_apks.append(name)
if unknown_apks:
print "ERROR: no key specified for:\n\n ",
print "\n ".join(unknown_apks)
print "\nUse '-e <apkname>=' to specify a key (which may be an"
print "empty string to not sign this apk)."
sys.exit(1)
def SignApk(data, keyname, pw):
  """Sign an in-memory APK.

  Writes *data* to a temp file, runs common.SignFile on it with key
  *keyname* and password *pw*, and returns the signed bytes.
  """
  src = tempfile.NamedTemporaryFile()
  dst = tempfile.NamedTemporaryFile()
  src.write(data)
  src.flush()
  # align=4 keeps the signed output zipaligned, as required for APKs.
  common.SignFile(src.name, dst.name, keyname, pw, align=4)
  signed_data = dst.read()
  src.close()
  dst.close()
  return signed_data
def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
                       apk_key_map, key_passwords):
  """Copy input_tf_zip into output_tf_zip, re-signing/rewriting entries.

  APKs are re-signed per apk_key_map/key_passwords, build.prop files
  have their fingerprint tags rewritten, mac_permissions.xml gets new
  cert digests, and (optionally) OTA/verity keys are replaced.  Files
  needed to rebuild the recovery patch are staged in a temp dir along
  the way.
  """
  # Width of the longest APK basename, used to align "signing:" output.
  maxsize = max([len(os.path.basename(i.filename))
                 for i in input_tf_zip.infolist()
                 if i.filename.endswith('.apk')])
  rebuild_recovery = False
  tmpdir = tempfile.mkdtemp()
  def write_to_temp(fn, attr, data):
    # Mirror a zip entry into tmpdir; attr is the zip external_attr
    # (the high 16 bits hold the Unix mode; 0xa1ff marks a symlink).
    fn = os.path.join(tmpdir, fn)
    if fn.endswith("/"):
      fn = os.path.join(tmpdir, fn)
      os.mkdir(fn)
    else:
      d = os.path.dirname(fn)
      if d and not os.path.exists(d):
        os.makedirs(d)
      if attr >> 16 == 0xa1ff:
        os.symlink(data, fn)
      else:
        with open(fn, "wb") as f:
          f.write(data)
  for info in input_tf_zip.infolist():
    # IMAGES/ entries are regenerated afterwards (add_img_to_target_files).
    if info.filename.startswith("IMAGES/"):
      continue
    data = input_tf_zip.read(info.filename)
    out_info = copy.copy(info)
    # First chain: stage entries needed for the recovery-patch rebuild
    # in tmpdir, applying verity key replacement where requested.
    if (info.filename == "META/misc_info.txt" and
        OPTIONS.replace_verity_private_key):
      ReplaceVerityPrivateKey(input_tf_zip, output_tf_zip, misc_info,
                              OPTIONS.replace_verity_private_key[1])
    elif (info.filename == "BOOT/RAMDISK/verity_key" and
          OPTIONS.replace_verity_public_key):
      new_data = ReplaceVerityPublicKey(output_tf_zip,
                                        OPTIONS.replace_verity_public_key[1])
      write_to_temp(info.filename, info.external_attr, new_data)
    elif (info.filename.startswith("BOOT/") or
          info.filename.startswith("RECOVERY/") or
          info.filename.startswith("META/") or
          info.filename == "SYSTEM/etc/recovery-resource.dat"):
      write_to_temp(info.filename, info.external_attr, data)
    # Second chain: decide what actually gets written to the output zip.
    if info.filename.endswith(".apk"):
      name = os.path.basename(info.filename)
      key = apk_key_map[name]
      if key not in common.SPECIAL_CERT_STRINGS:
        print " signing: %-*s (%s)" % (maxsize, name, key)
        signed_data = SignApk(data, key, key_passwords[key])
        output_tf_zip.writestr(out_info, signed_data)
      else:
        # an APK we're not supposed to sign.
        print "NOT signing: %s" % (name,)
        output_tf_zip.writestr(out_info, data)
    elif info.filename in ("SYSTEM/build.prop",
                           "VENDOR/build.prop",
                           "RECOVERY/RAMDISK/default.prop"):
      print "rewriting %s:" % (info.filename,)
      new_data = RewriteProps(data, misc_info)
      output_tf_zip.writestr(out_info, new_data)
      if info.filename == "RECOVERY/RAMDISK/default.prop":
        # recovery's default.prop is also needed for the patch rebuild.
        write_to_temp(info.filename, info.external_attr, new_data)
    elif info.filename.endswith("mac_permissions.xml"):
      print "rewriting %s with new keys." % (info.filename,)
      new_data = ReplaceCerts(data)
      output_tf_zip.writestr(out_info, new_data)
    elif info.filename in ("SYSTEM/recovery-from-boot.p",
                           "SYSTEM/bin/install-recovery.sh"):
      # Regenerated below from the (possibly re-signed) images.
      rebuild_recovery = True
    elif (OPTIONS.replace_ota_keys and
          info.filename in ("RECOVERY/RAMDISK/res/keys",
                            "SYSTEM/etc/security/otacerts.zip")):
      # don't copy these files if we're regenerating them below
      pass
    elif (OPTIONS.replace_verity_private_key and
          info.filename == "META/misc_info.txt"):
      # Already rewritten by ReplaceVerityPrivateKey above.
      pass
    elif (OPTIONS.replace_verity_public_key and
          info.filename == "BOOT/RAMDISK/verity_key"):
      # Already rewritten by ReplaceVerityPublicKey above.
      pass
    else:
      # a non-APK file; copy it verbatim
      output_tf_zip.writestr(out_info, data)
  if OPTIONS.replace_ota_keys:
    new_recovery_keys = ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info)
    if new_recovery_keys:
      # 0o755 << 16: store a normal rwxr-xr-x mode in the zip attrs.
      write_to_temp("RECOVERY/RAMDISK/res/keys", 0o755 << 16, new_recovery_keys)
  if rebuild_recovery:
    recovery_img = common.GetBootableImage(
        "recovery.img", "recovery.img", tmpdir, "RECOVERY", info_dict=misc_info)
    boot_img = common.GetBootableImage(
        "boot.img", "boot.img", tmpdir, "BOOT", info_dict=misc_info)
    def output_sink(fn, data):
      # Route the regenerated recovery-patch files into the output zip.
      output_tf_zip.writestr("SYSTEM/"+fn, data)
    common.MakeRecoveryPatch(tmpdir, output_sink, recovery_img, boot_img,
                             info_dict=misc_info)
  shutil.rmtree(tmpdir)
def ReplaceCerts(data):
  """Given a string of data, replace all occurrences of a set
  of X509 certs with a newer set of X509 certs and return
  the updated data string."""
  # For each old -> new mapping, swap the lowercase hex (base16) DER of
  # old.x509.pem for that of new.x509.pem wherever it appears in data.
  for old, new in OPTIONS.key_map.iteritems():
    try:
      if OPTIONS.verbose:
        print " Replacing %s.x509.pem with %s.x509.pem" % (old, new)
      f = open(old + ".x509.pem")
      old_cert16 = base64.b16encode(common.ParseCertificate(f.read())).lower()
      f.close()
      f = open(new + ".x509.pem")
      new_cert16 = base64.b16encode(common.ParseCertificate(f.read())).lower()
      f.close()
      # Only match entire certs.
      pattern = "\\b"+old_cert16+"\\b"
      (data, num) = re.subn(pattern, new_cert16, data, flags=re.IGNORECASE)
      if OPTIONS.verbose:
        print " Replaced %d occurence(s) of %s.x509.pem with " \
            "%s.x509.pem" % (num, old, new)
    except IOError as e:
      # A missing .pem file is expected for keys without certs on disk;
      # stay quiet about it unless verbose.
      if e.errno == errno.ENOENT and not OPTIONS.verbose:
        continue
      print " Error accessing %s. %s. Skip replacing %s.x509.pem " \
          "with %s.x509.pem." % (e.filename, e.strerror, old, new)
  return data
def EditTags(tags):
  """Apply the OPTIONS.tag_changes edits ("+tag" adds, "-tag" removes)
  to a comma-separated tag string and return the sorted result."""
  tag_set = set(tags.split(","))
  for change in OPTIONS.tag_changes:
    op, tag = change[0], change[1:]
    if op == "-":
      tag_set.discard(tag)
    elif op == "+":
      tag_set.add(tag)
  return ",".join(sorted(tag_set))
def RewriteProps(data, misc_info):
  """Rewrite the signing-related values in a build.prop blob.

  Fingerprint/thumbprint, description, tags, and display-id properties
  have their key-tag component rewritten via EditTags; every other line
  passes through unchanged.  Returns the new file contents.
  """
  output = []
  for line in data.split("\n"):
    line = line.strip()
    original_line = line
    if line and line[0] != '#' and "=" in line:
      key, value = line.split("=", 1)
      # Whether fingerprint or thumbprint carries the tags depends on
      # whether the build uses OEM fingerprint properties.
      if (key in ("ro.build.fingerprint", "ro.vendor.build.fingerprint")
          and misc_info.get("oem_fingerprint_properties") is None):
        pieces = value.split("/")
        pieces[-1] = EditTags(pieces[-1])
        value = "/".join(pieces)
      elif (key in ("ro.build.thumbprint", "ro.vendor.build.thumbprint")
            and misc_info.get("oem_fingerprint_properties") is not None):
        pieces = value.split("/")
        pieces[-1] = EditTags(pieces[-1])
        value = "/".join(pieces)
      elif key == "ro.build.description":
        # Description is five space-separated fields; the last is tags.
        pieces = value.split(" ")
        assert len(pieces) == 5
        pieces[-1] = EditTags(pieces[-1])
        value = " ".join(pieces)
      elif key == "ro.build.tags":
        value = EditTags(value)
      elif key == "ro.build.display.id":
        # change, eg, "JWR66N dev-keys" to "JWR66N"
        value = value.split()
        if len(value) > 1 and value[-1].endswith("-keys"):
          value.pop()
        value = " ".join(value)
      line = key + "=" + value
    if line != original_line:
      print " replace: ", original_line
      print " with: ", line
    output.append(line)
  return "\n".join(output) + "\n"
def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info):
  """Replace the OTA package-verification keys in the output zip.

  Reads the key list from META/otakeys.txt, remaps each key through
  OPTIONS.key_map, then writes both the recovery-format key file
  (RECOVERY/RAMDISK/res/keys, produced by dumpkey.jar) and the
  otacerts.zip used by the system updater.  Returns the recovery keys
  blob, or raises common.ExternalError on failure.
  """
  try:
    keylist = input_tf_zip.read("META/otakeys.txt").split()
  except KeyError:
    raise common.ExternalError("can't read META/otakeys.txt from input")
  extra_recovery_keys = misc_info.get("extra_recovery_keys", None)
  if extra_recovery_keys:
    # Remap these too; they end up only in the recovery key file.
    extra_recovery_keys = [OPTIONS.key_map.get(k, k) + ".x509.pem"
                           for k in extra_recovery_keys.split()]
    if extra_recovery_keys:
      print "extra recovery-only key(s): " + ", ".join(extra_recovery_keys)
  else:
    extra_recovery_keys = []
  mapped_keys = []
  for k in keylist:
    # Each entry must be of the form <name>.x509.pem; remap <name>.
    m = re.match(r"^(.*)\.x509\.pem$", k)
    if not m:
      raise common.ExternalError(
          "can't parse \"%s\" from META/otakeys.txt" % (k,))
    k = m.group(1)
    mapped_keys.append(OPTIONS.key_map.get(k, k) + ".x509.pem")
  if mapped_keys:
    print "using:\n ", "\n ".join(mapped_keys)
    print "for OTA package verification"
  else:
    # No keys listed: fall back to the product's default dev certificate.
    devkey = misc_info.get("default_system_dev_certificate",
                           "build/target/product/security/testkey")
    mapped_keys.append(
        OPTIONS.key_map.get(devkey, devkey) + ".x509.pem")
    print "META/otakeys.txt has no keys; using", mapped_keys[0]
  # recovery uses a version of the key that has been slightly
  # predigested (by DumpPublicKey.java) and put in res/keys.
  # extra_recovery_keys are used only in recovery.
  p = common.Run(["java", "-jar",
                  os.path.join(OPTIONS.search_path, "framework", "dumpkey.jar")]
                 + mapped_keys + extra_recovery_keys,
                 stdout=subprocess.PIPE)
  new_recovery_keys, _ = p.communicate()
  if p.returncode != 0:
    raise common.ExternalError("failed to run dumpkeys")
  common.ZipWriteStr(output_tf_zip, "RECOVERY/RAMDISK/res/keys",
                     new_recovery_keys)
  # SystemUpdateActivity uses the x509.pem version of the keys, but
  # put into a zipfile system/etc/security/otacerts.zip.
  # We DO NOT include the extra_recovery_keys (if any) here.
  temp_file = cStringIO.StringIO()
  certs_zip = zipfile.ZipFile(temp_file, "w")
  for k in mapped_keys:
    certs_zip.write(k)
  certs_zip.close()
  common.ZipWriteStr(output_tf_zip, "SYSTEM/etc/security/otacerts.zip",
                     temp_file.getvalue())
  return new_recovery_keys
def ReplaceVerityPublicKey(targetfile_zip, key_path):
print "Replacing verity public key with %s" % key_path
with open(key_path) as f:
data = f.read()
common.ZipWriteStr(targetfile_zip, "BOOT/RAMDISK/verity_key", data)
return data
def ReplaceVerityPrivateKey(targetfile_input_zip, targetfile_output_zip,
misc_info, key_path):
print "Replacing verity private key with %s" % key_path
current_key = misc_info["verity_key"]
original_misc_info = targetfile_input_zip.read("META/misc_info.txt")
new_misc_info = original_misc_info.replace(current_key, key_path)
common.ZipWriteStr(targetfile_output_zip, "META/misc_info.txt", new_misc_info)
misc_info["verity_key"] = key_path
def BuildKeyMap(misc_info, key_mapping_options):
  """Populate OPTIONS.key_map from the parsed -k/-d options.

  Each element of key_mapping_options is a (src, dest) pair.  A src of
  None marks a -d option, which installs the standard dev-key ->
  $dir/... mappings based on the product's default dev certificate.
  """
  for src, dest in key_mapping_options:
    if src is not None:
      # -k option: one explicit src -> dest mapping.
      OPTIONS.key_map[src] = dest
      continue
    # -d option: map the standard dev keys into the given directory.
    devkey = misc_info.get("default_system_dev_certificate",
                           "build/target/product/security/testkey")
    devkeydir = os.path.dirname(devkey)
    for name, target in (("testkey", "releasekey"),
                         ("devkey", "releasekey"),
                         ("media", "media"),
                         ("shared", "shared"),
                         ("platform", "platform")):
      OPTIONS.key_map[devkeydir + "/" + name] = dest + "/" + target
def main(argv):
  """Parse flags, sign the input target-files zip, and write the output
  target-files zip (regenerating its IMAGES/ entries at the end)."""
  key_mapping_options = []
  def option_handler(o, a):
    # Handles this script's extra flags; returns False for unknown
    # options so common.ParseOptions can report them.
    if o in ("-e", "--extra_apks"):
      names, key = a.split("=")
      names = names.split(",")
      for n in names:
        OPTIONS.extra_apks[n] = key
    elif o in ("-d", "--default_key_mappings"):
      # src of None marks a -d entry; see BuildKeyMap.
      key_mapping_options.append((None, a))
    elif o in ("-k", "--key_mapping"):
      key_mapping_options.append(a.split("=", 1))
    elif o in ("-o", "--replace_ota_keys"):
      OPTIONS.replace_ota_keys = True
    elif o in ("-t", "--tag_changes"):
      # Normalize each change to "+tag" or "-tag"; anything else is an
      # error.
      new = []
      for i in a.split(","):
        i = i.strip()
        if not i or i[0] not in "-+":
          raise ValueError("Bad tag change '%s'" % (i,))
        new.append(i[0] + i[1:].strip())
      OPTIONS.tag_changes = tuple(new)
    elif o == "--replace_verity_public_key":
      OPTIONS.replace_verity_public_key = (True, a)
    elif o == "--replace_verity_private_key":
      OPTIONS.replace_verity_private_key = (True, a)
    else:
      return False
    return True
  args = common.ParseOptions(argv, __doc__,
                             extra_opts="e:d:k:ot:",
                             extra_long_opts=["extra_apks=",
                                              "default_key_mappings=",
                                              "key_mapping=",
                                              "replace_ota_keys",
                                              "tag_changes=",
                                              "replace_verity_public_key=",
                                              "replace_verity_private_key="],
                             extra_option_handler=option_handler)
  if len(args) != 2:
    common.Usage(__doc__)
    sys.exit(1)
  input_zip = zipfile.ZipFile(args[0], "r")
  output_zip = zipfile.ZipFile(args[1], "w")
  misc_info = common.LoadInfoDict(input_zip)
  BuildKeyMap(misc_info, key_mapping_options)
  apk_key_map = GetApkCerts(input_zip)
  CheckAllApksSigned(input_zip, apk_key_map)
  key_passwords = common.GetKeyPasswords(set(apk_key_map.values()))
  ProcessTargetFiles(input_zip, output_zip, misc_info,
                     apk_key_map, key_passwords)
  input_zip.close()
  output_zip.close()
  # Rebuild the IMAGES/ entries from the newly signed contents.
  add_img_to_target_files.AddImagesToTargetFiles(args[1])
  print "done."
if __name__ == '__main__':
  try:
    main(sys.argv[1:])
  except common.ExternalError, e:
    # Expected failures (bad input, external tool errors) get a short
    # message instead of a traceback.
    print
    print " ERROR: %s" % (e,)
    print
    sys.exit(1)

View File

@@ -14,12 +14,11 @@
import bisect import bisect
import os import os
import sys
import struct import struct
import pprint
from hashlib import sha1 from hashlib import sha1
from rangelib import * import rangelib
class SparseImage(object): class SparseImage(object):
"""Wraps a sparse image file (and optional file map) into an image """Wraps a sparse image file (and optional file map) into an image
@@ -39,7 +38,6 @@ class SparseImage(object):
self.blocksize = blk_sz = header[5] self.blocksize = blk_sz = header[5]
self.total_blocks = total_blks = header[6] self.total_blocks = total_blks = header[6]
total_chunks = header[7] total_chunks = header[7]
image_checksum = header[8]
if magic != 0xED26FF3A: if magic != 0xED26FF3A:
raise ValueError("Magic should be 0xED26FF3A but is 0x%08X" % (magic,)) raise ValueError("Magic should be 0xED26FF3A but is 0x%08X" % (magic,))
@@ -64,7 +62,6 @@ class SparseImage(object):
header_bin = f.read(12) header_bin = f.read(12)
header = struct.unpack("<2H2I", header_bin) header = struct.unpack("<2H2I", header_bin)
chunk_type = header[0] chunk_type = header[0]
reserved1 = header[1]
chunk_sz = header[2] chunk_sz = header[2]
total_sz = header[3] total_sz = header[3]
data_sz = total_sz - 12 data_sz = total_sz - 12
@@ -102,7 +99,7 @@ class SparseImage(object):
raise ValueError("Unknown chunk type 0x%04X not supported" % raise ValueError("Unknown chunk type 0x%04X not supported" %
(chunk_type,)) (chunk_type,))
self.care_map = RangeSet(care_data) self.care_map = rangelib.RangeSet(care_data)
self.offset_index = [i[0] for i in offset_map] self.offset_index = [i[0] for i in offset_map]
if file_map_fn: if file_map_fn:
@@ -166,7 +163,7 @@ class SparseImage(object):
with open(fn) as f: with open(fn) as f:
for line in f: for line in f:
fn, ranges = line.split(None, 1) fn, ranges = line.split(None, 1)
ranges = RangeSet.parse(ranges) ranges = rangelib.RangeSet.parse(ranges)
out[fn] = ranges out[fn] = ranges
assert ranges.size() == ranges.intersect(remaining).size() assert ranges.size() == ranges.intersect(remaining).size()
remaining = remaining.subtract(ranges) remaining = remaining.subtract(ranges)
@@ -186,7 +183,7 @@ class SparseImage(object):
for s, e in remaining: for s, e in remaining:
for b in range(s, e): for b in range(s, e):
idx = bisect.bisect_right(self.offset_index, b) - 1 idx = bisect.bisect_right(self.offset_index, b) - 1
chunk_start, chunk_len, filepos, fill_data = self.offset_map[idx] chunk_start, _, filepos, fill_data = self.offset_map[idx]
if filepos is not None: if filepos is not None:
filepos += (b-chunk_start) * self.blocksize filepos += (b-chunk_start) * self.blocksize
f.seek(filepos, os.SEEK_SET) f.seek(filepos, os.SEEK_SET)
@@ -204,8 +201,8 @@ class SparseImage(object):
nonzero_blocks.append(b) nonzero_blocks.append(b)
nonzero_blocks.append(b+1) nonzero_blocks.append(b+1)
out["__ZERO"] = RangeSet(data=zero_blocks) out["__ZERO"] = rangelib.RangeSet(data=zero_blocks)
out["__NONZERO"] = RangeSet(data=nonzero_blocks) out["__NONZERO"] = rangelib.RangeSet(data=nonzero_blocks)
def ResetFileMap(self): def ResetFileMap(self):
"""Throw away the file map and treat the entire image as """Throw away the file map and treat the entire image as