Add support for clobbered blocks

In ext4 filesystems, some blocks might be changed even when mounted
R/O, such as the superblock (block 0). We need to exclude such blocks
from integrity verification. In addition, such blocks should always be
written to the target by copying instead of patching.

Bug: http://b/20939131
Change-Id: I991169ec307dfb231b2fe8908a0668595ecb2060
Author: Tao Bao
Date:   2015-05-12 11:42:31 -07:00
Parent: 4415894579
Commit: ff7778166b

4 changed files with 60 additions and 21 deletions
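The verification side of the change reduces to hashing the care map minus the
clobbered blocks. A minimal, self-contained sketch of that idea (plain Python
sets and hypothetical names stand in for the RangeSet objects the release
tools actually use; this is an illustration, not code from the change):

# Sketch only: models "hash every block we care about except the blocks the
# filesystem may rewrite while mounted R/O" using plain sets.
from hashlib import sha1

BLOCK_SIZE = 4096

def total_sha1(image_data, care_blocks, clobbered_blocks):
    # Hash the care blocks in order, skipping the clobbered ones (e.g. block 0).
    h = sha1()
    for b in sorted(care_blocks - clobbered_blocks):
        h.update(image_data[b * BLOCK_SIZE:(b + 1) * BLOCK_SIZE])
    return h.hexdigest()

if __name__ == "__main__":
    img = bytes(8 * BLOCK_SIZE)   # eight zero-filled blocks
    print(total_sha1(img, care_blocks=set(range(8)), clobbered_blocks={0}))

The clobbered blocks themselves are still written to the target; they are just
copied unconditionally instead of being patched.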

blockimgdiff.py

@@ -82,6 +82,7 @@ class EmptyImage(Image):
   """A zero-length image."""
   blocksize = 4096
   care_map = RangeSet()
+  clobbered_blocks = RangeSet()
   total_blocks = 0
   file_map = {}
   def ReadRangeSet(self, ranges):
@@ -114,6 +115,7 @@ class DataImage(Image):
     self.total_blocks = len(self.data) / self.blocksize
     self.care_map = RangeSet(data=(0, self.total_blocks))
+    self.clobbered_blocks = RangeSet()

     zero_blocks = []
     nonzero_blocks = []
@@ -135,6 +137,8 @@ class DataImage(Image):
     return [self.data[s*self.blocksize:e*self.blocksize] for (s, e) in ranges]

   def TotalSha1(self):
+    # DataImage always carries empty clobbered_blocks.
+    assert self.clobbered_blocks.size() == 0
     return sha1(self.data).hexdigest()
@@ -184,6 +188,10 @@ class Transfer(object):
 #    (Typically a domain is a file, and the key in file_map is the
 #    pathname.)
 #
+#  clobbered_blocks: a RangeSet containing which blocks contain data
+#    but may be altered by the FS. They need to be excluded when
+#    verifying the partition integrity.
+#
 #  ReadRangeSet(): a function that takes a RangeSet and returns the
 #    data contained in the image blocks of that RangeSet.  The data
 #    is returned as a list or tuple of strings; concatenating the
@@ -193,7 +201,7 @@ class Transfer(object):
 #
 #  TotalSha1(): a function that returns (as a hex string) the SHA-1
 #    hash of all the data in the image (ie, all the blocks in the
-#    care_map)
+#    care_map minus clobbered_blocks).
 #
 # When creating a BlockImageDiff, the src image may be None, in which
 # case the list of transfers produced will never read from the
@@ -445,7 +453,6 @@ class BlockImageDiff(object):
       if free_string:
         out.append("".join(free_string))

-
     # sanity check: abort if we're going to need more than 512 MB if
     # stash space
     assert max_stashed_blocks * self.tgt.blocksize < (512 << 20)
@@ -845,6 +852,12 @@ class BlockImageDiff(object):
                  "zero", self.transfers)
         continue

+      elif tgt_fn == "__COPY":
+        # "__COPY" domain includes all the blocks not contained in any
+        # file and that need to be copied unconditionally to the target.
+        Transfer(tgt_fn, None, tgt_ranges, empty, "new", self.transfers)
+        continue
+
       elif tgt_fn in self.src.file_map:
         # Look for an exact pathname match in the source.
         Transfer(tgt_fn, tgt_fn, tgt_ranges, self.src.file_map[tgt_fn],

common.py

@@ -1141,6 +1141,9 @@ class BlockDifference(object):
     self.partition = partition
     self.check_first_block = check_first_block

+    # Due to http://b/20939131, check_first_block is disabled temporarily.
+    assert not self.check_first_block
+
     if version is None:
       version = 1
       if OPTIONS.info_dict:
@@ -1174,18 +1177,18 @@ class BlockDifference(object):
     if not self.src:
       script.Print("Image %s will be patched unconditionally." % (partition,))
     else:
+      ranges = self.src.care_map.subtract(self.src.clobbered_blocks)
+      ranges_str = ranges.to_string_raw()
       if self.version >= 3:
         script.AppendExtra(('if (range_sha1("%s", "%s") == "%s" || '
                             'block_image_verify("%s", '
                             'package_extract_file("%s.transfer.list"), '
                             '"%s.new.dat", "%s.patch.dat")) then') % (
-                           self.device, self.src.care_map.to_string_raw(),
-                           self.src.TotalSha1(),
+                           self.device, ranges_str, self.src.TotalSha1(),
                            self.device, partition, partition, partition))
       else:
         script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
-                           self.device, self.src.care_map.to_string_raw(),
-                           self.src.TotalSha1()))
+                           self.device, ranges_str, self.src.TotalSha1()))

       script.Print('Verified %s image...' % (partition,))
       script.AppendExtra('else')
@@ -1233,6 +1236,9 @@ class BlockDifference(object):
     return ctx.hexdigest()

+  # TODO(tbao): Due to http://b/20939131, block 0 may be changed without
+  # remounting R/W. Will change the checking to a finer-grained way to
+  # mask off those bits.
   def _CheckFirstBlock(self, script):
     r = rangelib.RangeSet((0, 1))
     srchash = self._HashBlocks(self.src, r)

ota_from_target_files.py

@@ -475,7 +475,13 @@ def GetImage(which, tmpdir, info_dict):
     path = add_img_to_target_files.BuildVendor(
         tmpdir, info_dict, block_list=mappath)

-  return sparse_img.SparseImage(path, mappath)
+  # Bug: http://b/20939131
+  # In ext4 filesystems, block 0 might be changed even when mounted R/O.
+  # We add it to clobbered_blocks so that it will be written to the
+  # target unconditionally. Note that it is still part of care_map.
+  clobbered_blocks = "0"
+  return sparse_img.SparseImage(path, mappath, clobbered_blocks)


 def WriteFullOTAPackage(input_zip, output_zip):
@@ -773,7 +779,6 @@ def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip):
       OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))

   system_diff = common.BlockDifference("system", system_tgt, system_src,
-                                       check_first_block=True,
                                        version=blockimgdiff_version)

   if HasVendorPartition(target_zip):
@@ -784,7 +789,6 @@ def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip):
     vendor_tgt = GetImage("vendor", OPTIONS.target_tmp,
                           OPTIONS.target_info_dict)
     vendor_diff = common.BlockDifference("vendor", vendor_tgt, vendor_src,
-                                         check_first_block=True,
                                          version=blockimgdiff_version)
   else:
     vendor_diff = None

sparse_img.py

@@ -21,10 +21,17 @@ import rangelib

 class SparseImage(object):
-  """Wraps a sparse image file (and optional file map) into an image
-  object suitable for passing to BlockImageDiff."""
+  """Wraps a sparse image file into an image object.

-  def __init__(self, simg_fn, file_map_fn=None):
+  Wraps a sparse image file (and optional file map and clobbered_blocks) into
+  an image object suitable for passing to BlockImageDiff. file_map contains
+  the mapping between files and their blocks. clobbered_blocks contains the
+  set of blocks that should always be written to the target regardless of
+  the old contents (i.e. copying instead of patching). clobbered_blocks
+  should be in the form of a string like "0" or "0 1-5 8".
+  """
+
+  def __init__(self, simg_fn, file_map_fn=None, clobbered_blocks=None):
     self.simg_f = f = open(simg_fn, "rb")

     header_bin = f.read(28)
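As an aside on the clobbered_blocks string format mentioned in the docstring
above ("0" or "0 1-5 8"): it is a space-separated list of single blocks and
start-end ranges, with the end treated as inclusive here. A rough stand-alone
parser sketch (the real tools use rangelib.RangeSet.parse; this is only an
illustration, and parse_blocks is a made-up name):

# Illustration only: expand a range string like "0 1-5 8" into block indices.
# rangelib.RangeSet keeps the same information as compact (start, end) pairs.
def parse_blocks(spec):
    blocks = set()
    for token in spec.split():
        if "-" in token:
            start, end = (int(x) for x in token.split("-"))
            blocks.update(range(start, end + 1))   # end is inclusive
        else:
            blocks.add(int(token))
    return blocks

assert parse_blocks("0") == {0}
assert parse_blocks("0 1-5 8") == {0, 1, 2, 3, 4, 5, 8}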
@@ -57,6 +64,7 @@ class SparseImage(object):
     pos = 0   # in blocks
     care_data = []
     self.offset_map = offset_map = []
+    self.clobbered_blocks = rangelib.RangeSet(data=clobbered_blocks)

     for i in range(total_chunks):
       header_bin = f.read(12)
@@ -103,7 +111,7 @@ class SparseImage(object):
     self.offset_index = [i[0] for i in offset_map]

     if file_map_fn:
-      self.LoadFileBlockMap(file_map_fn)
+      self.LoadFileBlockMap(file_map_fn, self.clobbered_blocks)
     else:
       self.file_map = {"__DATA": self.care_map}
@@ -111,9 +119,10 @@ class SparseImage(object):
     return [d for d in self._GetRangeData(ranges)]

   def TotalSha1(self):
-    """Return the SHA-1 hash of all data in the 'care' regions of this image."""
+    """Return the SHA-1 hash of all data in the 'care' regions but not in
+    clobbered_blocks of this image."""
     h = sha1()
-    for d in self._GetRangeData(self.care_map):
+    for d in self._GetRangeData(self.care_map.subtract(self.clobbered_blocks)):
       h.update(d)
     return h.hexdigest()
@@ -156,7 +165,7 @@ class SparseImage(object):
         yield fill_data * (this_read * (self.blocksize >> 2))
         to_read -= this_read

-  def LoadFileBlockMap(self, fn):
+  def LoadFileBlockMap(self, fn, clobbered_blocks):
     remaining = self.care_map
     self.file_map = out = {}
@@ -166,14 +175,20 @@ class SparseImage(object):
       ranges = rangelib.RangeSet.parse(ranges)
       out[fn] = ranges
       assert ranges.size() == ranges.intersect(remaining).size()
+
+      # Currently we assume that blocks in clobbered_blocks are not part of
+      # any file.
+      assert not clobbered_blocks.overlaps(ranges)
       remaining = remaining.subtract(ranges)

+    remaining = remaining.subtract(clobbered_blocks)
+
     # For all the remaining blocks in the care_map (ie, those that
-    # aren't part of the data for any file), divide them into blocks
-    # that are all zero and blocks that aren't. (Zero blocks are
-    # handled specially because (1) there are usually a lot of them
-    # and (2) bsdiff handles files with long sequences of repeated
-    # bytes especially poorly.)
+    # aren't part of the data for any file nor part of the clobbered_blocks),
+    # divide them into blocks that are all zero and blocks that aren't.
+    # (Zero blocks are handled specially because (1) there are usually
+    # a lot of them and (2) bsdiff handles files with long sequences of
+    # repeated bytes especially poorly.)

     zero_blocks = []
     nonzero_blocks = []
@@ -203,6 +218,7 @@ class SparseImage(object):

     out["__ZERO"] = rangelib.RangeSet(data=zero_blocks)
     out["__NONZERO"] = rangelib.RangeSet(data=nonzero_blocks)
+    out["__COPY"] = clobbered_blocks

   def ResetFileMap(self):
     """Throw away the file map and treat the entire image as