Diffstat (limited to 'scripts/update_payload')
-rw-r--r-- | scripts/update_payload/__init__.py              |   2
-rw-r--r-- | scripts/update_payload/applier.py               | 170
-rw-r--r-- | scripts/update_payload/checker.py               | 318
-rwxr-xr-x | scripts/update_payload/checker_unittest.py      | 482
-rw-r--r-- | scripts/update_payload/common.py                |  27
-rw-r--r-- | scripts/update_payload/format_utils.py          |   8
-rwxr-xr-x | scripts/update_payload/format_utils_unittest.py |   7
-rw-r--r-- | scripts/update_payload/histogram.py             |   5
-rwxr-xr-x | scripts/update_payload/histogram_unittest.py    |   7
-rw-r--r-- | scripts/update_payload/payload.py               |  46
-rw-r--r-- | scripts/update_payload/test_utils.py            |  74
-rw-r--r-- | scripts/update_payload/update_metadata_pb2.py   | 321
12 files changed, 677 insertions, 790 deletions
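
Most of the diff below is a mechanical Python 2 to 3 migration, alongside removal of major-version-1 payload support (MOVE/BSDIFF operations, pseudo-extents, and the old install_operations manifest fields). A minimal sketch of the recurring Python 3 conversions the diff applies across these files; it assumes the six package, which the diff itself imports, and the names here are illustrative only:

    from __future__ import absolute_import

    import array
    import io
    import sys

    # pylint: disable=redefined-builtin
    from six.moves import range

    max_length = sys.maxsize            # was sys.maxint, removed in Python 3
    data = array.array('B')             # was array.array('c'); 'c' no longer exists
    data.extend(b'\0' * 8)              # zero fill must be bytes, not str
    chunk = data[2:6]                   # slicing replaces the removed buffer() builtin
    payload_file = io.BytesIO()         # was cStringIO.StringIO()
    num_blocks = (4096 + 4095) // 4096  # '//' keeps integer division semantics
    for i in range(num_blocks):         # six.moves.range replaces xrange
        pass
    for part, info in {'root': None}.items():  # .items() replaces .iteritems()
        pass
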
diff --git a/scripts/update_payload/__init__.py b/scripts/update_payload/__init__.py index 8ee95e20..6e77678d 100644 --- a/scripts/update_payload/__init__.py +++ b/scripts/update_payload/__init__.py @@ -17,6 +17,8 @@ """Library for processing, verifying and applying Chrome OS update payloads.""" # Just raise the interface classes to the root namespace. +from __future__ import absolute_import + from update_payload.checker import CHECKS_TO_DISABLE from update_payload.error import PayloadError from update_payload.payload import Payload diff --git a/scripts/update_payload/applier.py b/scripts/update_payload/applier.py index 21d8e875..29ccb8e5 100644 --- a/scripts/update_payload/applier.py +++ b/scripts/update_payload/applier.py @@ -24,12 +24,12 @@ payload. The interface for invoking the applier is as follows: """ +from __future__ import absolute_import from __future__ import print_function import array import bz2 import hashlib -import itertools # Not everywhere we can have the lzma library so we ignore it if we didn't have # it because it is not going to be used. For example, 'cros flash' uses # devserver code which eventually loads this file, but the lzma library is not @@ -45,7 +45,6 @@ except ImportError: except ImportError: pass import os -import shutil import subprocess import sys import tempfile @@ -53,7 +52,6 @@ import tempfile from update_payload import common from update_payload.error import PayloadError - # # Helper functions. # @@ -72,7 +70,7 @@ def _VerifySha256(file_obj, expected_hash, name, length=-1): """ hasher = hashlib.sha256() block_length = 1024 * 1024 - max_length = length if length >= 0 else sys.maxint + max_length = length if length >= 0 else sys.maxsize while max_length > 0: read_length = min(max_length, block_length) @@ -108,20 +106,16 @@ def _ReadExtents(file_obj, extents, block_size, max_length=-1): Returns: A character array containing the concatenated read data. """ - data = array.array('c') + data = array.array('B') if max_length < 0: - max_length = sys.maxint + max_length = sys.maxsize for ex in extents: if max_length == 0: break read_length = min(max_length, ex.num_blocks * block_size) - # Fill with zeros or read from file, depending on the type of extent. - if ex.start_block == common.PSEUDO_EXTENT_MARKER: - data.extend(itertools.repeat('\0', read_length)) - else: - file_obj.seek(ex.start_block * block_size) - data.fromfile(file_obj, read_length) + file_obj.seek(ex.start_block * block_size) + data.fromfile(file_obj, read_length) max_length -= read_length @@ -149,12 +143,8 @@ def _WriteExtents(file_obj, data, extents, block_size, base_name): if not data_length: raise PayloadError('%s: more write extents than data' % ex_name) write_length = min(data_length, ex.num_blocks * block_size) - - # Only do actual writing if this is not a pseudo-extent. 
- if ex.start_block != common.PSEUDO_EXTENT_MARKER: - file_obj.seek(ex.start_block * block_size) - data_view = buffer(data, data_offset, write_length) - file_obj.write(data_view) + file_obj.seek(ex.start_block * block_size) + file_obj.write(data[data_offset:(data_offset + write_length)]) data_offset += write_length data_length -= write_length @@ -184,20 +174,17 @@ def _ExtentsToBspatchArg(extents, block_size, base_name, data_length=-1): arg = '' pad_off = pad_len = 0 if data_length < 0: - data_length = sys.maxint + data_length = sys.maxsize for ex, ex_name in common.ExtentIter(extents, base_name): if not data_length: raise PayloadError('%s: more extents than total data length' % ex_name) - is_pseudo = ex.start_block == common.PSEUDO_EXTENT_MARKER - start_byte = -1 if is_pseudo else ex.start_block * block_size + start_byte = ex.start_block * block_size num_bytes = ex.num_blocks * block_size if data_length < num_bytes: # We're only padding a real extent. - if not is_pseudo: - pad_off = start_byte + data_length - pad_len = num_bytes - data_length - + pad_off = start_byte + data_length + pad_len = num_bytes - data_length num_bytes = data_length arg += '%s%d:%d' % (arg and ',', start_byte, num_bytes) @@ -274,30 +261,28 @@ class PayloadApplier(object): num_blocks = ex.num_blocks count = num_blocks * block_size - # Make sure it's not a fake (signature) operation. - if start_block != common.PSEUDO_EXTENT_MARKER: - data_end = data_start + count + data_end = data_start + count - # Make sure we're not running past partition boundary. - if (start_block + num_blocks) * block_size > part_size: - raise PayloadError( - '%s: extent (%s) exceeds partition size (%d)' % - (ex_name, common.FormatExtent(ex, block_size), - part_size)) + # Make sure we're not running past partition boundary. + if (start_block + num_blocks) * block_size > part_size: + raise PayloadError( + '%s: extent (%s) exceeds partition size (%d)' % + (ex_name, common.FormatExtent(ex, block_size), + part_size)) - # Make sure that we have enough data to write. - if data_end >= data_length + block_size: - raise PayloadError( - '%s: more dst blocks than data (even with padding)') + # Make sure that we have enough data to write. + if data_end >= data_length + block_size: + raise PayloadError( + '%s: more dst blocks than data (even with padding)') - # Pad with zeros if necessary. - if data_end > data_length: - padding = data_end - data_length - out_data += '\0' * padding + # Pad with zeros if necessary. + if data_end > data_length: + padding = data_end - data_length + out_data += b'\0' * padding - self.payload.payload_file.seek(start_block * block_size) - part_file.seek(start_block * block_size) - part_file.write(out_data[data_start:data_end]) + self.payload.payload_file.seek(start_block * block_size) + part_file.seek(start_block * block_size) + part_file.write(out_data[data_start:data_end]) data_start += count @@ -306,30 +291,6 @@ class PayloadApplier(object): raise PayloadError('%s: wrote fewer bytes (%d) than expected (%d)' % (op_name, data_start, data_length)) - def _ApplyMoveOperation(self, op, op_name, part_file): - """Applies a MOVE operation. - - Note that this operation must read the whole block data from the input and - only then dump it, due to our in-place update semantics; otherwise, it - might clobber data midway through. - - Args: - op: the operation object - op_name: name string for error reporting - part_file: the partition file object - - Raises: - PayloadError if something goes wrong. 
- """ - block_size = self.block_size - - # Gather input raw data from src extents. - in_data = _ReadExtents(part_file, op.src_extents, block_size) - - # Dump extracted data to dst extents. - _WriteExtents(part_file, in_data, op.dst_extents, block_size, - '%s.dst_extents' % op_name) - def _ApplyZeroOperation(self, op, op_name, part_file): """Applies a ZERO operation. @@ -347,10 +308,8 @@ class PayloadApplier(object): # Iterate over the extents and write zero. # pylint: disable=unused-variable for ex, ex_name in common.ExtentIter(op.dst_extents, base_name): - # Only do actual writing if this is not a pseudo-extent. - if ex.start_block != common.PSEUDO_EXTENT_MARKER: - part_file.seek(ex.start_block * block_size) - part_file.write('\0' * (ex.num_blocks * block_size)) + part_file.seek(ex.start_block * block_size) + part_file.write(b'\0' * (ex.num_blocks * block_size)) def _ApplySourceCopyOperation(self, op, op_name, old_part_file, new_part_file): @@ -439,12 +398,19 @@ class PayloadApplier(object): # Diff from source partition. old_file_name = '/dev/fd/%d' % old_part_file.fileno() - if op.type in (common.OpType.BSDIFF, common.OpType.SOURCE_BSDIFF, - common.OpType.BROTLI_BSDIFF): + # In python3, file descriptors(fd) are not passed to child processes by + # default. To pass the fds to the child processes, we need to set the flag + # 'inheritable' in the fds and make the subprocess calls with the argument + # close_fds set to False. + if sys.version_info.major >= 3: + os.set_inheritable(new_part_file.fileno(), True) + os.set_inheritable(old_part_file.fileno(), True) + + if op.type in (common.OpType.SOURCE_BSDIFF, common.OpType.BROTLI_BSDIFF): # Invoke bspatch on partition file with extents args. bspatch_cmd = [self.bspatch_path, old_file_name, new_file_name, patch_file_name, in_extents_arg, out_extents_arg] - subprocess.check_call(bspatch_cmd) + subprocess.check_call(bspatch_cmd, close_fds=False) elif op.type == common.OpType.PUFFDIFF: # Invoke puffpatch on partition file with extents args. puffpatch_cmd = [self.puffpatch_path, @@ -454,14 +420,14 @@ class PayloadApplier(object): "--patch_file=%s" % patch_file_name, "--src_extents=%s" % in_extents_arg, "--dst_extents=%s" % out_extents_arg] - subprocess.check_call(puffpatch_cmd) + subprocess.check_call(puffpatch_cmd, close_fds=False) else: - raise PayloadError("Unknown operation %s", op.type) + raise PayloadError("Unknown operation %s" % op.type) # Pad with zeros past the total output length. if pad_len: new_part_file.seek(pad_off) - new_part_file.write('\0' * pad_len) + new_part_file.write(b'\0' * pad_len) else: # Gather input raw data and write to a temp file. input_part_file = old_part_file if old_part_file else new_part_file @@ -477,8 +443,7 @@ class PayloadApplier(object): with tempfile.NamedTemporaryFile(delete=False) as out_file: out_file_name = out_file.name - if op.type in (common.OpType.BSDIFF, common.OpType.SOURCE_BSDIFF, - common.OpType.BROTLI_BSDIFF): + if op.type in (common.OpType.SOURCE_BSDIFF, common.OpType.BROTLI_BSDIFF): # Invoke bspatch. bspatch_cmd = [self.bspatch_path, in_file_name, out_file_name, patch_file_name] @@ -492,7 +457,7 @@ class PayloadApplier(object): "--patch_file=%s" % patch_file_name] subprocess.check_call(puffpatch_cmd) else: - raise PayloadError("Unknown operation %s", op.type) + raise PayloadError("Unknown operation %s" % op.type) # Read output. with open(out_file_name, 'rb') as out_file: @@ -505,7 +470,7 @@ class PayloadApplier(object): # Write output back to partition, with padding. 
unaligned_out_len = len(out_data) % block_size if unaligned_out_len: - out_data += '\0' * (block_size - unaligned_out_len) + out_data += b'\0' * (block_size - unaligned_out_len) _WriteExtents(new_part_file, out_data, op.dst_extents, block_size, '%s.dst_extents' % op_name) @@ -520,10 +485,6 @@ class PayloadApplier(object): new_part_file, part_size): """Applies a sequence of update operations to a partition. - This assumes an in-place update semantics for MOVE and BSDIFF, namely all - reads are performed first, then the data is processed and written back to - the same file. - Args: operations: the sequence of operations base_name: the name of the operation sequence @@ -541,13 +502,8 @@ class PayloadApplier(object): if op.type in (common.OpType.REPLACE, common.OpType.REPLACE_BZ, common.OpType.REPLACE_XZ): self._ApplyReplaceOperation(op, op_name, data, new_part_file, part_size) - elif op.type == common.OpType.MOVE: - self._ApplyMoveOperation(op, op_name, new_part_file) elif op.type == common.OpType.ZERO: self._ApplyZeroOperation(op, op_name, new_part_file) - elif op.type == common.OpType.BSDIFF: - self._ApplyDiffOperation(op, op_name, data, new_part_file, - new_part_file) elif op.type == common.OpType.SOURCE_COPY: self._ApplySourceCopyOperation(op, op_name, old_part_file, new_part_file) @@ -583,15 +539,8 @@ class PayloadApplier(object): _VerifySha256(old_part_file, old_part_info.hash, 'old ' + part_name, length=old_part_info.size) new_part_file_mode = 'r+b' - if self.minor_version == common.INPLACE_MINOR_PAYLOAD_VERSION: - # Copy the src partition to the dst one; make sure we don't truncate it. - shutil.copyfile(old_part_file_name, new_part_file_name) - elif self.minor_version >= common.SOURCE_MINOR_PAYLOAD_VERSION: - # In minor version >= 2, we don't want to copy the partitions, so - # instead just make the new partition file. - open(new_part_file_name, 'w').close() - else: - raise PayloadError("Unknown minor version: %d" % self.minor_version) + open(new_part_file_name, 'w').close() + else: # We need to create/truncate the dst partition file. new_part_file_mode = 'w+b' @@ -639,20 +588,11 @@ class PayloadApplier(object): install_operations = [] manifest = self.payload.manifest - if self.payload.header.version == 1: - for real_name, proto_name in common.CROS_PARTITIONS: - new_part_info[real_name] = getattr(manifest, 'new_%s_info' % proto_name) - old_part_info[real_name] = getattr(manifest, 'old_%s_info' % proto_name) - - install_operations.append((common.ROOTFS, manifest.install_operations)) - install_operations.append((common.KERNEL, - manifest.kernel_install_operations)) - else: - for part in manifest.partitions: - name = part.partition_name - new_part_info[name] = part.new_partition_info - old_part_info[name] = part.old_partition_info - install_operations.append((name, part.operations)) + for part in manifest.partitions: + name = part.partition_name + new_part_info[name] = part.new_partition_info + old_part_info[name] = part.old_partition_info + install_operations.append((name, part.operations)) part_names = set(new_part_info.keys()) # Equivalently, old_part_info.keys() diff --git a/scripts/update_payload/checker.py b/scripts/update_payload/checker.py index 6d17fbe5..56a93708 100644 --- a/scripts/update_payload/checker.py +++ b/scripts/update_payload/checker.py @@ -24,6 +24,7 @@ follows: checker.Run(...) 
""" +from __future__ import absolute_import from __future__ import print_function import array @@ -34,22 +35,22 @@ import itertools import os import subprocess +# pylint: disable=redefined-builtin +from six.moves import range + from update_payload import common from update_payload import error from update_payload import format_utils from update_payload import histogram from update_payload import update_metadata_pb2 - # # Constants. # -_CHECK_DST_PSEUDO_EXTENTS = 'dst-pseudo-extents' _CHECK_MOVE_SAME_SRC_DST_BLOCK = 'move-same-src-dst-block' _CHECK_PAYLOAD_SIG = 'payload-sig' CHECKS_TO_DISABLE = ( - _CHECK_DST_PSEUDO_EXTENTS, _CHECK_MOVE_SAME_SRC_DST_BLOCK, _CHECK_PAYLOAD_SIG, ) @@ -66,15 +67,14 @@ _DEFAULT_PUBKEY_FILE_NAME = os.path.join(os.path.dirname(__file__), # Supported minor version map to payload types allowed to be using them. _SUPPORTED_MINOR_VERSIONS = { 0: (_TYPE_FULL,), - 1: (_TYPE_DELTA,), 2: (_TYPE_DELTA,), 3: (_TYPE_DELTA,), 4: (_TYPE_DELTA,), 5: (_TYPE_DELTA,), 6: (_TYPE_DELTA,), + 7: (_TYPE_DELTA,), } -_OLD_DELTA_USABLE_PART_SIZE = 2 * 1024 * 1024 * 1024 # # Helper functions. @@ -323,8 +323,6 @@ class PayloadChecker(object): self.allow_unhashed = allow_unhashed # Disable specific tests. - self.check_dst_pseudo_extents = ( - _CHECK_DST_PSEUDO_EXTENTS not in disabled_tests) self.check_move_same_src_dst_block = ( _CHECK_MOVE_SAME_SRC_DST_BLOCK not in disabled_tests) self.check_payload_sig = _CHECK_PAYLOAD_SIG not in disabled_tests @@ -609,7 +607,7 @@ class PayloadChecker(object): """ self.major_version = self.payload.header.version - part_sizes = collections.defaultdict(int, part_sizes) + part_sizes = part_sizes or collections.defaultdict(int) manifest = self.payload.manifest report.AddSection('manifest') @@ -628,35 +626,23 @@ class PayloadChecker(object): self._CheckPresentIff(self.sigs_offset, self.sigs_size, 'signatures_offset', 'signatures_size', 'manifest') - if self.major_version == common.CHROMEOS_MAJOR_PAYLOAD_VERSION: - for real_name, proto_name in common.CROS_PARTITIONS: - self.old_part_info[real_name] = self._CheckOptionalSubMsg( - manifest, 'old_%s_info' % proto_name, report) - self.new_part_info[real_name] = self._CheckMandatorySubMsg( - manifest, 'new_%s_info' % proto_name, report, 'manifest') - - # Check: old_kernel_info <==> old_rootfs_info. - self._CheckPresentIff(self.old_part_info[common.KERNEL].msg, - self.old_part_info[common.ROOTFS].msg, - 'old_kernel_info', 'old_rootfs_info', 'manifest') - else: - for part in manifest.partitions: - name = part.partition_name - self.old_part_info[name] = self._CheckOptionalSubMsg( - part, 'old_partition_info', report) - self.new_part_info[name] = self._CheckMandatorySubMsg( - part, 'new_partition_info', report, 'manifest.partitions') - - # Check: Old-style partition infos should not be specified. - for _, part in common.CROS_PARTITIONS: - self._CheckElemNotPresent(manifest, 'old_%s_info' % part, 'manifest') - self._CheckElemNotPresent(manifest, 'new_%s_info' % part, 'manifest') - - # Check: If old_partition_info is specified anywhere, it must be - # specified everywhere. 
- old_part_msgs = [part.msg for part in self.old_part_info.values() if part] - self._CheckPresentIffMany(old_part_msgs, 'old_partition_info', - 'manifest.partitions') + for part in manifest.partitions: + name = part.partition_name + self.old_part_info[name] = self._CheckOptionalSubMsg( + part, 'old_partition_info', report) + self.new_part_info[name] = self._CheckMandatorySubMsg( + part, 'new_partition_info', report, 'manifest.partitions') + + # Check: Old-style partition infos should not be specified. + for _, part in common.CROS_PARTITIONS: + self._CheckElemNotPresent(manifest, 'old_%s_info' % part, 'manifest') + self._CheckElemNotPresent(manifest, 'new_%s_info' % part, 'manifest') + + # Check: If old_partition_info is specified anywhere, it must be + # specified everywhere. + old_part_msgs = [part.msg for part in self.old_part_info.values() if part] + self._CheckPresentIffMany(old_part_msgs, 'old_partition_info', + 'manifest.partitions') is_delta = any(part and part.msg for part in self.old_part_info.values()) if is_delta: @@ -666,7 +652,7 @@ class PayloadChecker(object): 'Apparent full payload contains old_{kernel,rootfs}_info.') self.payload_type = _TYPE_DELTA - for part, (msg, part_report) in self.old_part_info.iteritems(): + for part, (msg, part_report) in self.old_part_info.items(): # Check: {size, hash} present in old_{kernel,rootfs}_info. field = 'old_%s_info' % part self.old_fs_sizes[part] = self._CheckMandatoryField(msg, 'size', @@ -687,7 +673,7 @@ class PayloadChecker(object): self.payload_type = _TYPE_FULL # Check: new_{kernel,rootfs}_info present; contains {size, hash}. - for part, (msg, part_report) in self.new_part_info.iteritems(): + for part, (msg, part_report) in self.new_part_info.items(): field = 'new_%s_info' % part self.new_fs_sizes[part] = self._CheckMandatoryField(msg, 'size', part_report, field) @@ -724,8 +710,7 @@ class PayloadChecker(object): self._CheckBlocksFitLength(length, total_blocks, self.block_size, '%s: %s' % (op_name, length_name)) - def _CheckExtents(self, extents, usable_size, block_counters, name, - allow_pseudo=False, allow_signature=False): + def _CheckExtents(self, extents, usable_size, block_counters, name): """Checks a sequence of extents. Args: @@ -733,8 +718,6 @@ class PayloadChecker(object): usable_size: The usable size of the partition to which the extents apply. block_counters: Array of counters corresponding to the number of blocks. name: The name of the extent block. - allow_pseudo: Whether or not pseudo block numbers are allowed. - allow_signature: Whether or not the extents are used for a signature. Returns: The total number of blocks in the extents. @@ -755,20 +738,15 @@ class PayloadChecker(object): if num_blocks == 0: raise error.PayloadError('%s: extent length is zero.' % ex_name) - if start_block != common.PSEUDO_EXTENT_MARKER: - # Check: Make sure we're within the partition limit. - if usable_size and end_block * self.block_size > usable_size: - raise error.PayloadError( - '%s: extent (%s) exceeds usable partition size (%d).' % - (ex_name, common.FormatExtent(ex, self.block_size), usable_size)) + # Check: Make sure we're within the partition limit. + if usable_size and end_block * self.block_size > usable_size: + raise error.PayloadError( + '%s: extent (%s) exceeds usable partition size (%d).' % + (ex_name, common.FormatExtent(ex, self.block_size), usable_size)) - # Record block usage. 
- for i in xrange(start_block, end_block): - block_counters[i] += 1 - elif not (allow_pseudo or (allow_signature and len(extents) == 1)): - # Pseudo-extents must be allowed explicitly, or otherwise be part of a - # signature operation (in which case there has to be exactly one). - raise error.PayloadError('%s: unexpected pseudo-extent.' % ex_name) + # Record block usage. + for i in range(start_block, end_block): + block_counters[i] += 1 total_num_blocks += num_blocks @@ -786,6 +764,11 @@ class PayloadChecker(object): Raises: error.PayloadError if any check fails. """ + # Check: total_dst_blocks is not a floating point. + if isinstance(total_dst_blocks, float): + raise error.PayloadError('%s: contains invalid data type of ' + 'total_dst_blocks.' % op_name) + # Check: Does not contain src extents. if op.src_extents: raise error.PayloadError('%s: contains src_extents.' % op_name) @@ -806,89 +789,6 @@ class PayloadChecker(object): 'space (%d * %d).' % (op_name, data_length, total_dst_blocks, self.block_size)) - def _CheckMoveOperation(self, op, data_offset, total_src_blocks, - total_dst_blocks, op_name): - """Specific checks for MOVE operations. - - Args: - op: The operation object from the manifest. - data_offset: The offset of a data blob for the operation. - total_src_blocks: Total number of blocks in src_extents. - total_dst_blocks: Total number of blocks in dst_extents. - op_name: Operation name for error reporting. - - Raises: - error.PayloadError if any check fails. - """ - # Check: No data_{offset,length}. - if data_offset is not None: - raise error.PayloadError('%s: contains data_{offset,length}.' % op_name) - - # Check: total_src_blocks == total_dst_blocks. - if total_src_blocks != total_dst_blocks: - raise error.PayloadError( - '%s: total src blocks (%d) != total dst blocks (%d).' % - (op_name, total_src_blocks, total_dst_blocks)) - - # Check: For all i, i-th src block index != i-th dst block index. - i = 0 - src_extent_iter = iter(op.src_extents) - dst_extent_iter = iter(op.dst_extents) - src_extent = dst_extent = None - src_idx = src_num = dst_idx = dst_num = 0 - while i < total_src_blocks: - # Get the next source extent, if needed. - if not src_extent: - try: - src_extent = src_extent_iter.next() - except StopIteration: - raise error.PayloadError('%s: ran out of src extents (%d/%d).' % - (op_name, i, total_src_blocks)) - src_idx = src_extent.start_block - src_num = src_extent.num_blocks - - # Get the next dest extent, if needed. - if not dst_extent: - try: - dst_extent = dst_extent_iter.next() - except StopIteration: - raise error.PayloadError('%s: ran out of dst extents (%d/%d).' % - (op_name, i, total_dst_blocks)) - dst_idx = dst_extent.start_block - dst_num = dst_extent.num_blocks - - # Check: start block is not 0. See crbug/480751; there are still versions - # of update_engine which fail when seeking to 0 in PReadAll and PWriteAll, - # so we need to fail payloads that try to MOVE to/from block 0. - if src_idx == 0 or dst_idx == 0: - raise error.PayloadError( - '%s: MOVE operation cannot have extent with start block 0' % - op_name) - - if self.check_move_same_src_dst_block and src_idx == dst_idx: - raise error.PayloadError( - '%s: src/dst block number %d is the same (%d).' % - (op_name, i, src_idx)) - - advance = min(src_num, dst_num) - i += advance - - src_idx += advance - src_num -= advance - if src_num == 0: - src_extent = None - - dst_idx += advance - dst_num -= advance - if dst_num == 0: - dst_extent = None - - # Make sure we've exhausted all src/dst extents. 
- if src_extent: - raise error.PayloadError('%s: excess src blocks.' % op_name) - if dst_extent: - raise error.PayloadError('%s: excess dst blocks.' % op_name) - def _CheckZeroOperation(self, op, op_name): """Specific checks for ZERO operations. @@ -908,7 +808,7 @@ class PayloadChecker(object): raise error.PayloadError('%s: contains data_offset.' % op_name) def _CheckAnyDiffOperation(self, op, data_length, total_dst_blocks, op_name): - """Specific checks for BSDIFF, SOURCE_BSDIFF, PUFFDIFF and BROTLI_BSDIFF + """Specific checks for SOURCE_BSDIFF, PUFFDIFF and BROTLI_BSDIFF operations. Args: @@ -933,8 +833,7 @@ class PayloadChecker(object): total_dst_blocks * self.block_size)) # Check the existence of src_length and dst_length for legacy bsdiffs. - if (op.type == common.OpType.BSDIFF or - (op.type == common.OpType.SOURCE_BSDIFF and self.minor_version <= 3)): + if op.type == common.OpType.SOURCE_BSDIFF and self.minor_version <= 3: if not op.HasField('src_length') or not op.HasField('dst_length'): raise error.PayloadError('%s: require {src,dst}_length.' % op_name) else: @@ -983,21 +882,19 @@ class PayloadChecker(object): if self.minor_version >= 3 and op.src_sha256_hash is None: raise error.PayloadError('%s: source hash missing.' % op_name) - def _CheckOperation(self, op, op_name, is_last, old_block_counters, - new_block_counters, old_usable_size, new_usable_size, - prev_data_offset, allow_signature, blob_hash_counts): + def _CheckOperation(self, op, op_name, old_block_counters, new_block_counters, + old_usable_size, new_usable_size, prev_data_offset, + blob_hash_counts): """Checks a single update operation. Args: op: The operation object. op_name: Operation name string for error reporting. - is_last: Whether this is the last operation in the sequence. old_block_counters: Arrays of block read counters. new_block_counters: Arrays of block write counters. old_usable_size: The overall usable size for src data in bytes. new_usable_size: The overall usable size for dst data in bytes. prev_data_offset: Offset of last used data bytes. - allow_signature: Whether this may be a signature operation. blob_hash_counts: Counters for hashed/unhashed blobs. Returns: @@ -1009,14 +906,10 @@ class PayloadChecker(object): # Check extents. total_src_blocks = self._CheckExtents( op.src_extents, old_usable_size, old_block_counters, - op_name + '.src_extents', allow_pseudo=True) - allow_signature_in_extents = (allow_signature and is_last and - op.type == common.OpType.REPLACE) + op_name + '.src_extents') total_dst_blocks = self._CheckExtents( op.dst_extents, new_usable_size, new_block_counters, - op_name + '.dst_extents', - allow_pseudo=(not self.check_dst_pseudo_extents), - allow_signature=allow_signature_in_extents) + op_name + '.dst_extents') # Check: data_offset present <==> data_length present. data_offset = self._CheckOptionalField(op, 'data_offset', None) @@ -1052,9 +945,7 @@ class PayloadChecker(object): (op_name, common.FormatSha256(op.data_sha256_hash), common.FormatSha256(actual_hash.digest()))) elif data_offset is not None: - if allow_signature_in_extents: - blob_hash_counts['signature'] += 1 - elif self.allow_unhashed: + if self.allow_unhashed: blob_hash_counts['unhashed'] += 1 else: raise error.PayloadError('%s: unhashed operation not allowed.' % @@ -1068,19 +959,11 @@ class PayloadChecker(object): (op_name, data_offset, prev_data_offset)) # Type-specific checks. 
- if op.type in (common.OpType.REPLACE, common.OpType.REPLACE_BZ): - self._CheckReplaceOperation(op, data_length, total_dst_blocks, op_name) - elif (op.type == common.OpType.REPLACE_XZ and - (self.minor_version >= 3 or - self.major_version >= common.BRILLO_MAJOR_PAYLOAD_VERSION)): + if op.type in (common.OpType.REPLACE, common.OpType.REPLACE_BZ, + common.OpType.REPLACE_XZ): self._CheckReplaceOperation(op, data_length, total_dst_blocks, op_name) - elif op.type == common.OpType.MOVE and self.minor_version == 1: - self._CheckMoveOperation(op, data_offset, total_src_blocks, - total_dst_blocks, op_name) elif op.type == common.OpType.ZERO and self.minor_version >= 4: self._CheckZeroOperation(op, op_name) - elif op.type == common.OpType.BSDIFF and self.minor_version == 1: - self._CheckAnyDiffOperation(op, data_length, total_dst_blocks, op_name) elif op.type == common.OpType.SOURCE_COPY and self.minor_version >= 2: self._CheckSourceCopyOperation(data_offset, total_src_blocks, total_dst_blocks, op_name) @@ -1102,7 +985,7 @@ class PayloadChecker(object): def _SizeToNumBlocks(self, size): """Returns the number of blocks needed to contain a given byte size.""" - return (size + self.block_size - 1) / self.block_size + return (size + self.block_size - 1) // self.block_size def _AllocBlockCounters(self, total_size): """Returns a freshly initialized array of block counters. @@ -1122,7 +1005,7 @@ class PayloadChecker(object): def _CheckOperations(self, operations, report, base_name, old_fs_size, new_fs_size, old_usable_size, new_usable_size, - prev_data_offset, allow_signature): + prev_data_offset): """Checks a sequence of update operations. Args: @@ -1134,7 +1017,6 @@ class PayloadChecker(object): old_usable_size: The overall usable size of the old partition in bytes. new_usable_size: The overall usable size of the new partition in bytes. prev_data_offset: Offset of last used data bytes. - allow_signature: Whether this sequence may contain signature operations. Returns: The total data blob size used. @@ -1149,9 +1031,7 @@ class PayloadChecker(object): common.OpType.REPLACE: 0, common.OpType.REPLACE_BZ: 0, common.OpType.REPLACE_XZ: 0, - common.OpType.MOVE: 0, common.OpType.ZERO: 0, - common.OpType.BSDIFF: 0, common.OpType.SOURCE_COPY: 0, common.OpType.SOURCE_BSDIFF: 0, common.OpType.PUFFDIFF: 0, @@ -1162,8 +1042,6 @@ class PayloadChecker(object): common.OpType.REPLACE: 0, common.OpType.REPLACE_BZ: 0, common.OpType.REPLACE_XZ: 0, - # MOVE operations don't have blobs. - common.OpType.BSDIFF: 0, # SOURCE_COPY operations don't have blobs. common.OpType.SOURCE_BSDIFF: 0, common.OpType.PUFFDIFF: 0, @@ -1174,8 +1052,6 @@ class PayloadChecker(object): 'hashed': 0, 'unhashed': 0, } - if allow_signature: - blob_hash_counts['signature'] = 0 # Allocate old and new block counters. old_block_counters = (self._AllocBlockCounters(old_usable_size) @@ -1188,16 +1064,14 @@ class PayloadChecker(object): op_num += 1 # Check: Type is valid. - if op.type not in op_counts.keys(): + if op.type not in op_counts: raise error.PayloadError('%s: invalid type (%d).' 
% (op_name, op.type)) op_counts[op.type] += 1 - is_last = op_num == len(operations) curr_data_used = self._CheckOperation( - op, op_name, is_last, old_block_counters, new_block_counters, + op, op_name, old_block_counters, new_block_counters, old_usable_size, new_usable_size, - prev_data_offset + total_data_used, allow_signature, - blob_hash_counts) + prev_data_offset + total_data_used, blob_hash_counts) if curr_data_used: op_blob_totals[op.type] += curr_data_used total_data_used += curr_data_used @@ -1251,21 +1125,17 @@ class PayloadChecker(object): if not sigs.signatures: raise error.PayloadError('Signature block is empty.') - last_ops_section = (self.payload.manifest.kernel_install_operations or - self.payload.manifest.install_operations) - - # Only major version 1 has the fake signature OP at the end. - if self.major_version == common.CHROMEOS_MAJOR_PAYLOAD_VERSION: - fake_sig_op = last_ops_section[-1] + # Check that we don't have the signature operation blob at the end (used to + # be for major version 1). + last_partition = self.payload.manifest.partitions[-1] + if last_partition.operations: + last_op = last_partition.operations[-1] # Check: signatures_{offset,size} must match the last (fake) operation. - if not (fake_sig_op.type == common.OpType.REPLACE and - self.sigs_offset == fake_sig_op.data_offset and - self.sigs_size == fake_sig_op.data_length): - raise error.PayloadError('Signatures_{offset,size} (%d+%d) does not' - ' match last operation (%d+%d).' % - (self.sigs_offset, self.sigs_size, - fake_sig_op.data_offset, - fake_sig_op.data_length)) + if (last_op.type == common.OpType.REPLACE and + last_op.data_offset == self.sigs_offset and + last_op.data_length == self.sigs_size): + raise error.PayloadError('It seems like the last operation is the ' + 'signature blob. This is an invalid payload.') # Compute the checksum of all data up to signature blob. # TODO(garnold) we're re-reading the whole data section into a string @@ -1280,17 +1150,13 @@ class PayloadChecker(object): sig_report = report.AddSubReport(sig_name) # Check: Signature contains mandatory fields. - self._CheckMandatoryField(sig, 'version', sig_report, sig_name) self._CheckMandatoryField(sig, 'data', None, sig_name) sig_report.AddField('data len', len(sig.data)) # Check: Signatures pertains to actual payload hash. - if sig.version == 1: + if sig.data: self._CheckSha256Signature(sig.data, pubkey_file_name, payload_hasher.digest(), sig_name) - else: - raise error.PayloadError('Unknown signature version (%d).' % - sig.version) def Run(self, pubkey_file_name=None, metadata_sig_file=None, metadata_size=0, part_sizes=None, report_out_file=None): @@ -1344,62 +1210,38 @@ class PayloadChecker(object): self._CheckManifest(report, part_sizes) assert self.payload_type, 'payload type should be known by now' - manifest = self.payload.manifest - - # Part 3: Examine partition operations. - install_operations = [] - if self.major_version == common.CHROMEOS_MAJOR_PAYLOAD_VERSION: - # partitions field should not ever exist in major version 1 payloads - self._CheckRepeatedElemNotPresent(manifest, 'partitions', 'manifest') - - install_operations.append((common.ROOTFS, manifest.install_operations)) - install_operations.append((common.KERNEL, - manifest.kernel_install_operations)) - - else: - self._CheckRepeatedElemNotPresent(manifest, 'install_operations', + # Make sure deprecated values are not present in the payload. 
+ for field in ('install_operations', 'kernel_install_operations'): + self._CheckRepeatedElemNotPresent(self.payload.manifest, field, 'manifest') - self._CheckRepeatedElemNotPresent(manifest, 'kernel_install_operations', - 'manifest') - - for update in manifest.partitions: - install_operations.append((update.partition_name, update.operations)) + for field in ('old_kernel_info', 'old_rootfs_info', + 'new_kernel_info', 'new_rootfs_info'): + self._CheckElemNotPresent(self.payload.manifest, field, 'manifest') total_blob_size = 0 - for part, operations in install_operations: + for part, operations in ((p.partition_name, p.operations) + for p in self.payload.manifest.partitions): report.AddSection('%s operations' % part) new_fs_usable_size = self.new_fs_sizes[part] old_fs_usable_size = self.old_fs_sizes[part] - if part_sizes.get(part, None): + if part_sizes is not None and part_sizes.get(part, None): new_fs_usable_size = old_fs_usable_size = part_sizes[part] - # Infer the usable partition size when validating rootfs operations: - # - If rootfs partition size was provided, use that. - # - Otherwise, if this is an older delta (minor version < 2), stick with - # a known constant size. This is necessary because older deltas may - # exceed the filesystem size when moving data blocks around. - # - Otherwise, use the encoded filesystem size. - elif self.payload_type == _TYPE_DELTA and part == common.ROOTFS and \ - self.minor_version in (None, 1): - new_fs_usable_size = old_fs_usable_size = _OLD_DELTA_USABLE_PART_SIZE - - # TODO(garnold)(chromium:243559) only default to the filesystem size if - # no explicit size provided *and* the partition size is not embedded in - # the payload; see issue for more details. + + # TODO(chromium:243559) only default to the filesystem size if no + # explicit size provided *and* the partition size is not embedded in the + # payload; see issue for more details. total_blob_size += self._CheckOperations( operations, report, '%s_install_operations' % part, self.old_fs_sizes[part], self.new_fs_sizes[part], - old_fs_usable_size, new_fs_usable_size, total_blob_size, - (self.major_version == common.CHROMEOS_MAJOR_PAYLOAD_VERSION - and part == common.KERNEL)) + old_fs_usable_size, new_fs_usable_size, total_blob_size) # Check: Operations data reach the end of the payload file. used_payload_size = self.payload.data_offset + total_blob_size # Major versions 2 and higher have a signature at the end, so it should be # considered in the total size of the image. - if (self.major_version >= common.BRILLO_MAJOR_PAYLOAD_VERSION and - self.sigs_size): + if self.sigs_size: used_payload_size += self.sigs_size if used_payload_size != payload_file_size: diff --git a/scripts/update_payload/checker_unittest.py b/scripts/update_payload/checker_unittest.py index 7e52233e..993b785c 100755 --- a/scripts/update_payload/checker_unittest.py +++ b/scripts/update_payload/checker_unittest.py @@ -1,4 +1,4 @@ -#!/usr/bin/python2 +#!/usr/bin/env python # # Copyright (C) 2013 The Android Open Source Project # @@ -17,35 +17,36 @@ """Unit testing checker.py.""" -from __future__ import print_function +# Disable check for function names to avoid errors based on old code +# pylint: disable-msg=invalid-name + +from __future__ import absolute_import import array import collections -import cStringIO import hashlib +import io import itertools import os import unittest -# pylint cannot find mox. 
-# pylint: disable=F0401 -import mox +from six.moves import zip + +import mock # pylint: disable=import-error from update_payload import checker from update_payload import common from update_payload import test_utils from update_payload import update_metadata_pb2 from update_payload.error import PayloadError -from update_payload.payload import Payload # Avoid name conflicts later. +from update_payload.payload import Payload # Avoid name conflicts later. def _OpTypeByName(op_name): - """Returns the type of an operation from itsname.""" + """Returns the type of an operation from its name.""" op_name_to_type = { 'REPLACE': common.OpType.REPLACE, 'REPLACE_BZ': common.OpType.REPLACE_BZ, - 'MOVE': common.OpType.MOVE, - 'BSDIFF': common.OpType.BSDIFF, 'SOURCE_COPY': common.OpType.SOURCE_COPY, 'SOURCE_BSDIFF': common.OpType.SOURCE_BSDIFF, 'ZERO': common.OpType.ZERO, @@ -65,7 +66,7 @@ def _GetPayloadChecker(payload_gen_write_to_file_func, payload_gen_dargs=None, if checker_init_dargs is None: checker_init_dargs = {} - payload_file = cStringIO.StringIO() + payload_file = io.BytesIO() payload_gen_write_to_file_func(payload_file, **payload_gen_dargs) payload_file.seek(0) payload = Payload(payload_file) @@ -75,7 +76,7 @@ def _GetPayloadChecker(payload_gen_write_to_file_func, payload_gen_dargs=None, def _GetPayloadCheckerWithData(payload_gen): """Returns a payload checker from a given payload generator.""" - payload_file = cStringIO.StringIO() + payload_file = io.BytesIO() payload_gen.WriteToFile(payload_file) payload_file.seek(0) payload = Payload(payload_file) @@ -89,7 +90,7 @@ def _GetPayloadCheckerWithData(payload_gen): # pylint: disable=W0212 # Don't bark about missing members of classes you cannot import. # pylint: disable=E1101 -class PayloadCheckerTest(mox.MoxTestBase): +class PayloadCheckerTest(unittest.TestCase): """Tests the PayloadChecker class. In addition to ordinary testFoo() methods, which are automatically invoked by @@ -102,11 +103,42 @@ class PayloadCheckerTest(mox.MoxTestBase): all such tests is done in AddAllParametricTests(). """ + def setUp(self): + """setUp function for unittest testcase""" + self.mock_checks = [] + + def tearDown(self): + """tearDown function for unittest testcase""" + # Verify that all mock functions were called. + for check in self.mock_checks: + check.mock_fn.assert_called_once_with(*check.exp_args, **check.exp_kwargs) + + class MockChecksAtTearDown(object): + """Mock data storage. + + This class stores the mock functions and its arguments to be checked at a + later point. + """ + def __init__(self, mock_fn, *args, **kwargs): + self.mock_fn = mock_fn + self.exp_args = args + self.exp_kwargs = kwargs + + def addPostCheckForMockFunction(self, mock_fn, *args, **kwargs): + """Store a mock function and its arguments to self.mock_checks + + Args: + mock_fn: mock function object + args: expected positional arguments for the mock_fn + kwargs: expected named arguments for the mock_fn + """ + self.mock_checks.append(self.MockChecksAtTearDown(mock_fn, *args, **kwargs)) + def MockPayload(self): """Create a mock payload object, complete with a mock manifest.""" - payload = self.mox.CreateMock(Payload) + payload = mock.create_autospec(Payload) payload.is_init = True - payload.manifest = self.mox.CreateMock( + payload.manifest = mock.create_autospec( update_metadata_pb2.DeltaArchiveManifest) return payload @@ -175,19 +207,20 @@ class PayloadCheckerTest(mox.MoxTestBase): subreport = 'fake subreport' # Create a mock message. 
- msg = self.mox.CreateMock(update_metadata_pb2._message.Message) - msg.HasField(name).AndReturn(is_present) + msg = mock.create_autospec(update_metadata_pb2._message.Message) + self.addPostCheckForMockFunction(msg.HasField, name) + msg.HasField.return_value = is_present setattr(msg, name, val) - # Create a mock report. - report = self.mox.CreateMock(checker._PayloadReport) + report = mock.create_autospec(checker._PayloadReport) if is_present: if is_submsg: - report.AddSubReport(name).AndReturn(subreport) + self.addPostCheckForMockFunction(report.AddSubReport, name) + report.AddSubReport.return_value = subreport else: - report.AddField(name, convert(val), linebreak=linebreak, indent=indent) + self.addPostCheckForMockFunction(report.AddField, name, convert(val), + linebreak=linebreak, indent=indent) - self.mox.ReplayAll() return (msg, report, subreport, name, val) def DoAddElemTest(self, is_present, is_mandatory, is_submsg, convert, @@ -213,9 +246,9 @@ class PayloadCheckerTest(mox.MoxTestBase): else: ret_val, ret_subreport = checker.PayloadChecker._CheckElem(*args, **kwargs) - self.assertEquals(val if is_present else None, ret_val) - self.assertEquals(subreport if is_present and is_submsg else None, - ret_subreport) + self.assertEqual(val if is_present else None, ret_val) + self.assertEqual(subreport if is_present and is_submsg else None, + ret_subreport) def DoAddFieldTest(self, is_mandatory, is_present, convert, linebreak, indent): @@ -245,7 +278,7 @@ class PayloadCheckerTest(mox.MoxTestBase): self.assertRaises(PayloadError, tested_func, *args, **kwargs) else: ret_val = tested_func(*args, **kwargs) - self.assertEquals(val if is_present else None, ret_val) + self.assertEqual(val if is_present else None, ret_val) def DoAddSubMsgTest(self, is_mandatory, is_present): """Parametrized testing of _Check{Mandatory,Optional}SubMsg(). @@ -269,8 +302,8 @@ class PayloadCheckerTest(mox.MoxTestBase): self.assertRaises(PayloadError, tested_func, *args) else: ret_val, ret_subreport = tested_func(*args) - self.assertEquals(val if is_present else None, ret_val) - self.assertEquals(subreport if is_present else None, ret_subreport) + self.assertEqual(val if is_present else None, ret_val) + self.assertEqual(subreport if is_present else None, ret_subreport) def testCheckPresentIff(self): """Tests _CheckPresentIff().""" @@ -296,15 +329,14 @@ class PayloadCheckerTest(mox.MoxTestBase): returned_signed_hash: The signed hash data retuned by openssl. expected_signed_hash: The signed hash data to compare against. """ - try: - # Stub out the subprocess invocation. - self.mox.StubOutWithMock(checker.PayloadChecker, '_Run') + # Stub out the subprocess invocation. 
+ with mock.patch.object(checker.PayloadChecker, '_Run') \ + as mock_payload_checker: if expect_subprocess_call: - checker.PayloadChecker._Run( - mox.IsA(list), send_data=sig_data).AndReturn( - (sig_asn1_header + returned_signed_hash, None)) + mock_payload_checker([], send_data=sig_data) + mock_payload_checker.return_value = ( + sig_asn1_header + returned_signed_hash, None) - self.mox.ReplayAll() if expect_pass: self.assertIsNone(checker.PayloadChecker._CheckSha256Signature( sig_data, 'foo', expected_signed_hash, 'bar')) @@ -312,13 +344,11 @@ class PayloadCheckerTest(mox.MoxTestBase): self.assertRaises(PayloadError, checker.PayloadChecker._CheckSha256Signature, sig_data, 'foo', expected_signed_hash, 'bar') - finally: - self.mox.UnsetStubs() def testCheckSha256Signature_Pass(self): """Tests _CheckSha256Signature(); pass case.""" sig_data = 'fake-signature'.ljust(256) - signed_hash = hashlib.sha256('fake-data').digest() + signed_hash = hashlib.sha256(b'fake-data').digest() self.DoCheckSha256SignatureTest(True, True, sig_data, common.SIG_ASN1_HEADER, signed_hash, signed_hash) @@ -326,7 +356,7 @@ class PayloadCheckerTest(mox.MoxTestBase): def testCheckSha256Signature_FailBadSignature(self): """Tests _CheckSha256Signature(); fails due to malformed signature.""" sig_data = 'fake-signature' # Malformed (not 256 bytes in length). - signed_hash = hashlib.sha256('fake-data').digest() + signed_hash = hashlib.sha256(b'fake-data').digest() self.DoCheckSha256SignatureTest(False, False, sig_data, common.SIG_ASN1_HEADER, signed_hash, signed_hash) @@ -334,7 +364,7 @@ class PayloadCheckerTest(mox.MoxTestBase): def testCheckSha256Signature_FailBadOutputLength(self): """Tests _CheckSha256Signature(); fails due to unexpected output length.""" sig_data = 'fake-signature'.ljust(256) - signed_hash = 'fake-hash' # Malformed (not 32 bytes in length). + signed_hash = b'fake-hash' # Malformed (not 32 bytes in length). self.DoCheckSha256SignatureTest(False, True, sig_data, common.SIG_ASN1_HEADER, signed_hash, signed_hash) @@ -342,16 +372,16 @@ class PayloadCheckerTest(mox.MoxTestBase): def testCheckSha256Signature_FailBadAsnHeader(self): """Tests _CheckSha256Signature(); fails due to bad ASN1 header.""" sig_data = 'fake-signature'.ljust(256) - signed_hash = hashlib.sha256('fake-data').digest() - bad_asn1_header = 'bad-asn-header'.ljust(len(common.SIG_ASN1_HEADER)) + signed_hash = hashlib.sha256(b'fake-data').digest() + bad_asn1_header = b'bad-asn-header'.ljust(len(common.SIG_ASN1_HEADER)) self.DoCheckSha256SignatureTest(False, True, sig_data, bad_asn1_header, signed_hash, signed_hash) def testCheckSha256Signature_FailBadHash(self): """Tests _CheckSha256Signature(); fails due to bad hash returned.""" sig_data = 'fake-signature'.ljust(256) - expected_signed_hash = hashlib.sha256('fake-data').digest() - returned_signed_hash = hashlib.sha256('bad-fake-data').digest() + expected_signed_hash = hashlib.sha256(b'fake-data').digest() + returned_signed_hash = hashlib.sha256(b'bad-fake-data').digest() self.DoCheckSha256SignatureTest(False, True, sig_data, common.SIG_ASN1_HEADER, expected_signed_hash, returned_signed_hash) @@ -429,10 +459,10 @@ class PayloadCheckerTest(mox.MoxTestBase): payload_gen.SetBlockSize(test_utils.KiB(4)) # Add some operations. 
- payload_gen.AddOperation(False, common.OpType.MOVE, + payload_gen.AddOperation(common.ROOTFS, common.OpType.SOURCE_COPY, src_extents=[(0, 16), (16, 497)], dst_extents=[(16, 496), (0, 16)]) - payload_gen.AddOperation(True, common.OpType.MOVE, + payload_gen.AddOperation(common.KERNEL, common.OpType.SOURCE_COPY, src_extents=[(0, 8), (8, 8)], dst_extents=[(8, 8), (0, 8)]) @@ -457,21 +487,23 @@ class PayloadCheckerTest(mox.MoxTestBase): # Add old kernel/rootfs partition info, as required. if fail_mismatched_oki_ori or fail_old_kernel_fs_size or fail_bad_oki: oki_hash = (None if fail_bad_oki - else hashlib.sha256('fake-oki-content').digest()) - payload_gen.SetPartInfo(True, False, old_kernel_fs_size, oki_hash) + else hashlib.sha256(b'fake-oki-content').digest()) + payload_gen.SetPartInfo(common.KERNEL, False, old_kernel_fs_size, + oki_hash) if not fail_mismatched_oki_ori and (fail_old_rootfs_fs_size or fail_bad_ori): ori_hash = (None if fail_bad_ori - else hashlib.sha256('fake-ori-content').digest()) - payload_gen.SetPartInfo(False, False, old_rootfs_fs_size, ori_hash) + else hashlib.sha256(b'fake-ori-content').digest()) + payload_gen.SetPartInfo(common.ROOTFS, False, old_rootfs_fs_size, + ori_hash) # Add new kernel/rootfs partition info. payload_gen.SetPartInfo( - True, True, new_kernel_fs_size, - None if fail_bad_nki else hashlib.sha256('fake-nki-content').digest()) + common.KERNEL, True, new_kernel_fs_size, + None if fail_bad_nki else hashlib.sha256(b'fake-nki-content').digest()) payload_gen.SetPartInfo( - False, True, new_rootfs_fs_size, - None if fail_bad_nri else hashlib.sha256('fake-nri-content').digest()) + common.ROOTFS, True, new_rootfs_fs_size, + None if fail_bad_nri else hashlib.sha256(b'fake-nri-content').digest()) # Set the minor version. payload_gen.SetMinorVersion(0) @@ -518,28 +550,11 @@ class PayloadCheckerTest(mox.MoxTestBase): # Passes w/ all real extents. extents = self.NewExtentList((0, 4), (8, 3), (1024, 16)) - self.assertEquals( + self.assertEqual( 23, payload_checker._CheckExtents(extents, (1024 + 16) * block_size, collections.defaultdict(int), 'foo')) - # Passes w/ pseudo-extents (aka sparse holes). - extents = self.NewExtentList((0, 4), (common.PSEUDO_EXTENT_MARKER, 5), - (8, 3)) - self.assertEquals( - 12, - payload_checker._CheckExtents(extents, (1024 + 16) * block_size, - collections.defaultdict(int), 'foo', - allow_pseudo=True)) - - # Passes w/ pseudo-extent due to a signature. - extents = self.NewExtentList((common.PSEUDO_EXTENT_MARKER, 2)) - self.assertEquals( - 2, - payload_checker._CheckExtents(extents, (1024 + 16) * block_size, - collections.defaultdict(int), 'foo', - allow_signature=True)) - # Fails, extent missing a start block. extents = self.NewExtentList((-1, 4), (8, 3), (1024, 16)) self.assertRaises( @@ -570,34 +585,34 @@ class PayloadCheckerTest(mox.MoxTestBase): block_size = payload_checker.block_size data_length = 10000 - op = self.mox.CreateMock( - update_metadata_pb2.InstallOperation) + op = mock.create_autospec(update_metadata_pb2.InstallOperation) op.type = common.OpType.REPLACE # Pass. op.src_extents = [] self.assertIsNone( payload_checker._CheckReplaceOperation( - op, data_length, (data_length + block_size - 1) / block_size, + op, data_length, (data_length + block_size - 1) // block_size, 'foo')) # Fail, src extents founds. 
op.src_extents = ['bar'] self.assertRaises( PayloadError, payload_checker._CheckReplaceOperation, - op, data_length, (data_length + block_size - 1) / block_size, 'foo') + op, data_length, (data_length + block_size - 1) // block_size, 'foo') # Fail, missing data. op.src_extents = [] self.assertRaises( PayloadError, payload_checker._CheckReplaceOperation, - op, None, (data_length + block_size - 1) / block_size, 'foo') + op, None, (data_length + block_size - 1) // block_size, 'foo') # Fail, length / block number mismatch. op.src_extents = ['bar'] self.assertRaises( PayloadError, payload_checker._CheckReplaceOperation, - op, data_length, (data_length + block_size - 1) / block_size + 1, 'foo') + op, data_length, (data_length + block_size - 1) // block_size + 1, + 'foo') def testCheckReplaceBzOperation(self): """Tests _CheckReplaceOperation() where op.type == REPLACE_BZ.""" @@ -605,7 +620,7 @@ class PayloadCheckerTest(mox.MoxTestBase): block_size = payload_checker.block_size data_length = block_size * 3 - op = self.mox.CreateMock( + op = mock.create_autospec( update_metadata_pb2.InstallOperation) op.type = common.OpType.REPLACE_BZ @@ -613,25 +628,32 @@ class PayloadCheckerTest(mox.MoxTestBase): op.src_extents = [] self.assertIsNone( payload_checker._CheckReplaceOperation( - op, data_length, (data_length + block_size - 1) / block_size + 5, + op, data_length, (data_length + block_size - 1) // block_size + 5, 'foo')) # Fail, src extents founds. op.src_extents = ['bar'] self.assertRaises( PayloadError, payload_checker._CheckReplaceOperation, - op, data_length, (data_length + block_size - 1) / block_size + 5, 'foo') + op, data_length, (data_length + block_size - 1) // block_size + 5, + 'foo') # Fail, missing data. op.src_extents = [] self.assertRaises( PayloadError, payload_checker._CheckReplaceOperation, - op, None, (data_length + block_size - 1) / block_size, 'foo') + op, None, (data_length + block_size - 1) // block_size, 'foo') # Fail, too few blocks to justify BZ. op.src_extents = [] self.assertRaises( PayloadError, payload_checker._CheckReplaceOperation, + op, data_length, (data_length + block_size - 1) // block_size, 'foo') + + # Fail, total_dst_blocks is a floating point value. + op.src_extents = [] + self.assertRaises( + PayloadError, payload_checker._CheckReplaceOperation, op, data_length, (data_length + block_size - 1) / block_size, 'foo') def testCheckReplaceXzOperation(self): @@ -640,7 +662,7 @@ class PayloadCheckerTest(mox.MoxTestBase): block_size = payload_checker.block_size data_length = block_size * 3 - op = self.mox.CreateMock( + op = mock.create_autospec( update_metadata_pb2.InstallOperation) op.type = common.OpType.REPLACE_XZ @@ -648,152 +670,33 @@ class PayloadCheckerTest(mox.MoxTestBase): op.src_extents = [] self.assertIsNone( payload_checker._CheckReplaceOperation( - op, data_length, (data_length + block_size - 1) / block_size + 5, + op, data_length, (data_length + block_size - 1) // block_size + 5, 'foo')) # Fail, src extents founds. op.src_extents = ['bar'] self.assertRaises( PayloadError, payload_checker._CheckReplaceOperation, - op, data_length, (data_length + block_size - 1) / block_size + 5, 'foo') + op, data_length, (data_length + block_size - 1) // block_size + 5, + 'foo') # Fail, missing data. op.src_extents = [] self.assertRaises( PayloadError, payload_checker._CheckReplaceOperation, - op, None, (data_length + block_size - 1) / block_size, 'foo') + op, None, (data_length + block_size - 1) // block_size, 'foo') # Fail, too few blocks to justify XZ. 
op.src_extents = [] self.assertRaises( PayloadError, payload_checker._CheckReplaceOperation, - op, data_length, (data_length + block_size - 1) / block_size, 'foo') - - def testCheckMoveOperation_Pass(self): - """Tests _CheckMoveOperation(); pass case.""" - payload_checker = checker.PayloadChecker(self.MockPayload()) - op = update_metadata_pb2.InstallOperation() - op.type = common.OpType.MOVE - - self.AddToMessage(op.src_extents, - self.NewExtentList((1, 4), (12, 2), (1024, 128))) - self.AddToMessage(op.dst_extents, - self.NewExtentList((16, 128), (512, 6))) - self.assertIsNone( - payload_checker._CheckMoveOperation(op, None, 134, 134, 'foo')) - - def testCheckMoveOperation_FailContainsData(self): - """Tests _CheckMoveOperation(); fails, message contains data.""" - payload_checker = checker.PayloadChecker(self.MockPayload()) - op = update_metadata_pb2.InstallOperation() - op.type = common.OpType.MOVE - - self.AddToMessage(op.src_extents, - self.NewExtentList((1, 4), (12, 2), (1024, 128))) - self.AddToMessage(op.dst_extents, - self.NewExtentList((16, 128), (512, 6))) - self.assertRaises( - PayloadError, payload_checker._CheckMoveOperation, - op, 1024, 134, 134, 'foo') - - def testCheckMoveOperation_FailInsufficientSrcBlocks(self): - """Tests _CheckMoveOperation(); fails, not enough actual src blocks.""" - payload_checker = checker.PayloadChecker(self.MockPayload()) - op = update_metadata_pb2.InstallOperation() - op.type = common.OpType.MOVE + op, data_length, (data_length + block_size - 1) // block_size, 'foo') - self.AddToMessage(op.src_extents, - self.NewExtentList((1, 4), (12, 2), (1024, 127))) - self.AddToMessage(op.dst_extents, - self.NewExtentList((16, 128), (512, 6))) - self.assertRaises( - PayloadError, payload_checker._CheckMoveOperation, - op, None, 134, 134, 'foo') - - def testCheckMoveOperation_FailInsufficientDstBlocks(self): - """Tests _CheckMoveOperation(); fails, not enough actual dst blocks.""" - payload_checker = checker.PayloadChecker(self.MockPayload()) - op = update_metadata_pb2.InstallOperation() - op.type = common.OpType.MOVE - - self.AddToMessage(op.src_extents, - self.NewExtentList((1, 4), (12, 2), (1024, 128))) - self.AddToMessage(op.dst_extents, - self.NewExtentList((16, 128), (512, 5))) - self.assertRaises( - PayloadError, payload_checker._CheckMoveOperation, - op, None, 134, 134, 'foo') - - def testCheckMoveOperation_FailExcessSrcBlocks(self): - """Tests _CheckMoveOperation(); fails, too many actual src blocks.""" - payload_checker = checker.PayloadChecker(self.MockPayload()) - op = update_metadata_pb2.InstallOperation() - op.type = common.OpType.MOVE - - self.AddToMessage(op.src_extents, - self.NewExtentList((1, 4), (12, 2), (1024, 128))) - self.AddToMessage(op.dst_extents, - self.NewExtentList((16, 128), (512, 5))) - self.assertRaises( - PayloadError, payload_checker._CheckMoveOperation, - op, None, 134, 134, 'foo') - self.AddToMessage(op.src_extents, - self.NewExtentList((1, 4), (12, 2), (1024, 129))) - self.AddToMessage(op.dst_extents, - self.NewExtentList((16, 128), (512, 6))) - self.assertRaises( - PayloadError, payload_checker._CheckMoveOperation, - op, None, 134, 134, 'foo') - - def testCheckMoveOperation_FailExcessDstBlocks(self): - """Tests _CheckMoveOperation(); fails, too many actual dst blocks.""" - payload_checker = checker.PayloadChecker(self.MockPayload()) - op = update_metadata_pb2.InstallOperation() - op.type = common.OpType.MOVE - - self.AddToMessage(op.src_extents, - self.NewExtentList((1, 4), (12, 2), (1024, 128))) - 
self.AddToMessage(op.dst_extents, - self.NewExtentList((16, 128), (512, 7))) - self.assertRaises( - PayloadError, payload_checker._CheckMoveOperation, - op, None, 134, 134, 'foo') - - def testCheckMoveOperation_FailStagnantBlocks(self): - """Tests _CheckMoveOperation(); fails, there are blocks that do not move.""" - payload_checker = checker.PayloadChecker(self.MockPayload()) - op = update_metadata_pb2.InstallOperation() - op.type = common.OpType.MOVE - - self.AddToMessage(op.src_extents, - self.NewExtentList((1, 4), (12, 2), (1024, 128))) - self.AddToMessage(op.dst_extents, - self.NewExtentList((8, 128), (512, 6))) - self.assertRaises( - PayloadError, payload_checker._CheckMoveOperation, - op, None, 134, 134, 'foo') - - def testCheckMoveOperation_FailZeroStartBlock(self): - """Tests _CheckMoveOperation(); fails, has extent with start block 0.""" - payload_checker = checker.PayloadChecker(self.MockPayload()) - op = update_metadata_pb2.InstallOperation() - op.type = common.OpType.MOVE - - self.AddToMessage(op.src_extents, - self.NewExtentList((0, 4), (12, 2), (1024, 128))) - self.AddToMessage(op.dst_extents, - self.NewExtentList((8, 128), (512, 6))) - self.assertRaises( - PayloadError, payload_checker._CheckMoveOperation, - op, None, 134, 134, 'foo') - - self.AddToMessage(op.src_extents, - self.NewExtentList((1, 4), (12, 2), (1024, 128))) - self.AddToMessage(op.dst_extents, - self.NewExtentList((0, 128), (512, 6))) + # Fail, total_dst_blocks is a floating point value. + op.src_extents = [] self.assertRaises( - PayloadError, payload_checker._CheckMoveOperation, - op, None, 134, 134, 'foo') + PayloadError, payload_checker._CheckReplaceOperation, + op, data_length, (data_length + block_size - 1) / block_size, 'foo') def testCheckAnyDiff(self): """Tests _CheckAnyDiffOperation().""" @@ -832,8 +735,8 @@ class PayloadCheckerTest(mox.MoxTestBase): self.assertRaises(PayloadError, payload_checker._CheckSourceCopyOperation, None, 0, 1, 'foo') - def DoCheckOperationTest(self, op_type_name, is_last, allow_signature, - allow_unhashed, fail_src_extents, fail_dst_extents, + def DoCheckOperationTest(self, op_type_name, allow_unhashed, + fail_src_extents, fail_dst_extents, fail_mismatched_data_offset_length, fail_missing_dst_extents, fail_src_length, fail_dst_length, fail_data_hash, @@ -841,10 +744,8 @@ class PayloadCheckerTest(mox.MoxTestBase): """Parametric testing of _CheckOperation(). Args: - op_type_name: 'REPLACE', 'REPLACE_BZ', 'REPLACE_XZ', 'MOVE', 'BSDIFF', + op_type_name: 'REPLACE', 'REPLACE_BZ', 'REPLACE_XZ', 'SOURCE_COPY', 'SOURCE_BSDIFF', BROTLI_BSDIFF or 'PUFFDIFF'. - is_last: Whether we're testing the last operation in a sequence. - allow_signature: Whether we're testing a signature-capable operation. allow_unhashed: Whether we're allowing to not hash the data. fail_src_extents: Tamper with src extents. fail_dst_extents: Tamper with dst extents. 
@@ -869,9 +770,9 @@ class PayloadCheckerTest(mox.MoxTestBase): old_part_size = test_utils.MiB(4) new_part_size = test_utils.MiB(8) old_block_counters = array.array( - 'B', [0] * ((old_part_size + block_size - 1) / block_size)) + 'B', [0] * ((old_part_size + block_size - 1) // block_size)) new_block_counters = array.array( - 'B', [0] * ((new_part_size + block_size - 1) / block_size)) + 'B', [0] * ((new_part_size + block_size - 1) // block_size)) prev_data_offset = 1876 blob_hash_counts = collections.defaultdict(int) @@ -880,8 +781,7 @@ class PayloadCheckerTest(mox.MoxTestBase): op.type = op_type total_src_blocks = 0 - if op_type in (common.OpType.MOVE, common.OpType.BSDIFF, - common.OpType.SOURCE_COPY, common.OpType.SOURCE_BSDIFF, + if op_type in (common.OpType.SOURCE_COPY, common.OpType.SOURCE_BSDIFF, common.OpType.PUFFDIFF, common.OpType.BROTLI_BSDIFF): if fail_src_extents: self.AddToMessage(op.src_extents, @@ -891,12 +791,9 @@ class PayloadCheckerTest(mox.MoxTestBase): self.NewExtentList((1, 16))) total_src_blocks = 16 - # TODO(tbrindus): add major version 2 tests. - payload_checker.major_version = common.CHROMEOS_MAJOR_PAYLOAD_VERSION + payload_checker.major_version = common.BRILLO_MAJOR_PAYLOAD_VERSION if op_type in (common.OpType.REPLACE, common.OpType.REPLACE_BZ): payload_checker.minor_version = 0 - elif op_type in (common.OpType.MOVE, common.OpType.BSDIFF): - payload_checker.minor_version = 2 if fail_bad_minor_version else 1 elif op_type in (common.OpType.SOURCE_COPY, common.OpType.SOURCE_BSDIFF): payload_checker.minor_version = 1 if fail_bad_minor_version else 2 if op_type == common.OpType.REPLACE_XZ: @@ -907,7 +804,7 @@ class PayloadCheckerTest(mox.MoxTestBase): elif op_type == common.OpType.PUFFDIFF: payload_checker.minor_version = 4 if fail_bad_minor_version else 5 - if op_type not in (common.OpType.MOVE, common.OpType.SOURCE_COPY): + if op_type != common.OpType.SOURCE_COPY: if not fail_mismatched_data_offset_length: op.data_length = 16 * block_size - 8 if fail_prev_data_offset: @@ -916,20 +813,16 @@ class PayloadCheckerTest(mox.MoxTestBase): op.data_offset = prev_data_offset fake_data = 'fake-data'.ljust(op.data_length) - if not (allow_unhashed or (is_last and allow_signature and - op_type == common.OpType.REPLACE)): - if not fail_data_hash: - # Create a valid data blob hash. - op.data_sha256_hash = hashlib.sha256(fake_data).digest() - payload.ReadDataBlob(op.data_offset, op.data_length).AndReturn( - fake_data) + if not allow_unhashed and not fail_data_hash: + # Create a valid data blob hash. + op.data_sha256_hash = hashlib.sha256(fake_data.encode('utf-8')).digest() + payload.ReadDataBlob.return_value = fake_data.encode('utf-8') elif fail_data_hash: # Create an invalid data blob hash. 
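Before the invalid-hash branch continues, a note on the mox to mock migration
these hunks apply throughout: self.mox.CreateMock(cls) becomes
mock.create_autospec(cls), and the record/replay pair AndReturn(...) plus
ReplayAll() collapses into a plain return_value assignment. A self-contained
sketch, assuming the standard-library unittest.mock (the tests may import the
standalone mock package instead); FakePayload is a stand-in name of ours:

    from unittest import mock

    class FakePayload(object):
        """Stand-in for the payload object these tests mock out."""
        def ReadDataBlob(self, offset, length):
            raise NotImplementedError

    payload = mock.create_autospec(FakePayload, instance=True)
    payload.ReadDataBlob.return_value = b'fake-data'   # was .AndReturn(fake_data)
    assert payload.ReadDataBlob(0, 9) == b'fake-data'  # no ReplayAll()/VerifyAll()
    payload.ReadDataBlob.assert_called_once_with(0, 9)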
op.data_sha256_hash = hashlib.sha256( - fake_data.replace(' ', '-')).digest() - payload.ReadDataBlob(op.data_offset, op.data_length).AndReturn( - fake_data) + fake_data.replace(' ', '-').encode('utf-8')).digest() + payload.ReadDataBlob.return_value = fake_data.encode('utf-8') total_dst_blocks = 0 if not fail_missing_dst_extents: @@ -944,8 +837,7 @@ class PayloadCheckerTest(mox.MoxTestBase): if total_src_blocks: if fail_src_length: op.src_length = total_src_blocks * block_size + 8 - elif (op_type in (common.OpType.MOVE, common.OpType.BSDIFF, - common.OpType.SOURCE_BSDIFF) and + elif (op_type == common.OpType.SOURCE_BSDIFF and payload_checker.minor_version <= 3): op.src_length = total_src_blocks * block_size elif fail_src_length: @@ -955,19 +847,17 @@ class PayloadCheckerTest(mox.MoxTestBase): if total_dst_blocks: if fail_dst_length: op.dst_length = total_dst_blocks * block_size + 8 - elif (op_type in (common.OpType.MOVE, common.OpType.BSDIFF, - common.OpType.SOURCE_BSDIFF) and + elif (op_type == common.OpType.SOURCE_BSDIFF and payload_checker.minor_version <= 3): op.dst_length = total_dst_blocks * block_size - self.mox.ReplayAll() should_fail = (fail_src_extents or fail_dst_extents or fail_mismatched_data_offset_length or fail_missing_dst_extents or fail_src_length or fail_dst_length or fail_data_hash or fail_prev_data_offset or fail_bad_minor_version) - args = (op, 'foo', is_last, old_block_counters, new_block_counters, - old_part_size, new_part_size, prev_data_offset, allow_signature, + args = (op, 'foo', old_block_counters, new_block_counters, + old_part_size, new_part_size, prev_data_offset, blob_hash_counts) if should_fail: self.assertRaises(PayloadError, payload_checker._CheckOperation, *args) @@ -1009,8 +899,9 @@ class PayloadCheckerTest(mox.MoxTestBase): if fail_nonexhaustive_full_update: rootfs_data_length -= block_size - payload_gen.AddOperation(False, rootfs_op_type, - dst_extents=[(0, rootfs_data_length / block_size)], + payload_gen.AddOperation(common.ROOTFS, rootfs_op_type, + dst_extents= + [(0, rootfs_data_length // block_size)], data_offset=0, data_length=rootfs_data_length) @@ -1020,17 +911,17 @@ class PayloadCheckerTest(mox.MoxTestBase): 'allow_unhashed': True}) payload_checker.payload_type = checker._TYPE_FULL report = checker._PayloadReport() - - args = (payload_checker.payload.manifest.install_operations, report, 'foo', - 0, rootfs_part_size, rootfs_part_size, rootfs_part_size, 0, False) + partition = next((p for p in payload_checker.payload.manifest.partitions + if p.partition_name == common.ROOTFS), None) + args = (partition.operations, report, 'foo', + 0, rootfs_part_size, rootfs_part_size, rootfs_part_size, 0) if fail_nonexhaustive_full_update: self.assertRaises(PayloadError, payload_checker._CheckOperations, *args) else: self.assertEqual(rootfs_data_length, payload_checker._CheckOperations(*args)) - def DoCheckSignaturesTest(self, fail_empty_sigs_blob, fail_missing_pseudo_op, - fail_mismatched_pseudo_op, fail_sig_missing_fields, + def DoCheckSignaturesTest(self, fail_empty_sigs_blob, fail_sig_missing_fields, fail_unknown_sig_version, fail_incorrect_sig): """Tests _CheckSignatures().""" # Generate a test payload. 
For this test, we only care about the signature @@ -1041,20 +932,18 @@ class PayloadCheckerTest(mox.MoxTestBase): payload_gen.SetBlockSize(block_size) rootfs_part_size = test_utils.MiB(2) kernel_part_size = test_utils.KiB(16) - payload_gen.SetPartInfo(False, True, rootfs_part_size, - hashlib.sha256('fake-new-rootfs-content').digest()) - payload_gen.SetPartInfo(True, True, kernel_part_size, - hashlib.sha256('fake-new-kernel-content').digest()) + payload_gen.SetPartInfo(common.ROOTFS, True, rootfs_part_size, + hashlib.sha256(b'fake-new-rootfs-content').digest()) + payload_gen.SetPartInfo(common.KERNEL, True, kernel_part_size, + hashlib.sha256(b'fake-new-kernel-content').digest()) payload_gen.SetMinorVersion(0) payload_gen.AddOperationWithData( - False, common.OpType.REPLACE, - dst_extents=[(0, rootfs_part_size / block_size)], + common.ROOTFS, common.OpType.REPLACE, + dst_extents=[(0, rootfs_part_size // block_size)], data_blob=os.urandom(rootfs_part_size)) - do_forge_pseudo_op = (fail_missing_pseudo_op or fail_mismatched_pseudo_op) - do_forge_sigs_data = (do_forge_pseudo_op or fail_empty_sigs_blob or - fail_sig_missing_fields or fail_unknown_sig_version - or fail_incorrect_sig) + do_forge_sigs_data = (fail_empty_sigs_blob or fail_sig_missing_fields or + fail_unknown_sig_version or fail_incorrect_sig) sigs_data = None if do_forge_sigs_data: @@ -1063,29 +952,19 @@ class PayloadCheckerTest(mox.MoxTestBase): if fail_sig_missing_fields: sig_data = None else: - sig_data = test_utils.SignSha256('fake-payload-content', + sig_data = test_utils.SignSha256(b'fake-payload-content', test_utils._PRIVKEY_FILE_NAME) sigs_gen.AddSig(5 if fail_unknown_sig_version else 1, sig_data) sigs_data = sigs_gen.ToBinary() payload_gen.SetSignatures(payload_gen.curr_offset, len(sigs_data)) - if do_forge_pseudo_op: - assert sigs_data is not None, 'should have forged signatures blob by now' - sigs_len = len(sigs_data) - payload_gen.AddOperation( - False, common.OpType.REPLACE, - data_offset=payload_gen.curr_offset / 2, - data_length=sigs_len / 2, - dst_extents=[(0, (sigs_len / 2 + block_size - 1) / block_size)]) - # Generate payload (complete w/ signature) and create the test object. 
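Before the checker construction resumes below, note why the
hashlib.sha256('...') call sites above gain a b prefix: Python 3 hashers accept
only bytes, and feeding them str raises TypeError. The equivalence, in
isolation:

    import hashlib

    digest = hashlib.sha256(b'fake-new-rootfs-content').digest()
    assert digest == hashlib.sha256(
        'fake-new-rootfs-content'.encode('utf-8')).digest()
    # hashlib.sha256('fake-new-rootfs-content')  # TypeError on Python 3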
payload_checker = _GetPayloadChecker( payload_gen.WriteToFileWithData, payload_gen_dargs={ 'sigs_data': sigs_data, - 'privkey_file_name': test_utils._PRIVKEY_FILE_NAME, - 'do_add_pseudo_operation': not do_forge_pseudo_op}) + 'privkey_file_name': test_utils._PRIVKEY_FILE_NAME}) payload_checker.payload_type = checker._TYPE_FULL report = checker._PayloadReport() @@ -1095,8 +974,7 @@ class PayloadCheckerTest(mox.MoxTestBase): common.KERNEL: kernel_part_size }) - should_fail = (fail_empty_sigs_blob or fail_missing_pseudo_op or - fail_mismatched_pseudo_op or fail_sig_missing_fields or + should_fail = (fail_empty_sigs_blob or fail_sig_missing_fields or fail_unknown_sig_version or fail_incorrect_sig) args = (report, test_utils._PUBKEY_FILE_NAME) if should_fail: @@ -1120,7 +998,6 @@ class PayloadCheckerTest(mox.MoxTestBase): should_succeed = ( (minor_version == 0 and payload_type == checker._TYPE_FULL) or - (minor_version == 1 and payload_type == checker._TYPE_DELTA) or (minor_version == 2 and payload_type == checker._TYPE_DELTA) or (minor_version == 3 and payload_type == checker._TYPE_DELTA) or (minor_version == 4 and payload_type == checker._TYPE_DELTA) or @@ -1150,10 +1027,10 @@ class PayloadCheckerTest(mox.MoxTestBase): payload_gen.SetBlockSize(block_size) kernel_filesystem_size = test_utils.KiB(16) rootfs_filesystem_size = test_utils.MiB(2) - payload_gen.SetPartInfo(False, True, rootfs_filesystem_size, - hashlib.sha256('fake-new-rootfs-content').digest()) - payload_gen.SetPartInfo(True, True, kernel_filesystem_size, - hashlib.sha256('fake-new-kernel-content').digest()) + payload_gen.SetPartInfo(common.ROOTFS, True, rootfs_filesystem_size, + hashlib.sha256(b'fake-new-rootfs-content').digest()) + payload_gen.SetPartInfo(common.KERNEL, True, kernel_filesystem_size, + hashlib.sha256(b'fake-new-kernel-content').digest()) payload_gen.SetMinorVersion(0) rootfs_part_size = 0 @@ -1163,8 +1040,8 @@ class PayloadCheckerTest(mox.MoxTestBase): if fail_rootfs_part_size_exceeded: rootfs_op_size += block_size payload_gen.AddOperationWithData( - False, common.OpType.REPLACE, - dst_extents=[(0, rootfs_op_size / block_size)], + common.ROOTFS, common.OpType.REPLACE, + dst_extents=[(0, rootfs_op_size // block_size)], data_blob=os.urandom(rootfs_op_size)) kernel_part_size = 0 @@ -1174,8 +1051,8 @@ class PayloadCheckerTest(mox.MoxTestBase): if fail_kernel_part_size_exceeded: kernel_op_size += block_size payload_gen.AddOperationWithData( - True, common.OpType.REPLACE, - dst_extents=[(0, kernel_op_size / block_size)], + common.KERNEL, common.OpType.REPLACE, + dst_extents=[(0, kernel_op_size // block_size)], data_blob=os.urandom(kernel_op_size)) # Generate payload (complete w/ signature) and create the test object. @@ -1186,16 +1063,14 @@ class PayloadCheckerTest(mox.MoxTestBase): else: use_block_size = block_size - # For the unittests 246 is the value that generated for the payload. - metadata_size = 246 + # For the unittests, 237 is the value generated for the payload. 
+ metadata_size = 237 if fail_mismatched_metadata_size: metadata_size += 1 kwargs = { 'payload_gen_dargs': { 'privkey_file_name': test_utils._PRIVKEY_FILE_NAME, - 'do_add_pseudo_operation': True, - 'is_pseudo_in_kernel': True, 'padding': os.urandom(1024) if fail_excess_data else None}, 'checker_init_dargs': { 'assert_type': 'delta' if fail_wrong_payload_type else 'full', @@ -1207,7 +1082,7 @@ class PayloadCheckerTest(mox.MoxTestBase): payload_checker = _GetPayloadChecker(payload_gen.WriteToFileWithData, **kwargs) - kwargs = { + kwargs2 = { 'pubkey_file_name': test_utils._PUBKEY_FILE_NAME, 'metadata_size': metadata_size, 'part_sizes': { @@ -1219,15 +1094,15 @@ class PayloadCheckerTest(mox.MoxTestBase): fail_rootfs_part_size_exceeded or fail_kernel_part_size_exceeded) if should_fail: - self.assertRaises(PayloadError, payload_checker.Run, **kwargs) + self.assertRaises(PayloadError, payload_checker.Run, **kwargs2) else: - self.assertIsNone(payload_checker.Run(**kwargs)) + self.assertIsNone(payload_checker.Run(**kwargs2)) + # This implements a generic API, hence the occasional unused args. # pylint: disable=W0613 -def ValidateCheckOperationTest(op_type_name, is_last, allow_signature, - allow_unhashed, fail_src_extents, - fail_dst_extents, +def ValidateCheckOperationTest(op_type_name, allow_unhashed, + fail_src_extents, fail_dst_extents, fail_mismatched_data_offset_length, fail_missing_dst_extents, fail_src_length, fail_dst_length, fail_data_hash, @@ -1244,8 +1119,8 @@ def ValidateCheckOperationTest(op_type_name, is_last, allow_signature, fail_bad_minor_version)): return False - # MOVE and SOURCE_COPY operations don't carry data. - if (op_type in (common.OpType.MOVE, common.OpType.SOURCE_COPY) and ( + # SOURCE_COPY operation does not carry data. + if (op_type == common.OpType.SOURCE_COPY and ( fail_mismatched_data_offset_length or fail_data_hash or fail_prev_data_offset)): return False @@ -1274,14 +1149,14 @@ def AddParametricTests(tested_method_name, arg_space, validate_func=None): (values) associated with them. validate_func: A function used for validating test argument combinations. """ - for value_tuple in itertools.product(*arg_space.itervalues()): - run_dargs = dict(zip(arg_space.iterkeys(), value_tuple)) + for value_tuple in itertools.product(*iter(arg_space.values())): + run_dargs = dict(zip(iter(arg_space.keys()), value_tuple)) if validate_func and not validate_func(**run_dargs): continue run_method_name = 'Do%sTest' % tested_method_name test_method_name = 'test%s' % tested_method_name - for arg_key, arg_val in run_dargs.iteritems(): - if arg_val or type(arg_val) is int: + for arg_key, arg_val in run_dargs.items(): + if arg_val or isinstance(arg_val, int): test_method_name += '__%s=%s' % (arg_key, arg_val) setattr(PayloadCheckerTest, test_method_name, TestMethodBody(run_method_name, run_dargs)) @@ -1328,11 +1203,8 @@ def AddAllParametricTests(): # Add all _CheckOperation() test cases. AddParametricTests('CheckOperation', {'op_type_name': ('REPLACE', 'REPLACE_BZ', 'REPLACE_XZ', - 'MOVE', 'BSDIFF', 'SOURCE_COPY', - 'SOURCE_BSDIFF', 'PUFFDIFF', - 'BROTLI_BSDIFF'), - 'is_last': (True, False), - 'allow_signature': (True, False), + 'SOURCE_COPY', 'SOURCE_BSDIFF', + 'PUFFDIFF', 'BROTLI_BSDIFF'), 'allow_unhashed': (True, False), 'fail_src_extents': (True, False), 'fail_dst_extents': (True, False), @@ -1352,15 +1224,13 @@ def AddAllParametricTests(): # Add all _CheckOperations() test cases. 
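For context on the parametric registrations above and below: AddParametricTests
expands its arg_space dict into the Cartesian product of the value tuples and
attaches one generated test method per combination to PayloadCheckerTest. A
rough, self-contained sketch of that expansion (print stands in for the setattr
the real code performs):

    import itertools

    arg_space = {'allow_unhashed': (True, False),
                 'fail_src_extents': (True, False)}
    for values in itertools.product(*arg_space.values()):
        run_dargs = dict(zip(arg_space.keys(), values))
        name = 'testCheckOperation' + ''.join(
            '__%s=%s' % (k, v) for k, v in run_dargs.items()
            if v or isinstance(v, int))  # bools are ints, so every arg shows up
        print(name, run_dargs)  # real code: setattr(PayloadCheckerTest, name, ...)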
AddParametricTests('CheckSignatures', {'fail_empty_sigs_blob': (True, False), - 'fail_missing_pseudo_op': (True, False), - 'fail_mismatched_pseudo_op': (True, False), 'fail_sig_missing_fields': (True, False), 'fail_unknown_sig_version': (True, False), 'fail_incorrect_sig': (True, False)}) # Add all _CheckManifestMinorVersion() test cases. AddParametricTests('CheckManifestMinorVersion', - {'minor_version': (None, 0, 1, 2, 3, 4, 5, 555), + {'minor_version': (None, 0, 2, 3, 4, 5, 555), 'payload_type': (checker._TYPE_FULL, checker._TYPE_DELTA)}) diff --git a/scripts/update_payload/common.py b/scripts/update_payload/common.py index 9061a754..b934cf88 100644 --- a/scripts/update_payload/common.py +++ b/scripts/update_payload/common.py @@ -16,8 +16,11 @@ """Utilities for update payload processing.""" +from __future__ import absolute_import from __future__ import print_function +import base64 + from update_payload import update_metadata_pb2 from update_payload.error import PayloadError @@ -25,18 +28,14 @@ from update_payload.error import PayloadError # # Constants. # -PSEUDO_EXTENT_MARKER = (1L << 64) - 1 # UINT64_MAX - SIG_ASN1_HEADER = ( - '\x30\x31\x30\x0d\x06\x09\x60\x86' - '\x48\x01\x65\x03\x04\x02\x01\x05' - '\x00\x04\x20' + b'\x30\x31\x30\x0d\x06\x09\x60\x86' + b'\x48\x01\x65\x03\x04\x02\x01\x05' + b'\x00\x04\x20' ) -CHROMEOS_MAJOR_PAYLOAD_VERSION = 1 BRILLO_MAJOR_PAYLOAD_VERSION = 2 -INPLACE_MINOR_PAYLOAD_VERSION = 1 SOURCE_MINOR_PAYLOAD_VERSION = 2 OPSRCHASH_MINOR_PAYLOAD_VERSION = 3 BROTLI_BSDIFF_MINOR_PAYLOAD_VERSION = 4 @@ -47,6 +46,7 @@ ROOTFS = 'root' # Tuple of (name in system, name in protobuf). CROS_PARTITIONS = ((KERNEL, KERNEL), (ROOTFS, 'rootfs')) + # # Payload operation types. # @@ -55,8 +55,6 @@ class OpType(object): _CLASS = update_metadata_pb2.InstallOperation REPLACE = _CLASS.REPLACE REPLACE_BZ = _CLASS.REPLACE_BZ - MOVE = _CLASS.MOVE - BSDIFF = _CLASS.BSDIFF SOURCE_COPY = _CLASS.SOURCE_COPY SOURCE_BSDIFF = _CLASS.SOURCE_BSDIFF ZERO = _CLASS.ZERO @@ -64,13 +62,11 @@ class OpType(object): REPLACE_XZ = _CLASS.REPLACE_XZ PUFFDIFF = _CLASS.PUFFDIFF BROTLI_BSDIFF = _CLASS.BROTLI_BSDIFF - ALL = (REPLACE, REPLACE_BZ, MOVE, BSDIFF, SOURCE_COPY, SOURCE_BSDIFF, ZERO, + ALL = (REPLACE, REPLACE_BZ, SOURCE_COPY, SOURCE_BSDIFF, ZERO, DISCARD, REPLACE_XZ, PUFFDIFF, BROTLI_BSDIFF) NAMES = { REPLACE: 'REPLACE', REPLACE_BZ: 'REPLACE_BZ', - MOVE: 'MOVE', - BSDIFF: 'BSDIFF', SOURCE_COPY: 'SOURCE_COPY', SOURCE_BSDIFF: 'SOURCE_BSDIFF', ZERO: 'ZERO', @@ -146,7 +142,7 @@ def Read(file_obj, length, offset=None, hasher=None): try: data = file_obj.read(length) - except IOError, e: + except IOError as e: raise PayloadError('error reading from file (%s): %s' % (file_obj.name, e)) if len(data) != length: @@ -167,13 +163,12 @@ def FormatExtent(ex, block_size=0): end_block = ex.start_block + ex.num_blocks if block_size: return '%d->%d * %d' % (ex.start_block, end_block, block_size) - else: - return '%d->%d' % (ex.start_block, end_block) + return '%d->%d' % (ex.start_block, end_block) def FormatSha256(digest): """Returns a canonical string representation of a SHA256 digest.""" - return digest.encode('base64').strip() + return base64.b64encode(digest).decode('utf-8') # diff --git a/scripts/update_payload/format_utils.py b/scripts/update_payload/format_utils.py index 6248ba9b..e73badf3 100644 --- a/scripts/update_payload/format_utils.py +++ b/scripts/update_payload/format_utils.py @@ -16,6 +16,8 @@ """Various formatting functions.""" +from __future__ import division + def NumToPercent(num, total, 
min_precision=1, max_precision=5): """Returns the percentage (string) of |num| out of |total|. @@ -50,7 +52,7 @@ def NumToPercent(num, total, min_precision=1, max_precision=5): precision = min(min_precision, max_precision) factor = 10 ** precision while precision <= max_precision: - percent = num * 100 * factor / total + percent = num * 100 * factor // total if percent: break factor *= 10 @@ -102,8 +104,8 @@ def BytesToHumanReadable(size, precision=1, decimal=False): magnitude = next_magnitude if exp != 0: - whole = size / magnitude - frac = (size % magnitude) * (10 ** precision) / magnitude + whole = size // magnitude + frac = (size % magnitude) * (10 ** precision) // magnitude while frac and not frac % 10: frac /= 10 return '%d%s %s' % (whole, '.%d' % frac if frac else '', suffixes[exp - 1]) diff --git a/scripts/update_payload/format_utils_unittest.py b/scripts/update_payload/format_utils_unittest.py index 42ea621c..4dcd6527 100755 --- a/scripts/update_payload/format_utils_unittest.py +++ b/scripts/update_payload/format_utils_unittest.py @@ -1,4 +1,4 @@ -#!/usr/bin/python2 +#!/usr/bin/env python # # Copyright (C) 2013 The Android Open Source Project # @@ -17,6 +17,11 @@ """Unit tests for format_utils.py.""" +# Disable check for function names to avoid errors based on old code +# pylint: disable-msg=invalid-name + +from __future__ import absolute_import + import unittest from update_payload import format_utils diff --git a/scripts/update_payload/histogram.py b/scripts/update_payload/histogram.py index 1ac2ab5d..bad2dc37 100644 --- a/scripts/update_payload/histogram.py +++ b/scripts/update_payload/histogram.py @@ -16,6 +16,9 @@ """Histogram generation tools.""" +from __future__ import absolute_import +from __future__ import division + from collections import defaultdict from update_payload import format_utils @@ -110,7 +113,7 @@ class Histogram(object): hist_bar = '|' for key, count in self.data: if self.total: - bar_len = count * self.scale / self.total + bar_len = count * self.scale // self.total hist_bar = '|%s|' % ('#' * bar_len).ljust(self.scale) line = '%s %s %s' % ( diff --git a/scripts/update_payload/histogram_unittest.py b/scripts/update_payload/histogram_unittest.py index e757dd02..ccde2bb1 100755 --- a/scripts/update_payload/histogram_unittest.py +++ b/scripts/update_payload/histogram_unittest.py @@ -1,4 +1,4 @@ -#!/usr/bin/python2 +#!/usr/bin/env python # # Copyright (C) 2013 The Android Open Source Project # @@ -17,6 +17,11 @@ """Unit tests for histogram.py.""" +# Disable check for function names to avoid errors based on old code +# pylint: disable-msg=invalid-name + +from __future__ import absolute_import + import unittest from update_payload import format_utils diff --git a/scripts/update_payload/payload.py b/scripts/update_payload/payload.py index 2a0cb58d..998703ad 100644 --- a/scripts/update_payload/payload.py +++ b/scripts/update_payload/payload.py @@ -16,10 +16,14 @@ """Tools for reading, verifying and applying Chrome OS update payloads.""" +from __future__ import absolute_import from __future__ import print_function import hashlib +import io +import mmap import struct +import zipfile from update_payload import applier from update_payload import checker @@ -64,7 +68,7 @@ class Payload(object): """Update payload header struct.""" # Header constants; sizes are in bytes. 
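A note before the header constants change below: the magic becomes the bytes
literal b'CrAU' because the payload is read in binary mode, and under Python 3
a str never compares equal to bytes. A sketch of how the quoted sizes line up
when reading a header, assuming the big-endian field layout the payload format
uses:

    import struct

    def read_payload_header(fp):
        if fp.read(4) != b'CrAU':           # magic must be compared as bytes
            raise ValueError('not an update payload')
        # _VERSION_SIZE and _MANIFEST_LEN_SIZE are both 8-byte unsigned ints.
        version, manifest_len = struct.unpack('>QQ', fp.read(16))
        metadata_signature_len = 0
        if version == 2:                    # major version 2 adds a 4-byte field
            metadata_signature_len = struct.unpack('>I', fp.read(4))[0]
        return version, manifest_len, metadata_signature_len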
- _MAGIC = 'CrAU' + _MAGIC = b'CrAU' _VERSION_SIZE = 8 _MANIFEST_LEN_SIZE = 8 _METADATA_SIGNATURE_LEN_SIZE = 4 @@ -111,7 +115,6 @@ class Payload(object): payload_file, self._METADATA_SIGNATURE_LEN_SIZE, True, hasher=hasher) - def __init__(self, payload_file, payload_file_offset=0): """Initialize the payload object. @@ -119,7 +122,15 @@ class Payload(object): payload_file: update payload file object open for reading payload_file_offset: the offset of the actual payload """ - self.payload_file = payload_file + if zipfile.is_zipfile(payload_file): + with zipfile.ZipFile(payload_file) as zfp: + self.payload_file = zfp.open("payload.bin", "r") + elif isinstance(payload_file, str): + payload_fp = open(payload_file, "rb") + payload_bytes = mmap.mmap(payload_fp.fileno(), 0, access=mmap.ACCESS_READ) + self.payload_file = io.BytesIO(payload_bytes) + else: + self.payload_file = payload_file self.payload_file_offset = payload_file_offset self.manifest_hasher = None self.is_init = False @@ -226,31 +237,6 @@ class Payload(object): self.is_init = True - def Describe(self): - """Emits the payload embedded description data to standard output.""" - def _DescribeImageInfo(description, image_info): - """Display info about the image.""" - def _DisplayIndentedValue(name, value): - print(' {:<14} {}'.format(name+':', value)) - - print('%s:' % description) - _DisplayIndentedValue('Channel', image_info.channel) - _DisplayIndentedValue('Board', image_info.board) - _DisplayIndentedValue('Version', image_info.version) - _DisplayIndentedValue('Key', image_info.key) - - if image_info.build_channel != image_info.channel: - _DisplayIndentedValue('Build channel', image_info.build_channel) - - if image_info.build_version != image_info.version: - _DisplayIndentedValue('Build version', image_info.build_version) - - if self.manifest.HasField('old_image_info'): - _DescribeImageInfo('Old Image', self.manifest.old_image_info) - - if self.manifest.HasField('new_image_info'): - _DescribeImageInfo('New Image', self.manifest.new_image_info) - def _AssertInit(self): """Raises an exception if the object was not initialized.""" if not self.is_init: @@ -263,9 +249,7 @@ class Payload(object): def IsDelta(self): """Returns True iff the payload appears to be a delta.""" self._AssertInit() - return (self.manifest.HasField('old_kernel_info') or - self.manifest.HasField('old_rootfs_info') or - any(partition.HasField('old_partition_info') + return (any(partition.HasField('old_partition_info') for partition in self.manifest.partitions)) def IsFull(self): diff --git a/scripts/update_payload/test_utils.py b/scripts/update_payload/test_utils.py index 1e2259d4..e153669e 100644 --- a/scripts/update_payload/test_utils.py +++ b/scripts/update_payload/test_utils.py @@ -16,9 +16,10 @@ """Utilities for unit testing.""" +from __future__ import absolute_import from __future__ import print_function -import cStringIO +import io import hashlib import os import struct @@ -70,7 +71,7 @@ def _WriteInt(file_obj, size, is_unsigned, val): """ try: file_obj.write(struct.pack(common.IntPackingFmtStr(size, is_unsigned), val)) - except IOError, e: + except IOError as e: raise payload.PayloadError('error writing to file (%s): %s' % (file_obj.name, e)) @@ -173,31 +174,37 @@ class PayloadGenerator(object): self.block_size = block_size _SetMsgField(self.manifest, 'block_size', block_size) - def SetPartInfo(self, is_kernel, is_new, part_size, part_hash): + def SetPartInfo(self, part_name, is_new, part_size, part_hash): """Set the partition info entry. 
Args: - is_kernel: whether this is kernel partition info - is_new: whether to set old (False) or new (True) info - part_size: the partition size (in fact, filesystem size) - part_hash: the partition hash + part_name: The name of the partition. + is_new: Whether to set old (False) or new (True) info. + part_size: The partition size (in fact, filesystem size). + part_hash: The partition hash. """ - if is_kernel: - part_info = (self.manifest.new_kernel_info if is_new - else self.manifest.old_kernel_info) - else: - part_info = (self.manifest.new_rootfs_info if is_new - else self.manifest.old_rootfs_info) + partition = next((x for x in self.manifest.partitions + if x.partition_name == part_name), None) + if partition is None: + partition = self.manifest.partitions.add() + partition.partition_name = part_name + + part_info = (partition.new_partition_info if is_new + else partition.old_partition_info) _SetMsgField(part_info, 'size', part_size) _SetMsgField(part_info, 'hash', part_hash) - def AddOperation(self, is_kernel, op_type, data_offset=None, + def AddOperation(self, part_name, op_type, data_offset=None, data_length=None, src_extents=None, src_length=None, dst_extents=None, dst_length=None, data_sha256_hash=None): """Adds an InstallOperation entry.""" - operations = (self.manifest.kernel_install_operations if is_kernel - else self.manifest.install_operations) + partition = next((x for x in self.manifest.partitions + if x.partition_name == part_name), None) + if partition is None: + partition = self.manifest.partitions.add() + partition.partition_name = part_name + operations = partition.operations op = operations.add() op.type = op_type @@ -277,7 +284,7 @@ class EnhancedPayloadGenerator(PayloadGenerator): self.data_blobs.append(data_blob) return data_length, data_offset - def AddOperationWithData(self, is_kernel, op_type, src_extents=None, + def AddOperationWithData(self, part_name, op_type, src_extents=None, src_length=None, dst_extents=None, dst_length=None, data_blob=None, do_hash_data_blob=True): """Adds an install operation and associated data blob. @@ -287,12 +294,12 @@ class EnhancedPayloadGenerator(PayloadGenerator): necessary offset/length accounting. Args: - is_kernel: whether this is a kernel (True) or rootfs (False) operation - op_type: one of REPLACE, REPLACE_BZ, REPLACE_XZ, MOVE or BSDIFF + part_name: The name of the partition (e.g. kernel or root). + op_type: one of REPLACE, REPLACE_BZ, REPLACE_XZ. 
src_extents: list of (start, length) pairs indicating src block ranges - src_length: size of the src data in bytes (needed for BSDIFF) + src_length: size of the src data in bytes (needed for diff operations) dst_extents: list of (start, length) pairs indicating dst block ranges - dst_length: size of the dst data in bytes (needed for BSDIFF) + dst_length: size of the dst data in bytes (needed for diff operations) data_blob: a data blob associated with this operation do_hash_data_blob: whether or not to compute and add a data blob hash """ @@ -302,15 +309,13 @@ class EnhancedPayloadGenerator(PayloadGenerator): data_sha256_hash = hashlib.sha256(data_blob).digest() data_length, data_offset = self.AddData(data_blob) - self.AddOperation(is_kernel, op_type, data_offset=data_offset, + self.AddOperation(part_name, op_type, data_offset=data_offset, data_length=data_length, src_extents=src_extents, src_length=src_length, dst_extents=dst_extents, dst_length=dst_length, data_sha256_hash=data_sha256_hash) def WriteToFileWithData(self, file_obj, sigs_data=None, - privkey_file_name=None, - do_add_pseudo_operation=False, - is_pseudo_in_kernel=False, padding=None): + privkey_file_name=None, padding=None): """Writes the payload content to a file, optionally signing the content. Args: @@ -319,10 +324,6 @@ class EnhancedPayloadGenerator(PayloadGenerator): payload signature fields assumed to be preset by the caller) privkey_file_name: key used for signing the payload (optional; used only if explicit signatures blob not provided) - do_add_pseudo_operation: whether a pseudo-operation should be added to - account for the signature blob - is_pseudo_in_kernel: whether the pseudo-operation should be added to - kernel (True) or rootfs (False) operations padding: stuff to dump past the normal data blobs provided (optional) Raises: @@ -335,7 +336,7 @@ class EnhancedPayloadGenerator(PayloadGenerator): if do_generate_sigs_data: # First, sign some arbitrary data to obtain the size of a signature blob. - fake_sig = SignSha256('fake-payload-data', privkey_file_name) + fake_sig = SignSha256(b'fake-payload-data', privkey_file_name) fake_sigs_gen = SignaturesGenerator() fake_sigs_gen.AddSig(1, fake_sig) sigs_len = len(fake_sigs_gen.ToBinary()) @@ -343,20 +344,9 @@ class EnhancedPayloadGenerator(PayloadGenerator): # Update the payload with proper signature attributes. self.SetSignatures(self.curr_offset, sigs_len) - # Add a pseudo-operation to account for the signature blob, if requested. - if do_add_pseudo_operation: - if not self.block_size: - raise TestError('cannot add pseudo-operation without knowing the ' - 'payload block size') - self.AddOperation( - is_pseudo_in_kernel, common.OpType.REPLACE, - data_offset=self.curr_offset, data_length=sigs_len, - dst_extents=[(common.PSEUDO_EXTENT_MARKER, - (sigs_len + self.block_size - 1) / self.block_size)]) - if do_generate_sigs_data: # Once all payload fields are updated, dump and sign it. 
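The cStringIO.StringIO to io.BytesIO switch just below is the standard
Python 3 move for in-memory binary buffers: the serialized payload is bytes,
and a text buffer would reject it.

    import io

    buf = io.BytesIO()              # file-like object over bytes
    buf.write(b'CrAU')              # binary payload content is fine
    assert buf.getvalue() == b'CrAU'
    # io.StringIO().write(b'CrAU')  # TypeError: text buffers want str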
- temp_payload_file = cStringIO.StringIO() + temp_payload_file = io.BytesIO() self.WriteToFile(temp_payload_file, data_blobs=self.data_blobs) sig = SignSha256(temp_payload_file.getvalue(), privkey_file_name) sigs_gen = SignaturesGenerator() diff --git a/scripts/update_payload/update_metadata_pb2.py b/scripts/update_payload/update_metadata_pb2.py index cb8f4c22..9aef9f2f 100644 --- a/scripts/update_payload/update_metadata_pb2.py +++ b/scripts/update_payload/update_metadata_pb2.py @@ -20,7 +20,7 @@ DESCRIPTOR = _descriptor.FileDescriptor( package='chromeos_update_engine', syntax='proto2', serialized_options=_b('H\003'), - serialized_pb=_b('\n\x15update_metadata.proto\x12\x16\x63hromeos_update_engine\"1\n\x06\x45xtent\x12\x13\n\x0bstart_block\x18\x01 \x01(\x04\x12\x12\n\nnum_blocks\x18\x02 \x01(\x04\"z\n\nSignatures\x12@\n\nsignatures\x18\x01 \x03(\x0b\x32,.chromeos_update_engine.Signatures.Signature\x1a*\n\tSignature\x12\x0f\n\x07version\x18\x01 \x01(\r\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\"+\n\rPartitionInfo\x12\x0c\n\x04size\x18\x01 \x01(\x04\x12\x0c\n\x04hash\x18\x02 \x01(\x0c\"w\n\tImageInfo\x12\r\n\x05\x62oard\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\x12\x0f\n\x07\x63hannel\x18\x03 \x01(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x15\n\rbuild_channel\x18\x05 \x01(\t\x12\x15\n\rbuild_version\x18\x06 \x01(\t\"\xee\x03\n\x10InstallOperation\x12;\n\x04type\x18\x01 \x02(\x0e\x32-.chromeos_update_engine.InstallOperation.Type\x12\x13\n\x0b\x64\x61ta_offset\x18\x02 \x01(\x04\x12\x13\n\x0b\x64\x61ta_length\x18\x03 \x01(\x04\x12\x33\n\x0bsrc_extents\x18\x04 \x03(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x12\n\nsrc_length\x18\x05 \x01(\x04\x12\x33\n\x0b\x64st_extents\x18\x06 \x03(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x12\n\ndst_length\x18\x07 \x01(\x04\x12\x18\n\x10\x64\x61ta_sha256_hash\x18\x08 \x01(\x0c\x12\x17\n\x0fsrc_sha256_hash\x18\t \x01(\x0c\"\xad\x01\n\x04Type\x12\x0b\n\x07REPLACE\x10\x00\x12\x0e\n\nREPLACE_BZ\x10\x01\x12\x0c\n\x04MOVE\x10\x02\x1a\x02\x08\x01\x12\x0e\n\x06\x42SDIFF\x10\x03\x1a\x02\x08\x01\x12\x0f\n\x0bSOURCE_COPY\x10\x04\x12\x11\n\rSOURCE_BSDIFF\x10\x05\x12\x0e\n\nREPLACE_XZ\x10\x08\x12\x08\n\x04ZERO\x10\x06\x12\x0b\n\x07\x44ISCARD\x10\x07\x12\x11\n\rBROTLI_BSDIFF\x10\n\x12\x0c\n\x08PUFFDIFF\x10\t\"\xd7\x05\n\x0fPartitionUpdate\x12\x16\n\x0epartition_name\x18\x01 \x02(\t\x12\x17\n\x0frun_postinstall\x18\x02 \x01(\x08\x12\x18\n\x10postinstall_path\x18\x03 \x01(\t\x12\x17\n\x0f\x66ilesystem_type\x18\x04 \x01(\t\x12M\n\x17new_partition_signature\x18\x05 \x03(\x0b\x32,.chromeos_update_engine.Signatures.Signature\x12\x41\n\x12old_partition_info\x18\x06 \x01(\x0b\x32%.chromeos_update_engine.PartitionInfo\x12\x41\n\x12new_partition_info\x18\x07 \x01(\x0b\x32%.chromeos_update_engine.PartitionInfo\x12<\n\noperations\x18\x08 \x03(\x0b\x32(.chromeos_update_engine.InstallOperation\x12\x1c\n\x14postinstall_optional\x18\t \x01(\x08\x12=\n\x15hash_tree_data_extent\x18\n \x01(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x38\n\x10hash_tree_extent\x18\x0b \x01(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x1b\n\x13hash_tree_algorithm\x18\x0c \x01(\t\x12\x16\n\x0ehash_tree_salt\x18\r \x01(\x0c\x12\x37\n\x0f\x66\x65\x63_data_extent\x18\x0e \x01(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x32\n\nfec_extent\x18\x0f \x01(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x14\n\tfec_roots\x18\x10 \x01(\r:\x01\x32\"L\n\x15\x44ynamicPartitionGroup\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\x0c\n\x04size\x18\x02 \x01(\x04\x12\x17\n\x0fpartition_names\x18\x03 
\x03(\t\"s\n\x18\x44ynamicPartitionMetadata\x12=\n\x06groups\x18\x01 \x03(\x0b\x32-.chromeos_update_engine.DynamicPartitionGroup\x12\x18\n\x10snapshot_enabled\x18\x02 \x01(\x08\"\xb1\x06\n\x14\x44\x65ltaArchiveManifest\x12\x44\n\x12install_operations\x18\x01 \x03(\x0b\x32(.chromeos_update_engine.InstallOperation\x12K\n\x19kernel_install_operations\x18\x02 \x03(\x0b\x32(.chromeos_update_engine.InstallOperation\x12\x18\n\nblock_size\x18\x03 \x01(\r:\x04\x34\x30\x39\x36\x12\x19\n\x11signatures_offset\x18\x04 \x01(\x04\x12\x17\n\x0fsignatures_size\x18\x05 \x01(\x04\x12>\n\x0fold_kernel_info\x18\x06 \x01(\x0b\x32%.chromeos_update_engine.PartitionInfo\x12>\n\x0fnew_kernel_info\x18\x07 \x01(\x0b\x32%.chromeos_update_engine.PartitionInfo\x12>\n\x0fold_rootfs_info\x18\x08 \x01(\x0b\x32%.chromeos_update_engine.PartitionInfo\x12>\n\x0fnew_rootfs_info\x18\t \x01(\x0b\x32%.chromeos_update_engine.PartitionInfo\x12\x39\n\x0eold_image_info\x18\n \x01(\x0b\x32!.chromeos_update_engine.ImageInfo\x12\x39\n\x0enew_image_info\x18\x0b \x01(\x0b\x32!.chromeos_update_engine.ImageInfo\x12\x18\n\rminor_version\x18\x0c \x01(\r:\x01\x30\x12;\n\npartitions\x18\r \x03(\x0b\x32\'.chromeos_update_engine.PartitionUpdate\x12\x15\n\rmax_timestamp\x18\x0e \x01(\x03\x12T\n\x1a\x64ynamic_partition_metadata\x18\x0f \x01(\x0b\x32\x30.chromeos_update_engine.DynamicPartitionMetadataB\x02H\x03') + serialized_pb=_b('\n\x15update_metadata.proto\x12\x16\x63hromeos_update_engine\"1\n\x06\x45xtent\x12\x13\n\x0bstart_block\x18\x01 \x01(\x04\x12\x12\n\nnum_blocks\x18\x02 \x01(\x04\"\x9f\x01\n\nSignatures\x12@\n\nsignatures\x18\x01 \x03(\x0b\x32,.chromeos_update_engine.Signatures.Signature\x1aO\n\tSignature\x12\x13\n\x07version\x18\x01 \x01(\rB\x02\x18\x01\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12\x1f\n\x17unpadded_signature_size\x18\x03 \x01(\x07\"+\n\rPartitionInfo\x12\x0c\n\x04size\x18\x01 \x01(\x04\x12\x0c\n\x04hash\x18\x02 \x01(\x0c\"\x8f\x01\n\tImageInfo\x12\x11\n\x05\x62oard\x18\x01 \x01(\tB\x02\x18\x01\x12\x0f\n\x03key\x18\x02 \x01(\tB\x02\x18\x01\x12\x13\n\x07\x63hannel\x18\x03 \x01(\tB\x02\x18\x01\x12\x13\n\x07version\x18\x04 \x01(\tB\x02\x18\x01\x12\x19\n\rbuild_channel\x18\x05 \x01(\tB\x02\x18\x01\x12\x19\n\rbuild_version\x18\x06 \x01(\tB\x02\x18\x01\"\xee\x03\n\x10InstallOperation\x12;\n\x04type\x18\x01 \x02(\x0e\x32-.chromeos_update_engine.InstallOperation.Type\x12\x13\n\x0b\x64\x61ta_offset\x18\x02 \x01(\x04\x12\x13\n\x0b\x64\x61ta_length\x18\x03 \x01(\x04\x12\x33\n\x0bsrc_extents\x18\x04 \x03(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x12\n\nsrc_length\x18\x05 \x01(\x04\x12\x33\n\x0b\x64st_extents\x18\x06 \x03(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x12\n\ndst_length\x18\x07 \x01(\x04\x12\x18\n\x10\x64\x61ta_sha256_hash\x18\x08 \x01(\x0c\x12\x17\n\x0fsrc_sha256_hash\x18\t \x01(\x0c\"\xad\x01\n\x04Type\x12\x0b\n\x07REPLACE\x10\x00\x12\x0e\n\nREPLACE_BZ\x10\x01\x12\x0c\n\x04MOVE\x10\x02\x1a\x02\x08\x01\x12\x0e\n\x06\x42SDIFF\x10\x03\x1a\x02\x08\x01\x12\x0f\n\x0bSOURCE_COPY\x10\x04\x12\x11\n\rSOURCE_BSDIFF\x10\x05\x12\x0e\n\nREPLACE_XZ\x10\x08\x12\x08\n\x04ZERO\x10\x06\x12\x0b\n\x07\x44ISCARD\x10\x07\x12\x11\n\rBROTLI_BSDIFF\x10\n\x12\x0c\n\x08PUFFDIFF\x10\t\"\xcf\x01\n\x11\x43owMergeOperation\x12<\n\x04type\x18\x01 \x01(\x0e\x32..chromeos_update_engine.CowMergeOperation.Type\x12\x32\n\nsrc_extent\x18\x02 \x01(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x32\n\ndst_extent\x18\x03 
\x01(\x0b\x32\x1e.chromeos_update_engine.Extent\"\x14\n\x04Type\x12\x0c\n\x08\x43OW_COPY\x10\x00\"\xc8\x06\n\x0fPartitionUpdate\x12\x16\n\x0epartition_name\x18\x01 \x02(\t\x12\x17\n\x0frun_postinstall\x18\x02 \x01(\x08\x12\x18\n\x10postinstall_path\x18\x03 \x01(\t\x12\x17\n\x0f\x66ilesystem_type\x18\x04 \x01(\t\x12M\n\x17new_partition_signature\x18\x05 \x03(\x0b\x32,.chromeos_update_engine.Signatures.Signature\x12\x41\n\x12old_partition_info\x18\x06 \x01(\x0b\x32%.chromeos_update_engine.PartitionInfo\x12\x41\n\x12new_partition_info\x18\x07 \x01(\x0b\x32%.chromeos_update_engine.PartitionInfo\x12<\n\noperations\x18\x08 \x03(\x0b\x32(.chromeos_update_engine.InstallOperation\x12\x1c\n\x14postinstall_optional\x18\t \x01(\x08\x12=\n\x15hash_tree_data_extent\x18\n \x01(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x38\n\x10hash_tree_extent\x18\x0b \x01(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x1b\n\x13hash_tree_algorithm\x18\x0c \x01(\t\x12\x16\n\x0ehash_tree_salt\x18\r \x01(\x0c\x12\x37\n\x0f\x66\x65\x63_data_extent\x18\x0e \x01(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x32\n\nfec_extent\x18\x0f \x01(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x14\n\tfec_roots\x18\x10 \x01(\r:\x01\x32\x12\x0f\n\x07version\x18\x11 \x01(\t\x12\x43\n\x10merge_operations\x18\x12 \x03(\x0b\x32).chromeos_update_engine.CowMergeOperation\x12\x19\n\x11\x65stimate_cow_size\x18\x13 \x01(\x04\"L\n\x15\x44ynamicPartitionGroup\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\x0c\n\x04size\x18\x02 \x01(\x04\x12\x17\n\x0fpartition_names\x18\x03 \x03(\t\"\xa9\x01\n\x18\x44ynamicPartitionMetadata\x12=\n\x06groups\x18\x01 \x03(\x0b\x32-.chromeos_update_engine.DynamicPartitionGroup\x12\x18\n\x10snapshot_enabled\x18\x02 \x01(\x08\x12\x14\n\x0cvabc_enabled\x18\x03 \x01(\x08\x12\x1e\n\x16vabc_compression_param\x18\x04 \x01(\t\"c\n\x08\x41pexInfo\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x03\x12\x15\n\ris_compressed\x18\x03 \x01(\x08\x12\x19\n\x11\x64\x65\x63ompressed_size\x18\x04 \x01(\x03\"C\n\x0c\x41pexMetadata\x12\x33\n\tapex_info\x18\x01 \x03(\x0b\x32 .chromeos_update_engine.ApexInfo\"\x9e\x07\n\x14\x44\x65ltaArchiveManifest\x12H\n\x12install_operations\x18\x01 \x03(\x0b\x32(.chromeos_update_engine.InstallOperationB\x02\x18\x01\x12O\n\x19kernel_install_operations\x18\x02 \x03(\x0b\x32(.chromeos_update_engine.InstallOperationB\x02\x18\x01\x12\x18\n\nblock_size\x18\x03 \x01(\r:\x04\x34\x30\x39\x36\x12\x19\n\x11signatures_offset\x18\x04 \x01(\x04\x12\x17\n\x0fsignatures_size\x18\x05 \x01(\x04\x12\x42\n\x0fold_kernel_info\x18\x06 \x01(\x0b\x32%.chromeos_update_engine.PartitionInfoB\x02\x18\x01\x12\x42\n\x0fnew_kernel_info\x18\x07 \x01(\x0b\x32%.chromeos_update_engine.PartitionInfoB\x02\x18\x01\x12\x42\n\x0fold_rootfs_info\x18\x08 \x01(\x0b\x32%.chromeos_update_engine.PartitionInfoB\x02\x18\x01\x12\x42\n\x0fnew_rootfs_info\x18\t \x01(\x0b\x32%.chromeos_update_engine.PartitionInfoB\x02\x18\x01\x12=\n\x0eold_image_info\x18\n \x01(\x0b\x32!.chromeos_update_engine.ImageInfoB\x02\x18\x01\x12=\n\x0enew_image_info\x18\x0b \x01(\x0b\x32!.chromeos_update_engine.ImageInfoB\x02\x18\x01\x12\x18\n\rminor_version\x18\x0c \x01(\r:\x01\x30\x12;\n\npartitions\x18\r \x03(\x0b\x32\'.chromeos_update_engine.PartitionUpdate\x12\x15\n\rmax_timestamp\x18\x0e \x01(\x03\x12T\n\x1a\x64ynamic_partition_metadata\x18\x0f \x01(\x0b\x32\x30.chromeos_update_engine.DynamicPartitionMetadata\x12\x16\n\x0epartial_update\x18\x10 \x01(\x08\x12\x33\n\tapex_info\x18\x11 \x03(\x0b\x32 .chromeos_update_engine.ApexInfoB\x02H\x03') ) @@ 
-78,11 +78,29 @@ _INSTALLOPERATION_TYPE = _descriptor.EnumDescriptor( ], containing_type=None, serialized_options=None, - serialized_start=712, - serialized_end=885, + serialized_start=775, + serialized_end=948, ) _sym_db.RegisterEnumDescriptor(_INSTALLOPERATION_TYPE) +_COWMERGEOPERATION_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='chromeos_update_engine.CowMergeOperation.Type', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='COW_COPY', index=0, number=0, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=1138, + serialized_end=1158, +) +_sym_db.RegisterEnumDescriptor(_COWMERGEOPERATION_TYPE) + _EXTENT = _descriptor.Descriptor( name='Extent', @@ -135,7 +153,7 @@ _SIGNATURES_SIGNATURE = _descriptor.Descriptor( has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=_b('\030\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='data', full_name='chromeos_update_engine.Signatures.Signature.data', index=1, number=2, type=12, cpp_type=9, label=1, @@ -143,6 +161,13 @@ _SIGNATURES_SIGNATURE = _descriptor.Descriptor( message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='unpadded_signature_size', full_name='chromeos_update_engine.Signatures.Signature.unpadded_signature_size', index=2, + number=3, type=7, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -155,8 +180,8 @@ _SIGNATURES_SIGNATURE = _descriptor.Descriptor( extension_ranges=[], oneofs=[ ], - serialized_start=180, - serialized_end=222, + serialized_start=181, + serialized_end=260, ) _SIGNATURES = _descriptor.Descriptor( @@ -185,8 +210,8 @@ _SIGNATURES = _descriptor.Descriptor( extension_ranges=[], oneofs=[ ], - serialized_start=100, - serialized_end=222, + serialized_start=101, + serialized_end=260, ) @@ -223,8 +248,8 @@ _PARTITIONINFO = _descriptor.Descriptor( extension_ranges=[], oneofs=[ ], - serialized_start=224, - serialized_end=267, + serialized_start=262, + serialized_end=305, ) @@ -241,42 +266,42 @@ _IMAGEINFO = _descriptor.Descriptor( has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=_b('\030\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='key', full_name='chromeos_update_engine.ImageInfo.key', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=_b('\030\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='channel', full_name='chromeos_update_engine.ImageInfo.channel', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + 
serialized_options=_b('\030\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='version', full_name='chromeos_update_engine.ImageInfo.version', index=3, number=4, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=_b('\030\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='build_channel', full_name='chromeos_update_engine.ImageInfo.build_channel', index=4, number=5, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=_b('\030\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='build_version', full_name='chromeos_update_engine.ImageInfo.build_version', index=5, number=6, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=_b('\030\001'), file=DESCRIPTOR), ], extensions=[ ], @@ -289,8 +314,8 @@ _IMAGEINFO = _descriptor.Descriptor( extension_ranges=[], oneofs=[ ], - serialized_start=269, - serialized_end=388, + serialized_start=308, + serialized_end=451, ) @@ -377,8 +402,54 @@ _INSTALLOPERATION = _descriptor.Descriptor( extension_ranges=[], oneofs=[ ], - serialized_start=391, - serialized_end=885, + serialized_start=454, + serialized_end=948, +) + + +_COWMERGEOPERATION = _descriptor.Descriptor( + name='CowMergeOperation', + full_name='chromeos_update_engine.CowMergeOperation', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type', full_name='chromeos_update_engine.CowMergeOperation.type', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='src_extent', full_name='chromeos_update_engine.CowMergeOperation.src_extent', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='dst_extent', full_name='chromeos_update_engine.CowMergeOperation.dst_extent', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _COWMERGEOPERATION_TYPE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=951, + serialized_end=1158, ) @@ -501,6 +572,27 @@ _PARTITIONUPDATE = _descriptor.Descriptor( message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='version', full_name='chromeos_update_engine.PartitionUpdate.version', index=16, 
+ number=17, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='merge_operations', full_name='chromeos_update_engine.PartitionUpdate.merge_operations', index=17, + number=18, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='estimate_cow_size', full_name='chromeos_update_engine.PartitionUpdate.estimate_cow_size', index=18, + number=19, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -513,8 +605,8 @@ _PARTITIONUPDATE = _descriptor.Descriptor( extension_ranges=[], oneofs=[ ], - serialized_start=888, - serialized_end=1615, + serialized_start=1161, + serialized_end=2001, ) @@ -558,8 +650,8 @@ _DYNAMICPARTITIONGROUP = _descriptor.Descriptor( extension_ranges=[], oneofs=[ ], - serialized_start=1617, - serialized_end=1693, + serialized_start=2003, + serialized_end=2079, ) @@ -584,6 +676,103 @@ _DYNAMICPARTITIONMETADATA = _descriptor.Descriptor( message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='vabc_enabled', full_name='chromeos_update_engine.DynamicPartitionMetadata.vabc_enabled', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='vabc_compression_param', full_name='chromeos_update_engine.DynamicPartitionMetadata.vabc_compression_param', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2082, + serialized_end=2251, +) + + +_APEXINFO = _descriptor.Descriptor( + name='ApexInfo', + full_name='chromeos_update_engine.ApexInfo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='package_name', full_name='chromeos_update_engine.ApexInfo.package_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='version', full_name='chromeos_update_engine.ApexInfo.version', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + 
_descriptor.FieldDescriptor( + name='is_compressed', full_name='chromeos_update_engine.ApexInfo.is_compressed', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='decompressed_size', full_name='chromeos_update_engine.ApexInfo.decompressed_size', index=3, + number=4, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2253, + serialized_end=2352, +) + + +_APEXMETADATA = _descriptor.Descriptor( + name='ApexMetadata', + full_name='chromeos_update_engine.ApexMetadata', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='apex_info', full_name='chromeos_update_engine.ApexMetadata.apex_info', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -596,8 +785,8 @@ _DYNAMICPARTITIONMETADATA = _descriptor.Descriptor( extension_ranges=[], oneofs=[ ], - serialized_start=1695, - serialized_end=1810, + serialized_start=2354, + serialized_end=2421, ) @@ -614,14 +803,14 @@ _DELTAARCHIVEMANIFEST = _descriptor.Descriptor( has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=_b('\030\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='kernel_install_operations', full_name='chromeos_update_engine.DeltaArchiveManifest.kernel_install_operations', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=_b('\030\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='block_size', full_name='chromeos_update_engine.DeltaArchiveManifest.block_size', index=2, number=3, type=13, cpp_type=3, label=1, @@ -649,42 +838,42 @@ _DELTAARCHIVEMANIFEST = _descriptor.Descriptor( has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=_b('\030\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='new_kernel_info', full_name='chromeos_update_engine.DeltaArchiveManifest.new_kernel_info', index=6, number=7, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=_b('\030\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='old_rootfs_info', full_name='chromeos_update_engine.DeltaArchiveManifest.old_rootfs_info', index=7, number=8, type=11, cpp_type=10, label=1, 
has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=_b('\030\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='new_rootfs_info', full_name='chromeos_update_engine.DeltaArchiveManifest.new_rootfs_info', index=8, number=9, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=_b('\030\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='old_image_info', full_name='chromeos_update_engine.DeltaArchiveManifest.old_image_info', index=9, number=10, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=_b('\030\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='new_image_info', full_name='chromeos_update_engine.DeltaArchiveManifest.new_image_info', index=10, number=11, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), + serialized_options=_b('\030\001'), file=DESCRIPTOR), _descriptor.FieldDescriptor( name='minor_version', full_name='chromeos_update_engine.DeltaArchiveManifest.minor_version', index=11, number=12, type=13, cpp_type=3, label=1, @@ -713,6 +902,20 @@ _DELTAARCHIVEMANIFEST = _descriptor.Descriptor( message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='partial_update', full_name='chromeos_update_engine.DeltaArchiveManifest.partial_update', index=15, + number=16, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='apex_info', full_name='chromeos_update_engine.DeltaArchiveManifest.apex_info', index=16, + number=17, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -725,8 +928,8 @@ _DELTAARCHIVEMANIFEST = _descriptor.Descriptor( extension_ranges=[], oneofs=[ ], - serialized_start=1813, - serialized_end=2630, + serialized_start=2424, + serialized_end=3350, ) _SIGNATURES_SIGNATURE.containing_type = _SIGNATURES @@ -735,6 +938,10 @@ _INSTALLOPERATION.fields_by_name['type'].enum_type = _INSTALLOPERATION_TYPE _INSTALLOPERATION.fields_by_name['src_extents'].message_type = _EXTENT _INSTALLOPERATION.fields_by_name['dst_extents'].message_type = _EXTENT _INSTALLOPERATION_TYPE.containing_type = _INSTALLOPERATION +_COWMERGEOPERATION.fields_by_name['type'].enum_type = _COWMERGEOPERATION_TYPE +_COWMERGEOPERATION.fields_by_name['src_extent'].message_type = _EXTENT +_COWMERGEOPERATION.fields_by_name['dst_extent'].message_type = _EXTENT +_COWMERGEOPERATION_TYPE.containing_type = _COWMERGEOPERATION _PARTITIONUPDATE.fields_by_name['new_partition_signature'].message_type = 
_SIGNATURES_SIGNATURE _PARTITIONUPDATE.fields_by_name['old_partition_info'].message_type = _PARTITIONINFO _PARTITIONUPDATE.fields_by_name['new_partition_info'].message_type = _PARTITIONINFO @@ -743,7 +950,9 @@ _PARTITIONUPDATE.fields_by_name['hash_tree_data_extent'].message_type = _EXTENT _PARTITIONUPDATE.fields_by_name['hash_tree_extent'].message_type = _EXTENT _PARTITIONUPDATE.fields_by_name['fec_data_extent'].message_type = _EXTENT _PARTITIONUPDATE.fields_by_name['fec_extent'].message_type = _EXTENT +_PARTITIONUPDATE.fields_by_name['merge_operations'].message_type = _COWMERGEOPERATION _DYNAMICPARTITIONMETADATA.fields_by_name['groups'].message_type = _DYNAMICPARTITIONGROUP +_APEXMETADATA.fields_by_name['apex_info'].message_type = _APEXINFO _DELTAARCHIVEMANIFEST.fields_by_name['install_operations'].message_type = _INSTALLOPERATION _DELTAARCHIVEMANIFEST.fields_by_name['kernel_install_operations'].message_type = _INSTALLOPERATION _DELTAARCHIVEMANIFEST.fields_by_name['old_kernel_info'].message_type = _PARTITIONINFO @@ -754,14 +963,18 @@ _DELTAARCHIVEMANIFEST.fields_by_name['old_image_info'].message_type = _IMAGEINFO _DELTAARCHIVEMANIFEST.fields_by_name['new_image_info'].message_type = _IMAGEINFO _DELTAARCHIVEMANIFEST.fields_by_name['partitions'].message_type = _PARTITIONUPDATE _DELTAARCHIVEMANIFEST.fields_by_name['dynamic_partition_metadata'].message_type = _DYNAMICPARTITIONMETADATA +_DELTAARCHIVEMANIFEST.fields_by_name['apex_info'].message_type = _APEXINFO DESCRIPTOR.message_types_by_name['Extent'] = _EXTENT DESCRIPTOR.message_types_by_name['Signatures'] = _SIGNATURES DESCRIPTOR.message_types_by_name['PartitionInfo'] = _PARTITIONINFO DESCRIPTOR.message_types_by_name['ImageInfo'] = _IMAGEINFO DESCRIPTOR.message_types_by_name['InstallOperation'] = _INSTALLOPERATION +DESCRIPTOR.message_types_by_name['CowMergeOperation'] = _COWMERGEOPERATION DESCRIPTOR.message_types_by_name['PartitionUpdate'] = _PARTITIONUPDATE DESCRIPTOR.message_types_by_name['DynamicPartitionGroup'] = _DYNAMICPARTITIONGROUP DESCRIPTOR.message_types_by_name['DynamicPartitionMetadata'] = _DYNAMICPARTITIONMETADATA +DESCRIPTOR.message_types_by_name['ApexInfo'] = _APEXINFO +DESCRIPTOR.message_types_by_name['ApexMetadata'] = _APEXMETADATA DESCRIPTOR.message_types_by_name['DeltaArchiveManifest'] = _DELTAARCHIVEMANIFEST _sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -808,6 +1021,13 @@ InstallOperation = _reflection.GeneratedProtocolMessageType('InstallOperation', }) _sym_db.RegisterMessage(InstallOperation) +CowMergeOperation = _reflection.GeneratedProtocolMessageType('CowMergeOperation', (_message.Message,), { + 'DESCRIPTOR' : _COWMERGEOPERATION, + '__module__' : 'update_metadata_pb2' + # @@protoc_insertion_point(class_scope:chromeos_update_engine.CowMergeOperation) + }) +_sym_db.RegisterMessage(CowMergeOperation) + PartitionUpdate = _reflection.GeneratedProtocolMessageType('PartitionUpdate', (_message.Message,), { 'DESCRIPTOR' : _PARTITIONUPDATE, '__module__' : 'update_metadata_pb2' @@ -829,6 +1049,20 @@ DynamicPartitionMetadata = _reflection.GeneratedProtocolMessageType('DynamicPart }) _sym_db.RegisterMessage(DynamicPartitionMetadata) +ApexInfo = _reflection.GeneratedProtocolMessageType('ApexInfo', (_message.Message,), { + 'DESCRIPTOR' : _APEXINFO, + '__module__' : 'update_metadata_pb2' + # @@protoc_insertion_point(class_scope:chromeos_update_engine.ApexInfo) + }) +_sym_db.RegisterMessage(ApexInfo) + +ApexMetadata = _reflection.GeneratedProtocolMessageType('ApexMetadata', (_message.Message,), { + 'DESCRIPTOR' : _APEXMETADATA, + 
'__module__' : 'update_metadata_pb2'
+ # @@protoc_insertion_point(class_scope:chromeos_update_engine.ApexMetadata)
+ })
+_sym_db.RegisterMessage(ApexMetadata)
+
 DeltaArchiveManifest = _reflection.GeneratedProtocolMessageType('DeltaArchiveManifest', (_message.Message,), {
   'DESCRIPTOR' : _DELTAARCHIVEMANIFEST,
   '__module__' : 'update_metadata_pb2'
@@ -838,6 +1072,21 @@ _sym_db.RegisterMessage(DeltaArchiveManifest)

 DESCRIPTOR._options = None
+_SIGNATURES_SIGNATURE.fields_by_name['version']._options = None
+_IMAGEINFO.fields_by_name['board']._options = None
+_IMAGEINFO.fields_by_name['key']._options = None
+_IMAGEINFO.fields_by_name['channel']._options = None
+_IMAGEINFO.fields_by_name['version']._options = None
+_IMAGEINFO.fields_by_name['build_channel']._options = None
+_IMAGEINFO.fields_by_name['build_version']._options = None
 _INSTALLOPERATION_TYPE.values_by_name["MOVE"]._options = None
 _INSTALLOPERATION_TYPE.values_by_name["BSDIFF"]._options = None
+_DELTAARCHIVEMANIFEST.fields_by_name['install_operations']._options = None
+_DELTAARCHIVEMANIFEST.fields_by_name['kernel_install_operations']._options = None
+_DELTAARCHIVEMANIFEST.fields_by_name['old_kernel_info']._options = None
+_DELTAARCHIVEMANIFEST.fields_by_name['new_kernel_info']._options = None
+_DELTAARCHIVEMANIFEST.fields_by_name['old_rootfs_info']._options = None
+_DELTAARCHIVEMANIFEST.fields_by_name['new_rootfs_info']._options = None
+_DELTAARCHIVEMANIFEST.fields_by_name['old_image_info']._options = None
+_DELTAARCHIVEMANIFEST.fields_by_name['new_image_info']._options = None
 # @@protoc_insertion_point(module_scope)
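
Note on the `serialized_options=_b('\030\001')` values attached to the legacy DeltaArchiveManifest fields above (install_operations, kernel_install_operations, and the old/new kernel, rootfs, and image info fields): the bytes `\030\001` are the wire encoding of the standard `deprecated = true` field option, so this regeneration marks the major-version-1 manifest fields as deprecated. The matching `._options = None` assignments at the bottom are the usual generated-code cleanup for fields and enum values that carry options.

The sketch below shows how a consumer of the regenerated module might read the messages and fields added here (CowMergeOperation, ApexInfo, partial_update, apex_info, merge_operations, estimate_cow_size, and the Virtual A/B flags on DynamicPartitionMetadata). It is illustrative only, not part of this change; `manifest_blob` is assumed to hold an already-extracted, serialized DeltaArchiveManifest.

    from update_payload import update_metadata_pb2

    def dump_new_fields(manifest_blob):
      """Prints the fields introduced by this regeneration of the module."""
      manifest = update_metadata_pb2.DeltaArchiveManifest()
      manifest.ParseFromString(manifest_blob)

      # partial_update (field 16, bool): payload covers only some partitions.
      print('partial_update:', manifest.partial_update)

      # apex_info (field 17, repeated ApexInfo): updated APEX packages.
      for apex in manifest.apex_info:
        print('apex:', apex.package_name, apex.version,
              apex.is_compressed, apex.decompressed_size)

      # DynamicPartitionMetadata gained Virtual A/B compression knobs
      # (vabc_enabled, field 3; vabc_compression_param, field 4).
      dpm = manifest.dynamic_partition_metadata
      print('vabc:', dpm.vabc_enabled, dpm.vabc_compression_param)

      # PartitionUpdate gained merge_operations (field 18, repeated
      # CowMergeOperation, with src_extent/dst_extent wired to Extent)
      # and estimate_cow_size (field 19, uint64).
      for part in manifest.partitions:
        print(part.partition_name, 'estimate_cow_size:',
              part.estimate_cow_size)
        for op in part.merge_operations:
          print('  merge op:', op.type, 'src block',
                op.src_extent.start_block, '-> dst block',
                op.dst_extent.start_block)

ApexMetadata is a thin wrapper (a single repeated apex_info field) for carrying ApexInfo entries outside a full manifest; parsing it follows the same ParseFromString pattern.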