blob: 29ccb8e5627db24fbd39d852961ff5c83ebba3f9 [file] [log] [blame]
Amin Hassanif94b6432018-01-26 17:39:47 -08001#
2# Copyright (C) 2013 The Android Open Source Project
3#
4# Licensed under the Apache License, Version 2.0 (the "License");
5# you may not use this file except in compliance with the License.
6# You may obtain a copy of the License at
7#
8# http://www.apache.org/licenses/LICENSE-2.0
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS,
12# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15#
Gilad Arnold553b0ec2013-01-26 01:00:39 -080016
17"""Applying a Chrome OS update payload.
18
19This module is used internally by the main Payload class for applying an update
20payload. The interface for invoking the applier is as follows:
21
22 applier = PayloadApplier(payload)
23 applier.Run(...)
24
25"""
26
Andrew Lassalle165843c2019-11-05 13:30:34 -080027from __future__ import absolute_import
Allie Wood12f59aa2015-04-06 11:05:12 -070028from __future__ import print_function
29
Gilad Arnold553b0ec2013-01-26 01:00:39 -080030import array
31import bz2
32import hashlib
Amin Hassani0de7f782017-12-07 12:13:03 -080033# Not everywhere we can have the lzma library so we ignore it if we didn't have
34# it because it is not going to be used. For example, 'cros flash' uses
35# devserver code which eventually loads this file, but the lzma library is not
36# included in the client test devices, and it is not necessary to do so. But
37# lzma is not used in 'cros flash' so it should be fine. Python 3.x include
38# lzma, but for backward compatibility with Python 2.7, backports-lzma is
39# needed.
40try:
41 import lzma
42except ImportError:
43 try:
44 from backports import lzma
45 except ImportError:
46 pass
Gilad Arnold553b0ec2013-01-26 01:00:39 -080047import os
Gilad Arnold553b0ec2013-01-26 01:00:39 -080048import subprocess
49import sys
50import tempfile
51
Amin Hassanib05a65a2017-12-18 15:15:32 -080052from update_payload import common
53from update_payload.error import PayloadError
Gilad Arnold553b0ec2013-01-26 01:00:39 -080054
Gilad Arnold553b0ec2013-01-26 01:00:39 -080055#
56# Helper functions.
57#
Gilad Arnold382df5c2013-05-03 12:49:28 -070058def _VerifySha256(file_obj, expected_hash, name, length=-1):
Gilad Arnold553b0ec2013-01-26 01:00:39 -080059 """Verifies the SHA256 hash of a file.
60
61 Args:
62 file_obj: file object to read
63 expected_hash: the hash digest we expect to be getting
64 name: name string of this hash, for error reporting
Gilad Arnold382df5c2013-05-03 12:49:28 -070065 length: precise length of data to verify (optional)
Allie Wood12f59aa2015-04-06 11:05:12 -070066
Gilad Arnold553b0ec2013-01-26 01:00:39 -080067 Raises:
Gilad Arnold382df5c2013-05-03 12:49:28 -070068 PayloadError if computed hash doesn't match expected one, or if fails to
69 read the specified length of data.
Gilad Arnold553b0ec2013-01-26 01:00:39 -080070 """
Gilad Arnold553b0ec2013-01-26 01:00:39 -080071 hasher = hashlib.sha256()
72 block_length = 1024 * 1024
Andrew Lassalle165843c2019-11-05 13:30:34 -080073 max_length = length if length >= 0 else sys.maxsize
Gilad Arnold553b0ec2013-01-26 01:00:39 -080074
Gilad Arnold382df5c2013-05-03 12:49:28 -070075 while max_length > 0:
Gilad Arnold553b0ec2013-01-26 01:00:39 -080076 read_length = min(max_length, block_length)
77 data = file_obj.read(read_length)
78 if not data:
79 break
80 max_length -= len(data)
81 hasher.update(data)
82
Gilad Arnold382df5c2013-05-03 12:49:28 -070083 if length >= 0 and max_length > 0:
84 raise PayloadError(
85 'insufficient data (%d instead of %d) when verifying %s' %
86 (length - max_length, length, name))
87
Gilad Arnold553b0ec2013-01-26 01:00:39 -080088 actual_hash = hasher.digest()
89 if actual_hash != expected_hash:
90 raise PayloadError('%s hash (%s) not as expected (%s)' %
Gilad Arnold96405372013-05-04 00:24:58 -070091 (name, common.FormatSha256(actual_hash),
92 common.FormatSha256(expected_hash)))
Gilad Arnold553b0ec2013-01-26 01:00:39 -080093
94
95def _ReadExtents(file_obj, extents, block_size, max_length=-1):
96 """Reads data from file as defined by extent sequence.
97
98 This tries to be efficient by not copying data as it is read in chunks.
99
100 Args:
101 file_obj: file object
102 extents: sequence of block extents (offset and length)
103 block_size: size of each block
104 max_length: maximum length to read (optional)
Allie Wood12f59aa2015-04-06 11:05:12 -0700105
Gilad Arnold553b0ec2013-01-26 01:00:39 -0800106 Returns:
107 A character array containing the concatenated read data.
Gilad Arnold553b0ec2013-01-26 01:00:39 -0800108 """
Andrew8a1de4b2019-11-23 20:32:35 -0800109 data = array.array('B')
Gilad Arnold272a4992013-05-08 13:12:53 -0700110 if max_length < 0:
Andrew Lassalle165843c2019-11-05 13:30:34 -0800111 max_length = sys.maxsize
Gilad Arnold553b0ec2013-01-26 01:00:39 -0800112 for ex in extents:
113 if max_length == 0:
114 break
Gilad Arnold272a4992013-05-08 13:12:53 -0700115 read_length = min(max_length, ex.num_blocks * block_size)
Gilad Arnold658185a2013-05-08 17:57:54 -0700116
Amin Hassani55c75412019-10-07 11:20:39 -0700117 file_obj.seek(ex.start_block * block_size)
118 data.fromfile(file_obj, read_length)
Gilad Arnold658185a2013-05-08 17:57:54 -0700119
Gilad Arnold272a4992013-05-08 13:12:53 -0700120 max_length -= read_length
Gilad Arnold658185a2013-05-08 17:57:54 -0700121
Gilad Arnold553b0ec2013-01-26 01:00:39 -0800122 return data
123
124
def _WriteExtents(file_obj, data, extents, block_size, base_name):
  """Writes data to file as defined by extent sequence.

  This tries to be efficient by not copy data as it is written in chunks.

  Args:
    file_obj: file object
    data: data to write
    extents: sequence of block extents (offset and length)
    block_size: size of each block
    base_name: name string of extent sequence for error reporting

  Raises:
    PayloadError when things don't add up.
  """
  pos = 0
  remaining = len(data)
  for ex, ex_name in common.ExtentIter(extents, base_name):
    # Every extent must consume at least one byte of data.
    if not remaining:
      raise PayloadError('%s: more write extents than data' % ex_name)
    chunk_length = min(remaining, ex.num_blocks * block_size)
    file_obj.seek(ex.start_block * block_size)
    file_obj.write(data[pos:(pos + chunk_length)])

    pos += chunk_length
    remaining -= chunk_length

  # Leftover data means the extents did not cover everything.
  if remaining:
    raise PayloadError('%s: more data than write extents' % base_name)
154
155
def _ExtentsToBspatchArg(extents, block_size, base_name, data_length=-1):
  """Translates an extent sequence into a bspatch-compatible string argument.

  Args:
    extents: sequence of block extents (offset and length)
    block_size: size of each block
    base_name: name string of extent sequence for error reporting
    data_length: the actual total length of the data in bytes (optional)

  Returns:
    A tuple consisting of (i) a string of the form
    "off_1:len_1,...,off_n:len_n", (ii) an offset where zero padding is needed
    for filling the last extent, (iii) the length of the padding (zero means no
    padding is needed and the extents cover the full length of data).

  Raises:
    PayloadError if data_length is too short or too long.
  """
  segments = []
  pad_off = pad_len = 0
  # A negative length means the extents define the length themselves.
  remaining = sys.maxsize if data_length < 0 else data_length
  for ex, ex_name in common.ExtentIter(extents, base_name):
    if not remaining:
      raise PayloadError('%s: more extents than total data length' % ex_name)

    start_byte = ex.start_block * block_size
    num_bytes = ex.num_blocks * block_size
    if remaining < num_bytes:
      # We're only padding a real extent.
      pad_off = start_byte + remaining
      pad_len = num_bytes - remaining
      num_bytes = remaining

    segments.append('%d:%d' % (start_byte, num_bytes))
    remaining -= num_bytes

  if remaining:
    raise PayloadError('%s: extents not covering full data length' % base_name)

  return ','.join(segments), pad_off, pad_len
197
198
Gilad Arnold553b0ec2013-01-26 01:00:39 -0800199#
200# Payload application.
201#
class PayloadApplier(object):
  """Applying an update payload.

  This is a short-lived object whose purpose is to isolate the logic used for
  applying an update payload.
  """

  def __init__(self, payload, bsdiff_in_place=True, bspatch_path=None,
               puffpatch_path=None, truncate_to_expected_size=True):
    """Initialize the applier.

    Args:
      payload: the payload object to check
      bsdiff_in_place: whether to perform BSDIFF operation in-place (optional)
      bspatch_path: path to the bspatch binary (optional)
      puffpatch_path: path to the puffpatch binary (optional)
      truncate_to_expected_size: whether to truncate the resulting partitions
                                 to their expected sizes, as specified in the
                                 payload (optional)
    """
    assert payload.is_init, 'uninitialized update payload'
    self.payload = payload
    self.block_size = payload.manifest.block_size
    self.minor_version = payload.manifest.minor_version
    self.bsdiff_in_place = bsdiff_in_place
    self.bspatch_path = bspatch_path or 'bspatch'
    self.puffpatch_path = puffpatch_path or 'puffin'
    self.truncate_to_expected_size = truncate_to_expected_size

  def _ApplyReplaceOperation(self, op, op_name, out_data, part_file, part_size):
    """Applies a REPLACE{,_BZ,_XZ} operation.

    Args:
      op: the operation object
      op_name: name string for error reporting
      out_data: the data to be written
      part_file: the partition file object
      part_size: the size of the partition

    Raises:
      PayloadError if something goes wrong.
    """
    block_size = self.block_size
    data_length = len(out_data)

    # Decompress data if needed.
    if op.type == common.OpType.REPLACE_BZ:
      out_data = bz2.decompress(out_data)
      data_length = len(out_data)
    elif op.type == common.OpType.REPLACE_XZ:
      # pylint: disable=no-member
      out_data = lzma.decompress(out_data)
      data_length = len(out_data)

    # Write data to blocks specified in dst extents.
    data_start = 0
    for ex, ex_name in common.ExtentIter(op.dst_extents,
                                         '%s.dst_extents' % op_name):
      start_block = ex.start_block
      num_blocks = ex.num_blocks
      count = num_blocks * block_size

      data_end = data_start + count

      # Make sure we're not running past partition boundary.
      if (start_block + num_blocks) * block_size > part_size:
        raise PayloadError(
            '%s: extent (%s) exceeds partition size (%d)' %
            (ex_name, common.FormatExtent(ex, block_size),
             part_size))

      # Make sure that we have enough data to write. Only the last extent may
      # be partially filled, so at most one block of zero padding is allowed.
      # BUG FIX: the format string was previously never interpolated, so the
      # raised message contained a literal '%s'.
      if data_end >= data_length + block_size:
        raise PayloadError(
            '%s: more dst blocks than data (even with padding)' % op_name)

      # Pad with zeros if necessary.
      if data_end > data_length:
        padding = data_end - data_length
        out_data += b'\0' * padding

      # NOTE(review): seeking the payload file here has no visible effect in
      # this method (only part_file is written below, and ReadDataBlob takes
      # an explicit offset) -- confirm intent before removing.
      self.payload.payload_file.seek(start_block * block_size)
      part_file.seek(start_block * block_size)
      part_file.write(out_data[data_start:data_end])

      data_start += count

    # Make sure we wrote all data.
    if data_start < data_length:
      raise PayloadError('%s: wrote fewer bytes (%d) than expected (%d)' %
                         (op_name, data_start, data_length))

  def _ApplyZeroOperation(self, op, op_name, part_file):
    """Applies a ZERO operation.

    Args:
      op: the operation object
      op_name: name string for error reporting
      part_file: the partition file object

    Raises:
      PayloadError if something goes wrong.
    """
    block_size = self.block_size
    base_name = '%s.dst_extents' % op_name

    # Iterate over the extents and write zero.
    # pylint: disable=unused-variable
    for ex, ex_name in common.ExtentIter(op.dst_extents, base_name):
      part_file.seek(ex.start_block * block_size)
      part_file.write(b'\0' * (ex.num_blocks * block_size))

  def _ApplySourceCopyOperation(self, op, op_name, old_part_file,
                                new_part_file):
    """Applies a SOURCE_COPY operation.

    Args:
      op: the operation object
      op_name: name string for error reporting
      old_part_file: the old partition file object
      new_part_file: the new partition file object

    Raises:
      PayloadError if something goes wrong.
    """
    if not old_part_file:
      raise PayloadError(
          '%s: no source partition file provided for operation type (%d)' %
          (op_name, op.type))

    block_size = self.block_size

    # Gather input raw data from src extents.
    in_data = _ReadExtents(old_part_file, op.src_extents, block_size)

    # Dump extracted data to dst extents.
    _WriteExtents(new_part_file, in_data, op.dst_extents, block_size,
                  '%s.dst_extents' % op_name)

  def _BytesInExtents(self, extents, base_name):
    """Counts the length of extents in bytes.

    Args:
      extents: The list of Extents.
      base_name: For error reporting.

    Returns:
      The number of bytes in extents.
    """

    length = 0
    # pylint: disable=unused-variable
    for ex, ex_name in common.ExtentIter(extents, base_name):
      length += ex.num_blocks * self.block_size
    return length

  def _ApplyDiffOperation(self, op, op_name, patch_data, old_part_file,
                          new_part_file):
    """Applies a SOURCE_BSDIFF, BROTLI_BSDIFF or PUFFDIFF operation.

    When both partition files expose a real file descriptor, the patch tool is
    invoked directly against the partitions (via /dev/fd paths and extent
    arguments). Otherwise, the source extents are extracted to a temp file,
    patched into another temp file, and written back to the dst extents.

    Args:
      op: the operation object
      op_name: name string for error reporting
      patch_data: the binary patch content
      old_part_file: the source partition file object
      new_part_file: the target partition file object

    Raises:
      PayloadError if something goes wrong.
    """
    if not old_part_file:
      raise PayloadError(
          '%s: no source partition file provided for operation type (%d)' %
          (op_name, op.type))

    block_size = self.block_size

    # Dump patch data to file.
    with tempfile.NamedTemporaryFile(delete=False) as patch_file:
      patch_file_name = patch_file.name
      patch_file.write(patch_data)

    if (hasattr(new_part_file, 'fileno') and
        ((not old_part_file) or hasattr(old_part_file, 'fileno'))):
      # Construct input and output extents argument for bspatch.
      # BUG FIX: the '%s.src_extents'/'%s.dst_extents' base names passed to
      # _BytesInExtents were previously never interpolated with op_name, so
      # error messages contained a literal '%s'.
      in_extents_arg, _, _ = _ExtentsToBspatchArg(
          op.src_extents, block_size, '%s.src_extents' % op_name,
          data_length=op.src_length if op.src_length else
          self._BytesInExtents(op.src_extents, '%s.src_extents' % op_name))
      out_extents_arg, pad_off, pad_len = _ExtentsToBspatchArg(
          op.dst_extents, block_size, '%s.dst_extents' % op_name,
          data_length=op.dst_length if op.dst_length else
          self._BytesInExtents(op.dst_extents, '%s.dst_extents' % op_name))

      new_file_name = '/dev/fd/%d' % new_part_file.fileno()
      # Diff from source partition.
      old_file_name = '/dev/fd/%d' % old_part_file.fileno()

      # In python3, file descriptors(fd) are not passed to child processes by
      # default. To pass the fds to the child processes, we need to set the
      # flag 'inheritable' in the fds and make the subprocess calls with the
      # argument close_fds set to False.
      if sys.version_info.major >= 3:
        os.set_inheritable(new_part_file.fileno(), True)
        os.set_inheritable(old_part_file.fileno(), True)

      if op.type in (common.OpType.SOURCE_BSDIFF, common.OpType.BROTLI_BSDIFF):
        # Invoke bspatch on partition file with extents args.
        bspatch_cmd = [self.bspatch_path, old_file_name, new_file_name,
                       patch_file_name, in_extents_arg, out_extents_arg]
        subprocess.check_call(bspatch_cmd, close_fds=False)
      elif op.type == common.OpType.PUFFDIFF:
        # Invoke puffpatch on partition file with extents args.
        puffpatch_cmd = [self.puffpatch_path,
                         "--operation=puffpatch",
                         "--src_file=%s" % old_file_name,
                         "--dst_file=%s" % new_file_name,
                         "--patch_file=%s" % patch_file_name,
                         "--src_extents=%s" % in_extents_arg,
                         "--dst_extents=%s" % out_extents_arg]
        subprocess.check_call(puffpatch_cmd, close_fds=False)
      else:
        raise PayloadError("Unknown operation %s" % op.type)

      # Pad with zeros past the total output length.
      if pad_len:
        new_part_file.seek(pad_off)
        new_part_file.write(b'\0' * pad_len)
    else:
      # Gather input raw data and write to a temp file.
      input_part_file = old_part_file if old_part_file else new_part_file
      in_data = _ReadExtents(input_part_file, op.src_extents, block_size,
                             max_length=op.src_length if op.src_length else
                             self._BytesInExtents(op.src_extents,
                                                  '%s.src_extents' % op_name))
      with tempfile.NamedTemporaryFile(delete=False) as in_file:
        in_file_name = in_file.name
        in_file.write(in_data)

      # Allocate temporary output file.
      with tempfile.NamedTemporaryFile(delete=False) as out_file:
        out_file_name = out_file.name

      if op.type in (common.OpType.SOURCE_BSDIFF, common.OpType.BROTLI_BSDIFF):
        # Invoke bspatch.
        bspatch_cmd = [self.bspatch_path, in_file_name, out_file_name,
                       patch_file_name]
        subprocess.check_call(bspatch_cmd)
      elif op.type == common.OpType.PUFFDIFF:
        # Invoke puffpatch.
        puffpatch_cmd = [self.puffpatch_path,
                         "--operation=puffpatch",
                         "--src_file=%s" % in_file_name,
                         "--dst_file=%s" % out_file_name,
                         "--patch_file=%s" % patch_file_name]
        subprocess.check_call(puffpatch_cmd)
      else:
        raise PayloadError("Unknown operation %s" % op.type)

      # Read output.
      with open(out_file_name, 'rb') as out_file:
        out_data = out_file.read()
        if len(out_data) != op.dst_length:
          raise PayloadError(
              '%s: actual patched data length (%d) not as expected (%d)' %
              (op_name, len(out_data), op.dst_length))

      # Write output back to partition, with padding.
      unaligned_out_len = len(out_data) % block_size
      if unaligned_out_len:
        out_data += b'\0' * (block_size - unaligned_out_len)
      _WriteExtents(new_part_file, out_data, op.dst_extents, block_size,
                    '%s.dst_extents' % op_name)

      # Delete input/output files.
      os.remove(in_file_name)
      os.remove(out_file_name)

    # Delete patch file.
    os.remove(patch_file_name)

  def _ApplyOperations(self, operations, base_name, old_part_file,
                       new_part_file, part_size):
    """Applies a sequence of update operations to a partition.

    Args:
      operations: the sequence of operations
      base_name: the name of the operation sequence
      old_part_file: the old partition file object, open for reading/writing
      new_part_file: the new partition file object, open for reading/writing
      part_size: the partition size

    Raises:
      PayloadError if anything goes wrong while processing the payload.
    """
    for op, op_name in common.OperationIter(operations, base_name):
      # Read data blob.
      data = self.payload.ReadDataBlob(op.data_offset, op.data_length)

      # Dispatch on the operation type.
      if op.type in (common.OpType.REPLACE, common.OpType.REPLACE_BZ,
                     common.OpType.REPLACE_XZ):
        self._ApplyReplaceOperation(op, op_name, data, new_part_file, part_size)
      elif op.type == common.OpType.ZERO:
        self._ApplyZeroOperation(op, op_name, new_part_file)
      elif op.type == common.OpType.SOURCE_COPY:
        self._ApplySourceCopyOperation(op, op_name, old_part_file,
                                       new_part_file)
      elif op.type in (common.OpType.SOURCE_BSDIFF, common.OpType.PUFFDIFF,
                       common.OpType.BROTLI_BSDIFF):
        self._ApplyDiffOperation(op, op_name, data, old_part_file,
                                 new_part_file)
      else:
        raise PayloadError('%s: unknown operation type (%d)' %
                           (op_name, op.type))

  def _ApplyToPartition(self, operations, part_name, base_name,
                        new_part_file_name, new_part_info,
                        old_part_file_name=None, old_part_info=None):
    """Applies an update to a partition.

    Args:
      operations: the sequence of update operations to apply
      part_name: the name of the partition, for error reporting
      base_name: the name of the operation sequence
      new_part_file_name: file name to write partition data to
      new_part_info: size and expected hash of dest partition
      old_part_file_name: file name of source partition (optional)
      old_part_info: size and expected hash of source partition (optional)

    Raises:
      PayloadError if anything goes wrong with the update.
    """
    # Do we have a source partition?
    if old_part_file_name:
      # Verify the source partition.
      with open(old_part_file_name, 'rb') as old_part_file:
        _VerifySha256(old_part_file, old_part_info.hash,
                      'old ' + part_name, length=old_part_info.size)
      new_part_file_mode = 'r+b'
      # Create (or truncate) the dst file before reopening it read/write.
      open(new_part_file_name, 'w').close()

    else:
      # We need to create/truncate the dst partition file.
      new_part_file_mode = 'w+b'

    # Apply operations.
    with open(new_part_file_name, new_part_file_mode) as new_part_file:
      old_part_file = (open(old_part_file_name, 'r+b')
                       if old_part_file_name else None)
      try:
        self._ApplyOperations(operations, base_name, old_part_file,
                              new_part_file, new_part_info.size)
      finally:
        if old_part_file:
          old_part_file.close()

      # Truncate the result, if so instructed.
      if self.truncate_to_expected_size:
        new_part_file.seek(0, 2)
        if new_part_file.tell() > new_part_info.size:
          new_part_file.seek(new_part_info.size)
          new_part_file.truncate()

    # Verify the resulting partition.
    with open(new_part_file_name, 'rb') as new_part_file:
      _VerifySha256(new_part_file, new_part_info.hash,
                    'new ' + part_name, length=new_part_info.size)

  def Run(self, new_parts, old_parts=None):
    """Applier entry point, invoking all update operations.

    Args:
      new_parts: map of partition name to dest partition file
      old_parts: map of partition name to source partition file (optional)

    Raises:
      PayloadError if payload application failed.
    """
    if old_parts is None:
      old_parts = {}

    self.payload.ResetFile()

    new_part_info = {}
    old_part_info = {}
    install_operations = []

    manifest = self.payload.manifest
    for part in manifest.partitions:
      name = part.partition_name
      new_part_info[name] = part.new_partition_info
      old_part_info[name] = part.old_partition_info
      install_operations.append((name, part.operations))

    part_names = set(new_part_info.keys())  # Equivalently, old_part_info.keys()

    # Make sure the arguments are sane and match the payload.
    new_part_names = set(new_parts.keys())
    if new_part_names != part_names:
      raise PayloadError('missing dst partition(s) %s' %
                         ', '.join(part_names - new_part_names))

    old_part_names = set(old_parts.keys())
    if part_names - old_part_names:
      if self.payload.IsDelta():
        raise PayloadError('trying to apply a delta update without src '
                           'partition(s) %s' %
                           ', '.join(part_names - old_part_names))
    elif old_part_names == part_names:
      if self.payload.IsFull():
        raise PayloadError('trying to apply a full update onto src partitions')
    else:
      raise PayloadError('not all src partitions provided')

    for name, operations in install_operations:
      # Apply update to partition.
      self._ApplyToPartition(
          operations, name, '%s_install_operations' % name, new_parts[name],
          new_part_info[name], old_parts.get(name, None), old_part_info[name])