1#!/usr/bin/env python
2#
3# Copyright (C) 2008 The Android Open Source Project
4#
5# Licensed under the Apache License, Version 2.0 (the "License");
6# you may not use this file except in compliance with the License.
7# You may obtain a copy of the License at
8#
9#      http://www.apache.org/licenses/LICENSE-2.0
10#
11# Unless required by applicable law or agreed to in writing, software
12# distributed under the License is distributed on an "AS IS" BASIS,
13# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14# See the License for the specific language governing permissions and
15# limitations under the License.
16
17"""
18Given a target-files zipfile, produces an OTA package that installs that build.
19An incremental OTA is produced if -i is given, otherwise a full OTA is produced.
20
21Usage:  ota_from_target_files [options] input_target_files output_ota_package
22
Common options that apply to both non-A/B and A/B OTAs
24
25  --downgrade
26      Intentionally generate an incremental OTA that updates from a newer build
27      to an older one (e.g. downgrading from P preview back to O MR1).
28      "ota-downgrade=yes" will be set in the package metadata file. A data wipe
29      will always be enforced when using this flag, so "ota-wipe=yes" will also
30      be included in the metadata file. The update-binary in the source build
31      will be used in the OTA package, unless --binary flag is specified. Please
32      also check the comment for --override_timestamp below.
33
34  -i  (--incremental_from) <file>
35      Generate an incremental OTA using the given target-files zip as the
36      starting build.
37
38  -k  (--package_key) <key>
39      Key to use to sign the package (default is the value of
40      default_system_dev_certificate from the input target-files's
41      META/misc_info.txt, or "build/make/target/product/security/testkey" if
42      that value is not specified).
43
      For incremental OTAs, the default value is based on the source
      target-files, not the target build.
46
47  --override_timestamp
48      Intentionally generate an incremental OTA that updates from a newer build
49      to an older one (based on timestamp comparison), by setting the downgrade
      flag in the package metadata. This differs from the --downgrade flag in
      that no data wipe is enforced, because we know for sure this is NOT an
      actual downgrade; the two builds merely happen to have been cut in reverse
      order (e.g. from two branches). A legitimate use case is cutting a new
      build C (after having A and B) while wanting to enforce an update path of
      A -> C -> B. Specifying --downgrade may not help, since that would enforce
      a data wipe for the C -> B update.
57
58      We used to set a fake timestamp in the package metadata for this flow. But
59      now we consolidate the two cases (i.e. an actual downgrade, or a downgrade
60      based on timestamp) with the same "ota-downgrade=yes" flag, with the
61      difference being whether "ota-wipe=yes" is set.
62
63  --wipe_user_data
64      Generate an OTA package that will wipe the user data partition when
65      installed.
66
67  --retrofit_dynamic_partitions
68      Generates an OTA package that updates a device to support dynamic
69      partitions (default False). This flag is implied when generating
70      an incremental OTA where the base build does not support dynamic
71      partitions but the target build does. For A/B, when this flag is set,
72      --skip_postinstall is implied.
73
74  --skip_compatibility_check
75      Skip checking compatibility of the input target files package.
76
77  --output_metadata_path
      Write a copy of the metadata to a separate file, so that users can read
      the post-build fingerprint without extracting the OTA package.
80
81  --force_non_ab
82      This flag can only be set on an A/B device that also supports non-A/B
83      updates. Implies --two_step.
      If set, generates a non-A/B update package.
      If not set, generates an A/B package for an A/B device and a non-A/B
      package for a non-A/B device.
87
88  -o  (--oem_settings) <main_file[,additional_files...]>
      Comma-separated list of files used to specify the expected OEM-specific
      properties on the OEM partition of the intended device. Multiple expected
      values can be used by providing multiple files. Only the first dict will
      be used to compute the fingerprint, while the rest will be used to assert
      OEM-specific properties.
94
95Non-A/B OTA specific options
96
97  -b  (--binary) <file>
98      Use the given binary as the update-binary in the output package, instead
99      of the binary in the build's target_files. Use for development only.
100
101  --block
      Generate a block-based OTA for a non-A/B device. Support for file-based
      OTAs has been deprecated since O. Block-based OTAs are used by default for
      all non-A/B devices, so this flag is kept only to avoid breaking existing
      callers.
106
107  -e  (--extra_script) <file>
108      Insert the contents of file at the end of the update script.
109
110  --full_bootloader
111      Similar to --full_radio. When generating an incremental OTA, always
      include a full copy of the bootloader image.
113
114  --full_radio
      When generating an incremental OTA, always include a full copy of the
      radio image. This option is only meaningful when -i is specified, because
      a full radio image is always included in a full OTA if applicable.
118
119  --log_diff <file>
      Generate a log file that shows the differences between the source and
      target builds for an incremental package. This option is only meaningful
      when -i is specified.
123
124  --oem_no_mount
125      For devices with OEM-specific properties but without an OEM partition, do
126      not mount the OEM partition in the updater-script. This should be very
      rarely used, since devices are expected to have a dedicated OEM partition
      for OEM-specific properties. Only meaningful when -o is specified.
129
130  --stash_threshold <float>
131      Specify the threshold that will be used to compute the maximum allowed
132      stash size (defaults to 0.8).
133
134  -t  (--worker_threads) <int>
135      Specify the number of worker-threads that will be used when generating
136      patches for incremental updates (defaults to 3).
137
138  --verify
139      Verify the checksums of the updated system and vendor (if any) partitions.
140      Non-A/B incremental OTAs only.
141
142  -2  (--two_step)
143      Generate a 'two-step' OTA package, where recovery is updated first, so
144      that any changes made to the system partition are done using the new
145      recovery (new kernel, etc.).
146
147A/B OTA specific options
148
149  --disable_fec_computation
      Disable the on-device FEC data computation for incremental updates.
151
152  --include_secondary
153      Additionally include the payload for secondary slot images (default:
154      False). Only meaningful when generating A/B OTAs.
155
156      By default, an A/B OTA package doesn't contain the images for the
157      secondary slot (e.g. system_other.img). Specifying this flag allows
158      generating a separate payload that will install secondary slot images.
159
160      Such a package needs to be applied in a two-stage manner, with a reboot
161      in-between. During the first stage, the updater applies the primary
162      payload only. Upon finishing, it reboots the device into the newly updated
163      slot. It then continues to install the secondary payload to the inactive
164      slot, but without switching the active slot at the end (needs the matching
165      support in update_engine, i.e. SWITCH_SLOT_ON_REBOOT flag).
166
      Due to the special install procedure, the secondary payload will always
      be generated as a full payload.
169
170  --payload_signer <signer>
171      Specify the signer when signing the payload and metadata for A/B OTAs.
172      By default (i.e. without this flag), it calls 'openssl pkeyutl' to sign
173      with the package private key. If the private key cannot be accessed
174      directly, a payload signer that knows how to do that should be specified.
175      The signer will be supplied with "-inkey <path_to_key>",
176      "-in <input_file>" and "-out <output_file>" parameters.
177
178  --payload_signer_args <args>
179      Specify the arguments needed for payload signer.
180
181  --payload_signer_maximum_signature_size <signature_size>
182      The maximum signature size (in bytes) that would be generated by the given
      payload signer. Only meaningful when a custom payload signer is specified
      via '--payload_signer'.
      If the signer uses an RSA key, this should be the number of bytes needed
      to represent the modulus. If it uses an EC key, this is the size of a
187      DER-encoded ECDSA signature.
188
189  --payload_signer_key_size <key_size>
      Deprecated. Use '--payload_signer_maximum_signature_size' instead.
191
192  --boot_variable_file <path>
193      A file that contains the possible values of ro.boot.* properties. It's
194      used to calculate the possible runtime fingerprints when some
195      ro.product.* properties are overridden by the 'import' statement.
196      The file expects one property per line, and each line has the following
197      format: 'prop_name=value1,value2'. e.g. 'ro.boot.product.sku=std,pro'
198
199  --skip_postinstall
200      Skip the postinstall hooks when generating an A/B OTA package (default:
201      False). Note that this discards ALL the hooks, including non-optional
      ones. Should only be used if the caller knows it's safe to do so (e.g.
      all the postinstall work is to dexopt apps and a data wipe will happen
      immediately after). Only meaningful when generating A/B OTAs.
205
206  --partial "<PARTITION> [<PARTITION>[...]]"
      Generate partial updates, overriding the ab_partitions list with the
      given list.
209
210  --custom_image <custom_partition=custom_image>
211      Use the specified custom_image to update custom_partition when generating
212      an A/B OTA package. e.g. "--custom_image oem=oem.img --custom_image
213      cus=cus_test.img"
214
215  --disable_vabc
216      Disable Virtual A/B Compression, for builds that have compression enabled
217      by default.
218
219  --vabc_downgrade
220      Don't disable Virtual A/B Compression for downgrading OTAs.
      For VABC downgrades, merging must finish before the data wipe, and since
      a data wipe is required for a downgrade OTA, this may cause a long wait in
      recovery.
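
Example invocations (file names below are illustrative):

  # Full OTA from a single target-files zip, signed with the default test key.
  ota_from_target_files target-files.zip full-ota.zip

  # Incremental OTA from a source build to a target build.
  ota_from_target_files -i source-target-files.zip \
      target-target-files.zip incremental-ota.zip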
224"""
225
226from __future__ import print_function
227
228import logging
229import multiprocessing
230import os
231import os.path
232import re
233import shlex
234import shutil
235import struct
236import subprocess
237import sys
238import zipfile
239
240import common
241import ota_utils
242from ota_utils import (UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata,
243                       PropertyFiles, SECURITY_PATCH_LEVEL_PROP_NAME)
244import target_files_diff
245from check_target_files_vintf import CheckVintfIfTrebleEnabled
246from non_ab_ota import GenerateNonAbOtaPackage
247
248if sys.hexversion < 0x02070000:
249  print("Python 2.7 or newer is required.", file=sys.stderr)
250  sys.exit(1)
251
252logger = logging.getLogger(__name__)
253
254OPTIONS = ota_utils.OPTIONS
255OPTIONS.verify = False
256OPTIONS.patch_threshold = 0.95
257OPTIONS.wipe_user_data = False
258OPTIONS.extra_script = None
259OPTIONS.worker_threads = multiprocessing.cpu_count() // 2
260if OPTIONS.worker_threads == 0:
261  OPTIONS.worker_threads = 1
262OPTIONS.two_step = False
263OPTIONS.include_secondary = False
264OPTIONS.block_based = True
265OPTIONS.updater_binary = None
266OPTIONS.oem_dicts = None
267OPTIONS.oem_source = None
268OPTIONS.oem_no_mount = False
269OPTIONS.full_radio = False
270OPTIONS.full_bootloader = False
271# Stash size cannot exceed cache_size * threshold.
272OPTIONS.cache_size = None
273OPTIONS.stash_threshold = 0.8
274OPTIONS.log_diff = None
275OPTIONS.payload_signer = None
276OPTIONS.payload_signer_args = []
277OPTIONS.payload_signer_maximum_signature_size = None
278OPTIONS.extracted_input = None
279OPTIONS.skip_postinstall = False
280OPTIONS.skip_compatibility_check = False
281OPTIONS.disable_fec_computation = False
282OPTIONS.disable_verity_computation = False
283OPTIONS.partial = None
284OPTIONS.custom_images = {}
285OPTIONS.disable_vabc = False
286OPTIONS.spl_downgrade = False
287OPTIONS.vabc_downgrade = False
288
289POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
290DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt'
291AB_PARTITIONS = 'META/ab_partitions.txt'
292
293# Files to be unzipped for target diffing purpose.
294TARGET_DIFFING_UNZIP_PATTERN = ['BOOT', 'RECOVERY', 'SYSTEM/*', 'VENDOR/*',
295                                'PRODUCT/*', 'SYSTEM_EXT/*', 'ODM/*',
296                                'VENDOR_DLKM/*', 'ODM_DLKM/*']
297RETROFIT_DAP_UNZIP_PATTERN = ['OTA/super_*.img', AB_PARTITIONS]
298
299# Images to be excluded from secondary payload. We essentially only keep
300# 'system_other' and bootloader partitions.
301SECONDARY_PAYLOAD_SKIPPED_IMAGES = [
302    'boot', 'dtbo', 'modem', 'odm', 'odm_dlkm', 'product', 'radio', 'recovery',
303    'system_ext', 'vbmeta', 'vbmeta_system', 'vbmeta_vendor', 'vendor',
304    'vendor_boot']
305
306
307class PayloadSigner(object):
308  """A class that wraps the payload signing works.
309
310  When generating a Payload, hashes of the payload and metadata files will be
311  signed with the device key, either by calling an external payload signer or
312  by calling openssl with the package key. This class provides a unified
313  interface, so that callers can just call PayloadSigner.Sign().
314
315  If an external payload signer has been specified (OPTIONS.payload_signer), it
316  calls the signer with the provided args (OPTIONS.payload_signer_args). Note
317  that the signing key should be provided as part of the payload_signer_args.
  Otherwise, without an external signer, it uses the package key
  (OPTIONS.package_key) and calls openssl to do the signing.
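
  For illustration, an external signer is invoked roughly as follows (the
  command name and extra args are hypothetical; this class only appends the
  -in/-out arguments):

    my_signer <payload_signer_args...> -in <hash_file> -out <signature_file>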
320  """
321
322  def __init__(self):
323    if OPTIONS.payload_signer is None:
324      # Prepare the payload signing key.
325      private_key = OPTIONS.package_key + OPTIONS.private_key_suffix
326      pw = OPTIONS.key_passwords[OPTIONS.package_key]
327
328      cmd = ["openssl", "pkcs8", "-in", private_key, "-inform", "DER"]
329      cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
330      signing_key = common.MakeTempFile(prefix="key-", suffix=".key")
331      cmd.extend(["-out", signing_key])
332      common.RunAndCheckOutput(cmd, verbose=False)
333
334      self.signer = "openssl"
335      self.signer_args = ["pkeyutl", "-sign", "-inkey", signing_key,
336                          "-pkeyopt", "digest:sha256"]
337      self.maximum_signature_size = self._GetMaximumSignatureSizeInBytes(
338          signing_key)
339    else:
340      self.signer = OPTIONS.payload_signer
341      self.signer_args = OPTIONS.payload_signer_args
342      if OPTIONS.payload_signer_maximum_signature_size:
343        self.maximum_signature_size = int(
344            OPTIONS.payload_signer_maximum_signature_size)
345      else:
346        # The legacy config uses RSA2048 keys.
347        logger.warning("The maximum signature size for payload signer is not"
348                       " set, default to 256 bytes.")
349        self.maximum_signature_size = 256
350
351  @staticmethod
352  def _GetMaximumSignatureSizeInBytes(signing_key):
353    out_signature_size_file = common.MakeTempFile("signature_size")
354    cmd = ["delta_generator", "--out_maximum_signature_size_file={}".format(
355        out_signature_size_file), "--private_key={}".format(signing_key)]
356    common.RunAndCheckOutput(cmd)
357    with open(out_signature_size_file) as f:
358      signature_size = f.read().rstrip()
359    logger.info("%s outputs the maximum signature size: %s", cmd[0],
360                signature_size)
361    return int(signature_size)
362
363  def Sign(self, in_file):
364    """Signs the given input file. Returns the output filename."""
365    out_file = common.MakeTempFile(prefix="signed-", suffix=".bin")
366    cmd = [self.signer] + self.signer_args + ['-in', in_file, '-out', out_file]
367    common.RunAndCheckOutput(cmd)
368    return out_file
369
370
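# A minimal sketch of how Payload and PayloadSigner are used together (see
# GenerateAbOtaPackage below for the real flow; variable names are
# illustrative):
#
#   payload = Payload()
#   payload.Generate(target_file, source_file, additional_args)
#   payload.Sign(PayloadSigner())
#   payload.WriteToZip(output_zip)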
371class Payload(object):
372  """Manages the creation and the signing of an A/B OTA Payload."""
373
374  PAYLOAD_BIN = 'payload.bin'
375  PAYLOAD_PROPERTIES_TXT = 'payload_properties.txt'
376  SECONDARY_PAYLOAD_BIN = 'secondary/payload.bin'
377  SECONDARY_PAYLOAD_PROPERTIES_TXT = 'secondary/payload_properties.txt'
378
379  def __init__(self, secondary=False):
380    """Initializes a Payload instance.
381
382    Args:
383      secondary: Whether it's generating a secondary payload (default: False).
384    """
385    self.payload_file = None
386    self.payload_properties = None
387    self.secondary = secondary
388
389  def _Run(self, cmd):  # pylint: disable=no-self-use
390    # Don't pipe (buffer) the output if verbose is set. Let
391    # brillo_update_payload write to stdout/stderr directly, so its progress can
392    # be monitored.
393    if OPTIONS.verbose:
394      common.RunAndCheckOutput(cmd, stdout=None, stderr=None)
395    else:
396      common.RunAndCheckOutput(cmd)
397
398  def Generate(self, target_file, source_file=None, additional_args=None):
399    """Generates a payload from the given target-files zip(s).
400
401    Args:
402      target_file: The filename of the target build target-files zip.
403      source_file: The filename of the source build target-files zip; or None if
404          generating a full OTA.
405      additional_args: A list of additional args that should be passed to
406          brillo_update_payload script; or None.
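
    Example (illustrative filenames):
      payload.Generate('target-files.zip', 'source-target-files.zip',
                       ['--max_timestamp', '1633024800'])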
407    """
408    if additional_args is None:
409      additional_args = []
410
411    payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
412    cmd = ["brillo_update_payload", "generate",
413           "--payload", payload_file,
414           "--target_image", target_file]
415    if source_file is not None:
416      cmd.extend(["--source_image", source_file])
417      if OPTIONS.disable_fec_computation:
418        cmd.extend(["--disable_fec_computation", "true"])
419      if OPTIONS.disable_verity_computation:
420        cmd.extend(["--disable_verity_computation", "true"])
421    cmd.extend(additional_args)
422    self._Run(cmd)
423
424    self.payload_file = payload_file
425    self.payload_properties = None
426
427  def Sign(self, payload_signer):
428    """Generates and signs the hashes of the payload and metadata.
429
430    Args:
431      payload_signer: A PayloadSigner() instance that serves the signing work.
432
433    Raises:
434      AssertionError: On any failure when calling brillo_update_payload script.
435    """
436    assert isinstance(payload_signer, PayloadSigner)
437
438    # 1. Generate hashes of the payload and metadata files.
439    payload_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
440    metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
441    cmd = ["brillo_update_payload", "hash",
442           "--unsigned_payload", self.payload_file,
443           "--signature_size", str(payload_signer.maximum_signature_size),
444           "--metadata_hash_file", metadata_sig_file,
445           "--payload_hash_file", payload_sig_file]
446    self._Run(cmd)
447
448    # 2. Sign the hashes.
449    signed_payload_sig_file = payload_signer.Sign(payload_sig_file)
450    signed_metadata_sig_file = payload_signer.Sign(metadata_sig_file)
451
452    # 3. Insert the signatures back into the payload file.
453    signed_payload_file = common.MakeTempFile(prefix="signed-payload-",
454                                              suffix=".bin")
455    cmd = ["brillo_update_payload", "sign",
456           "--unsigned_payload", self.payload_file,
457           "--payload", signed_payload_file,
458           "--signature_size", str(payload_signer.maximum_signature_size),
459           "--metadata_signature_file", signed_metadata_sig_file,
460           "--payload_signature_file", signed_payload_sig_file]
461    self._Run(cmd)
462
463    # 4. Dump the signed payload properties.
464    properties_file = common.MakeTempFile(prefix="payload-properties-",
465                                          suffix=".txt")
466    cmd = ["brillo_update_payload", "properties",
467           "--payload", signed_payload_file,
468           "--properties_file", properties_file]
469    self._Run(cmd)
470
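    # Adjust the signed payload properties where needed: SWITCH_SLOT_ON_REBOOT=0
    # tells update_engine not to switch the active slot after applying a
    # secondary payload, and POWERWASH=1 requests a data wipe.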
471    if self.secondary:
472      with open(properties_file, "a") as f:
473        f.write("SWITCH_SLOT_ON_REBOOT=0\n")
474
475    if OPTIONS.wipe_user_data:
476      with open(properties_file, "a") as f:
477        f.write("POWERWASH=1\n")
478
479    self.payload_file = signed_payload_file
480    self.payload_properties = properties_file
481
482  def WriteToZip(self, output_zip):
483    """Writes the payload to the given zip.
484
485    Args:
486      output_zip: The output ZipFile instance.
487    """
488    assert self.payload_file is not None
489    assert self.payload_properties is not None
490
491    if self.secondary:
492      payload_arcname = Payload.SECONDARY_PAYLOAD_BIN
493      payload_properties_arcname = Payload.SECONDARY_PAYLOAD_PROPERTIES_TXT
494    else:
495      payload_arcname = Payload.PAYLOAD_BIN
496      payload_properties_arcname = Payload.PAYLOAD_PROPERTIES_TXT
497
    # Add the signed payload file and properties into the zip. In order to
    # support streaming, we pack them as ZIP_STORED, so these entries can be
    # read directly given their offset and length.
501    common.ZipWrite(output_zip, self.payload_file, arcname=payload_arcname,
502                    compress_type=zipfile.ZIP_STORED)
503    common.ZipWrite(output_zip, self.payload_properties,
504                    arcname=payload_properties_arcname,
505                    compress_type=zipfile.ZIP_STORED)
506
507
508def _LoadOemDicts(oem_source):
509  """Returns the list of loaded OEM properties dict."""
510  if not oem_source:
511    return None
512
513  oem_dicts = []
514  for oem_file in oem_source:
515    with open(oem_file) as fp:
516      oem_dicts.append(common.LoadDictionaryFromLines(fp.readlines()))
517  return oem_dicts
518
519
520class StreamingPropertyFiles(PropertyFiles):
521  """A subclass for computing the property-files for streaming A/B OTAs."""
522
523  def __init__(self):
524    super(StreamingPropertyFiles, self).__init__()
525    self.name = 'ota-streaming-property-files'
526    self.required = (
527        # payload.bin and payload_properties.txt must exist.
528        'payload.bin',
529        'payload_properties.txt',
530    )
531    self.optional = (
532        # care_map is available only if dm-verity is enabled.
533        'care_map.pb',
534        'care_map.txt',
535        # compatibility.zip is available only if target supports Treble.
536        'compatibility.zip',
537    )
538
539
540class AbOtaPropertyFiles(StreamingPropertyFiles):
541  """The property-files for A/B OTA that includes payload_metadata.bin info.
542
543  Since P, we expose one more token (aka property-file), in addition to the ones
544  for streaming A/B OTA, for a virtual entry of 'payload_metadata.bin'.
545  'payload_metadata.bin' is the header part of a payload ('payload.bin'), which
546  doesn't exist as a separate ZIP entry, but can be used to verify if the
547  payload can be applied on the given device.
548
549  For backward compatibility, we keep both of the 'ota-streaming-property-files'
550  and the newly added 'ota-property-files' in P. The new token will only be
551  available in 'ota-property-files'.
552  """
553
554  def __init__(self):
555    super(AbOtaPropertyFiles, self).__init__()
556    self.name = 'ota-property-files'
557
558  def _GetPrecomputed(self, input_zip):
559    offset, size = self._GetPayloadMetadataOffsetAndSize(input_zip)
560    return ['payload_metadata.bin:{}:{}'.format(offset, size)]
561
562  @staticmethod
563  def _GetPayloadMetadataOffsetAndSize(input_zip):
564    """Computes the offset and size of the payload metadata for a given package.
565
566    (From system/update_engine/update_metadata.proto)
567    A delta update file contains all the deltas needed to update a system from
568    one specific version to another specific version. The update format is
569    represented by this struct pseudocode:
570
571    struct delta_update_file {
572      char magic[4] = "CrAU";
573      uint64 file_format_version;
574      uint64 manifest_size;  // Size of protobuf DeltaArchiveManifest
575
576      // Only present if format_version > 1:
577      uint32 metadata_signature_size;
578
579      // The Bzip2 compressed DeltaArchiveManifest
      char manifest[manifest_size];
581
582      // The signature of the metadata (from the beginning of the payload up to
583      // this location, not including the signature itself). This is a
584      // serialized Signatures message.
      char metadata_signature_message[metadata_signature_size];
586
587      // Data blobs for files, no specific format. The specific offset
588      // and length of each data blob is recorded in the DeltaArchiveManifest.
589      struct {
590        char data[];
591      } blobs[];
592
593      // These two are not signed:
594      uint64 payload_signatures_message_size;
595      char payload_signatures_message[];
596    };
597
    'payload_metadata.bin' contains all the bytes from the beginning of the
    payload up to the end of 'metadata_signature_message'.
600    """
601    payload_info = input_zip.getinfo('payload.bin')
602    payload_offset = payload_info.header_offset
603    payload_offset += zipfile.sizeFileHeader
604    payload_offset += len(payload_info.extra) + len(payload_info.filename)
605    payload_size = payload_info.file_size
606
607    with input_zip.open('payload.bin') as payload_fp:
608      header_bin = payload_fp.read(24)
609
610    # network byte order (big-endian)
611    header = struct.unpack("!IQQL", header_bin)
612
613    # 'CrAU'
614    magic = header[0]
615    assert magic == 0x43724155, "Invalid magic: {:x}".format(magic)
616
617    manifest_size = header[2]
618    metadata_signature_size = header[3]
619    metadata_total = 24 + manifest_size + metadata_signature_size
620    assert metadata_total < payload_size
621
622    return (payload_offset, metadata_total)
623
624
625def UpdatesInfoForSpecialUpdates(content, partitions_filter,
626                                 delete_keys=None):
627  """ Updates info file for secondary payload generation, partial update, etc.
628
629    Scan each line in the info file, and remove the unwanted partitions from
630    the dynamic partition list in the related properties. e.g.
631    "super_google_dynamic_partitions_partition_list=system vendor product"
632    will become "super_google_dynamic_partitions_partition_list=system".
633
634  Args:
635    content: The content of the input info file. e.g. misc_info.txt.
636    partitions_filter: A function to filter the desired partitions from a given
637      list
638    delete_keys: A list of keys to delete in the info file
639
640  Returns:
641    A string of the updated info content.
642  """
643
644  output_list = []
645  # The suffix in partition_list variables that follows the name of the
646  # partition group.
647  list_suffix = 'partition_list'
648  for line in content.splitlines():
649    if line.startswith('#') or '=' not in line:
650      output_list.append(line)
651      continue
652    key, value = line.strip().split('=', 1)
653
654    if delete_keys and key in delete_keys:
655      pass
656    elif key.endswith(list_suffix):
657      partitions = value.split()
      # TODO: for partial updates, partitions in the same group must either all
      # be updated or all be omitted.
660      partitions = filter(partitions_filter, partitions)
661      output_list.append('{}={}'.format(key, ' '.join(partitions)))
662    else:
663      output_list.append(line)
664  return '\n'.join(output_list)
665
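# Illustrative use of UpdatesInfoForSpecialUpdates (values are hypothetical):
# keep only the partitions being updated and drop the Virtual A/B flags.
#
#   UpdatesInfoForSpecialUpdates(misc_info_content,
#                                lambda p: p in ('system', 'vendor'),
#                                delete_keys=['virtual_ab'])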
666
667def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False):
668  """Returns a target-files.zip file for generating secondary payload.
669
670  Although the original target-files.zip already contains secondary slot
671  images (i.e. IMAGES/system_other.img), we need to rename the files to the
672  ones without _other suffix. Note that we cannot instead modify the names in
673  META/ab_partitions.txt, because there are no matching partitions on device.
674
675  For the partitions that don't have secondary images, the ones for primary
676  slot will be used. This is to ensure that we always have valid boot, vbmeta,
677  bootloader images in the inactive slot.
678
679  Args:
680    input_file: The input target-files.zip file.
681    skip_postinstall: Whether to skip copying the postinstall config file.
682
683  Returns:
684    The filename of the target-files.zip for generating secondary payload.
685  """
686
687  def GetInfoForSecondaryImages(info_file):
688    """Updates info file for secondary payload generation."""
689    with open(info_file) as f:
690      content = f.read()
    # Remove the virtual_ab flag from the secondary payload so that the OTA
    # client doesn't use snapshots for the secondary update.
    delete_keys = ['virtual_ab', 'virtual_ab_retrofit']
694    return UpdatesInfoForSpecialUpdates(
695        content, lambda p: p not in SECONDARY_PAYLOAD_SKIPPED_IMAGES,
696        delete_keys)
697
698  target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
699  target_zip = zipfile.ZipFile(target_file, 'w', allowZip64=True)
700
701  with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
702    infolist = input_zip.infolist()
703
704  input_tmp = common.UnzipTemp(input_file, UNZIP_PATTERN)
705  for info in infolist:
706    unzipped_file = os.path.join(input_tmp, *info.filename.split('/'))
707    if info.filename == 'IMAGES/system_other.img':
708      common.ZipWrite(target_zip, unzipped_file, arcname='IMAGES/system.img')
709
710    # Primary images and friends need to be skipped explicitly.
711    elif info.filename in ('IMAGES/system.img',
712                           'IMAGES/system.map'):
713      pass
714
715    # Copy images that are not in SECONDARY_PAYLOAD_SKIPPED_IMAGES.
716    elif info.filename.startswith(('IMAGES/', 'RADIO/')):
717      image_name = os.path.basename(info.filename)
718      if image_name not in ['{}.img'.format(partition) for partition in
719                            SECONDARY_PAYLOAD_SKIPPED_IMAGES]:
720        common.ZipWrite(target_zip, unzipped_file, arcname=info.filename)
721
722    # Skip copying the postinstall config if requested.
723    elif skip_postinstall and info.filename == POSTINSTALL_CONFIG:
724      pass
725
726    elif info.filename.startswith('META/'):
727      # Remove the unnecessary partitions for secondary images from the
728      # ab_partitions file.
729      if info.filename == AB_PARTITIONS:
730        with open(unzipped_file) as f:
731          partition_list = f.read().splitlines()
732        partition_list = [partition for partition in partition_list if partition
733                          and partition not in SECONDARY_PAYLOAD_SKIPPED_IMAGES]
734        common.ZipWriteStr(target_zip, info.filename,
735                           '\n'.join(partition_list))
736      # Remove the unnecessary partitions from the dynamic partitions list.
737      elif (info.filename == 'META/misc_info.txt' or
738            info.filename == DYNAMIC_PARTITION_INFO):
739        modified_info = GetInfoForSecondaryImages(unzipped_file)
740        common.ZipWriteStr(target_zip, info.filename, modified_info)
741      else:
742        common.ZipWrite(target_zip, unzipped_file, arcname=info.filename)
743
744  common.ZipClose(target_zip)
745
746  return target_file
747
748
749def GetTargetFilesZipWithoutPostinstallConfig(input_file):
750  """Returns a target-files.zip that's not containing postinstall_config.txt.
751
752  This allows brillo_update_payload script to skip writing all the postinstall
753  hooks in the generated payload. The input target-files.zip file will be
754  duplicated, with 'META/postinstall_config.txt' skipped. If input_file doesn't
755  contain the postinstall_config.txt entry, the input file will be returned.
756
757  Args:
758    input_file: The input target-files.zip filename.
759
760  Returns:
761    The filename of target-files.zip that doesn't contain postinstall config.
762  """
763  # We should only make a copy if postinstall_config entry exists.
764  with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
765    if POSTINSTALL_CONFIG not in input_zip.namelist():
766      return input_file
767
768  target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
769  shutil.copyfile(input_file, target_file)
770  common.ZipDelete(target_file, POSTINSTALL_CONFIG)
771  return target_file
772
773
774def ParseInfoDict(target_file_path):
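  """Loads the build info dict (based on META/misc_info.txt) from a
  target-files zip."""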
775  with zipfile.ZipFile(target_file_path, 'r', allowZip64=True) as zfp:
776    return common.LoadInfoDict(zfp)
777
778
779def GetTargetFilesZipForPartialUpdates(input_file, ab_partitions):
780  """Returns a target-files.zip for partial ota update package generation.
781
782  This function modifies ab_partitions list with the desired partitions before
783  calling the brillo_update_payload script. It also cleans up the reference to
  the excluded partitions in the info file, e.g. misc_info.txt.
785
786  Args:
787    input_file: The input target-files.zip filename.
788    ab_partitions: A list of partitions to include in the partial update
789
790  Returns:
791    The filename of target-files.zip used for partial ota update.
792  """
793
794  def AddImageForPartition(partition_name):
795    """Add the archive name for a given partition to the copy list."""
796    for prefix in ['IMAGES', 'RADIO']:
797      image_path = '{}/{}.img'.format(prefix, partition_name)
798      if image_path in namelist:
799        copy_entries.append(image_path)
800        map_path = '{}/{}.map'.format(prefix, partition_name)
801        if map_path in namelist:
802          copy_entries.append(map_path)
803        return
804
805    raise ValueError("Cannot find {} in input zipfile".format(partition_name))
806
807  with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
808    original_ab_partitions = input_zip.read(
809        AB_PARTITIONS).decode().splitlines()
810    namelist = input_zip.namelist()
811
812  unrecognized_partitions = [partition for partition in ab_partitions if
813                             partition not in original_ab_partitions]
814  if unrecognized_partitions:
815    raise ValueError("Unrecognized partitions when generating partial updates",
816                     unrecognized_partitions)
817
818  logger.info("Generating partial updates for %s", ab_partitions)
819
820  copy_entries = ['META/update_engine_config.txt']
821  for partition_name in ab_partitions:
822    AddImageForPartition(partition_name)
823
824  # Use zip2zip to avoid extracting the zipfile.
825  partial_target_file = common.MakeTempFile(suffix='.zip')
826  cmd = ['zip2zip', '-i', input_file, '-o', partial_target_file]
827  cmd.extend(['{}:{}'.format(name, name) for name in copy_entries])
828  common.RunAndCheckOutput(cmd)
829
830  partial_target_zip = zipfile.ZipFile(partial_target_file, 'a',
831                                       allowZip64=True)
832  with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
833    common.ZipWriteStr(partial_target_zip, 'META/ab_partitions.txt',
834                       '\n'.join(ab_partitions))
835    for info_file in ['META/misc_info.txt', DYNAMIC_PARTITION_INFO]:
836      if info_file not in input_zip.namelist():
837        logger.warning('Cannot find %s in input zipfile', info_file)
838        continue
839      content = input_zip.read(info_file).decode()
840      modified_info = UpdatesInfoForSpecialUpdates(
841          content, lambda p: p in ab_partitions)
842      common.ZipWriteStr(partial_target_zip, info_file, modified_info)
843
844    # TODO(xunchang) handle 'META/care_map.pb', 'META/postinstall_config.txt'
845  common.ZipClose(partial_target_zip)
846
847  return partial_target_file
848
849
850def GetTargetFilesZipForRetrofitDynamicPartitions(input_file,
851                                                  super_block_devices,
852                                                  dynamic_partition_list):
853  """Returns a target-files.zip for retrofitting dynamic partitions.
854
855  This allows brillo_update_payload to generate an OTA based on the exact
856  bits on the block devices. Postinstall is disabled.
857
858  Args:
859    input_file: The input target-files.zip filename.
860    super_block_devices: The list of super block devices
861    dynamic_partition_list: The list of dynamic partitions
862
863  Returns:
864    The filename of target-files.zip with *.img replaced with super_*.img for
865    each block device in super_block_devices.
866  """
867  assert super_block_devices, "No super_block_devices are specified."
868
869  replace = {'OTA/super_{}.img'.format(dev): 'IMAGES/{}.img'.format(dev)
870             for dev in super_block_devices}
871
872  target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
873  shutil.copyfile(input_file, target_file)
874
875  with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
876    namelist = input_zip.namelist()
877
878  input_tmp = common.UnzipTemp(input_file, RETROFIT_DAP_UNZIP_PATTERN)
879
880  # Remove partitions from META/ab_partitions.txt that is in
881  # dynamic_partition_list but not in super_block_devices so that
882  # brillo_update_payload won't generate update for those logical partitions.
883  ab_partitions_file = os.path.join(input_tmp, *AB_PARTITIONS.split('/'))
884  with open(ab_partitions_file) as f:
885    ab_partitions_lines = f.readlines()
886    ab_partitions = [line.strip() for line in ab_partitions_lines]
887  # Assert that all super_block_devices are in ab_partitions
888  super_device_not_updated = [partition for partition in super_block_devices
889                              if partition not in ab_partitions]
890  assert not super_device_not_updated, \
891      "{} is in super_block_devices but not in {}".format(
892          super_device_not_updated, AB_PARTITIONS)
893  # ab_partitions -= (dynamic_partition_list - super_block_devices)
894  new_ab_partitions = common.MakeTempFile(
895      prefix="ab_partitions", suffix=".txt")
896  with open(new_ab_partitions, 'w') as f:
897    for partition in ab_partitions:
898      if (partition in dynamic_partition_list and
899              partition not in super_block_devices):
900        logger.info("Dropping %s from ab_partitions.txt", partition)
901        continue
902      f.write(partition + "\n")
903  to_delete = [AB_PARTITIONS]
904
905  # Always skip postinstall for a retrofit update.
906  to_delete += [POSTINSTALL_CONFIG]
907
908  # Delete dynamic_partitions_info.txt so that brillo_update_payload thinks this
909  # is a regular update on devices without dynamic partitions support.
910  to_delete += [DYNAMIC_PARTITION_INFO]
911
912  # Remove the existing partition images as well as the map files.
913  to_delete += list(replace.values())
914  to_delete += ['IMAGES/{}.map'.format(dev) for dev in super_block_devices]
915
916  common.ZipDelete(target_file, to_delete)
917
918  target_zip = zipfile.ZipFile(target_file, 'a', allowZip64=True)
919
920  # Write super_{foo}.img as {foo}.img.
921  for src, dst in replace.items():
922    assert src in namelist, \
923        'Missing {} in {}; {} cannot be written'.format(src, input_file, dst)
924    unzipped_file = os.path.join(input_tmp, *src.split('/'))
925    common.ZipWrite(target_zip, unzipped_file, arcname=dst)
926
927  # Write new ab_partitions.txt file
928  common.ZipWrite(target_zip, new_ab_partitions, arcname=AB_PARTITIONS)
929
930  common.ZipClose(target_zip)
931
932  return target_file
933
934
935def GetTargetFilesZipForCustomImagesUpdates(input_file, custom_images):
936  """Returns a target-files.zip for custom partitions update.
937
  This function copies the custom images into the output target-files.zip,
  renamed so that they update the desired custom partitions.
940
941  Args:
942    input_file: The input target-files.zip filename.
943    custom_images: A map of custom partitions and custom images.
944
945  Returns:
    The filename of a target-files.zip in which the custom images under
    IMAGES/ have been renamed to their partition names.
948  """
949  # Use zip2zip to avoid extracting the zipfile.
950  target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
951  cmd = ['zip2zip', '-i', input_file, '-o', target_file]
952
953  with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
954    namelist = input_zip.namelist()
955
  # Write IMAGES/{custom_image} as IMAGES/{custom_partition}.img.
957  for custom_partition, custom_image in custom_images.items():
958    default_custom_image = '{}.img'.format(custom_partition)
959    if default_custom_image != custom_image:
960      logger.info("Update custom partition '%s' with '%s'",
961                  custom_partition, custom_image)
      # The default custom image needs to be deleted first.
963      namelist.remove('IMAGES/{}'.format(default_custom_image))
      # Maps IMAGES/{custom_image} to IMAGES/{custom_partition}.img.
965      cmd.extend(['IMAGES/{}:IMAGES/{}'.format(custom_image,
966                                               default_custom_image)])
967
968  cmd.extend(['{}:{}'.format(name, name) for name in namelist])
969  common.RunAndCheckOutput(cmd)
970
971  return target_file
972
973
974def GeneratePartitionTimestampFlags(partition_state):
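  """Builds the --partition_timestamps flag for brillo_update_payload.

  The flag value lists one "<partition>:<version>" pair per partition, comma
  separated, e.g. (versions illustrative) "boot:1633024800,system:1633024800".
  """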
975  partition_timestamps = [
976      part.partition_name + ":" + part.version
977      for part in partition_state]
978  return ["--partition_timestamps", ",".join(partition_timestamps)]
979
980
981def GeneratePartitionTimestampFlagsDowngrade(
982        pre_partition_state, post_partition_state):
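  """Builds the --partition_timestamps flag for a downgrade package.

  For each partition, the larger of the source (pre) and target (post) versions
  is used, so that update_engine will still accept the payload on the newer
  source build.
  """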
983  assert pre_partition_state is not None
984  partition_timestamps = {}
985  for part in pre_partition_state:
986    partition_timestamps[part.partition_name] = part.version
987  for part in post_partition_state:
988    partition_timestamps[part.partition_name] = \
989        max(part.version, partition_timestamps[part.partition_name])
990  return [
991      "--partition_timestamps",
992      ",".join([key + ":" + val for (key, val)
993                in partition_timestamps.items()])
994  ]
995
996
997def IsSparseImage(filepath):
998  with open(filepath, 'rb') as fp:
999    # Magic for android sparse image format
1000    # https://source.android.com/devices/bootloader/images
1001    return fp.read(4) == b'\x3A\xFF\x26\xED'
1002
1003
1004def SupportsMainlineGkiUpdates(target_file):
1005  """Return True if the build supports MainlineGKIUpdates.
1006
1007  This function scans the product.img file in IMAGES/ directory for
1008  pattern |*/apex/com.android.gki.*.apex|. If there are files
1009  matching this pattern, conclude that build supports mainline
1010  GKI and return True
1011
1012  Args:
1013    target_file: Path to a target_file.zip, or an extracted directory
1014  Return:
1015    True if thisb uild supports Mainline GKI Updates.
1016  """
1017  if target_file is None:
1018    return False
1019  if os.path.isfile(target_file):
1020    target_file = common.UnzipTemp(target_file, ["IMAGES/product.img"])
  assert os.path.isdir(target_file), \
      "{} must be a path to a zip archive or a dir containing extracted" \
      " target_files".format(target_file)
1025  image_file = os.path.join(target_file, "IMAGES", "product.img")
1026
1027  if not os.path.isfile(image_file):
1028    return False
1029
1030  if IsSparseImage(image_file):
1031    # Unsparse the image
1032    tmp_img = common.MakeTempFile(suffix=".img")
1033    subprocess.check_output(["simg2img", image_file, tmp_img])
1034    image_file = tmp_img
1035
1036  cmd = ["debugfs_static", "-R", "ls -p /apex", image_file]
1037  output = subprocess.check_output(cmd).decode()
1038
1039  pattern = re.compile(r"com\.android\.gki\..*\.apex")
1040  return pattern.search(output) is not None
1041
1042
1043def GenerateAbOtaPackage(target_file, output_file, source_file=None):
1044  """Generates an Android OTA package that has A/B update payload."""
1045  # Stage the output zip package for package signing.
1046  if not OPTIONS.no_signing:
1047    staging_file = common.MakeTempFile(suffix='.zip')
1048  else:
1049    staging_file = output_file
1050  output_zip = zipfile.ZipFile(staging_file, "w",
1051                               compression=zipfile.ZIP_DEFLATED,
1052                               allowZip64=True)
1053
1054  if source_file is not None:
1055    assert "ab_partitions" in OPTIONS.source_info_dict, \
1056        "META/ab_partitions.txt is required for ab_update."
1057    assert "ab_partitions" in OPTIONS.target_info_dict, \
1058        "META/ab_partitions.txt is required for ab_update."
1059    target_info = common.BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
1060    source_info = common.BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
1061    # If source supports VABC, delta_generator/update_engine will attempt to
    # use VABC. This is dangerous, as the target build won't have snapuserd to
    # serve I/O requests when the device boots. Therefore, disable VABC if the
    # source build doesn't support it.
1065    if not source_info.is_vabc or not target_info.is_vabc:
1066      logger.info("Either source or target does not support VABC, disabling.")
1067      OPTIONS.disable_vabc = True
1068
1069  else:
1070    assert "ab_partitions" in OPTIONS.info_dict, \
1071        "META/ab_partitions.txt is required for ab_update."
1072    target_info = common.BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
1073    source_info = None
1074
1075  if target_info.vendor_suppressed_vabc:
1076    logger.info("Vendor suppressed VABC. Disabling")
1077    OPTIONS.disable_vabc = True
1078  additional_args = []
1079
1080  # Prepare custom images.
1081  if OPTIONS.custom_images:
1082    target_file = GetTargetFilesZipForCustomImagesUpdates(
1083        target_file, OPTIONS.custom_images)
1084
1085  if OPTIONS.retrofit_dynamic_partitions:
1086    target_file = GetTargetFilesZipForRetrofitDynamicPartitions(
1087        target_file, target_info.get("super_block_devices").strip().split(),
1088        target_info.get("dynamic_partition_list").strip().split())
1089  elif OPTIONS.partial:
1090    target_file = GetTargetFilesZipForPartialUpdates(target_file,
1091                                                     OPTIONS.partial)
1092    additional_args += ["--is_partial_update", "true"]
1093  elif OPTIONS.skip_postinstall:
1094    target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file)
  # target_file may have been modified; re-parse ab_partitions.
1096  with zipfile.ZipFile(target_file, allowZip64=True) as zfp:
1097    target_info.info_dict['ab_partitions'] = zfp.read(
1098        AB_PARTITIONS).decode().strip().split("\n")
1099
1100  # Metadata to comply with Android OTA package format.
1101  metadata = GetPackageMetadata(target_info, source_info)
1102  # Generate payload.
1103  payload = Payload()
1104
1105  partition_timestamps_flags = []
1106  # Enforce a max timestamp this payload can be applied on top of.
1107  if OPTIONS.downgrade:
1108    max_timestamp = source_info.GetBuildProp("ro.build.date.utc")
1109    partition_timestamps_flags = GeneratePartitionTimestampFlagsDowngrade(
1110        metadata.precondition.partition_state,
1111        metadata.postcondition.partition_state
1112    )
1113  else:
1114    max_timestamp = str(metadata.postcondition.timestamp)
1115    partition_timestamps_flags = GeneratePartitionTimestampFlags(
1116        metadata.postcondition.partition_state)
1117
1118  if OPTIONS.disable_vabc:
1119    additional_args += ["--disable_vabc", "true"]
1120  additional_args += ["--max_timestamp", max_timestamp]
1121
1122  if SupportsMainlineGkiUpdates(source_file):
1123    logger.warning(
1124        "Detected build with mainline GKI, include full boot image.")
1125    additional_args.extend(["--full_boot", "true"])
1126
1127  payload.Generate(
1128      target_file,
1129      source_file,
1130      additional_args + partition_timestamps_flags
1131  )
1132
1133  # Sign the payload.
1134  payload_signer = PayloadSigner()
1135  payload.Sign(payload_signer)
1136
1137  # Write the payload into output zip.
1138  payload.WriteToZip(output_zip)
1139
1140  # Generate and include the secondary payload that installs secondary images
1141  # (e.g. system_other.img).
1142  if OPTIONS.include_secondary:
1143    # We always include a full payload for the secondary slot, even when
1144    # building an incremental OTA. See the comments for "--include_secondary".
1145    secondary_target_file = GetTargetFilesZipForSecondaryImages(
1146        target_file, OPTIONS.skip_postinstall)
1147    secondary_payload = Payload(secondary=True)
1148    secondary_payload.Generate(secondary_target_file,
1149                               additional_args=["--max_timestamp",
1150                                                max_timestamp])
1151    secondary_payload.Sign(payload_signer)
1152    secondary_payload.WriteToZip(output_zip)
1153
1154  # If dm-verity is supported for the device, copy contents of care_map
1155  # into A/B OTA package.
1156  target_zip = zipfile.ZipFile(target_file, "r", allowZip64=True)
1157  if (target_info.get("verity") == "true" or
1158          target_info.get("avb_enable") == "true"):
1159    care_map_list = [x for x in ["care_map.pb", "care_map.txt"] if
1160                     "META/" + x in target_zip.namelist()]
1161
1162    # Adds care_map if either the protobuf format or the plain text one exists.
1163    if care_map_list:
1164      care_map_name = care_map_list[0]
1165      care_map_data = target_zip.read("META/" + care_map_name)
1166      # In order to support streaming, care_map needs to be packed as
1167      # ZIP_STORED.
1168      common.ZipWriteStr(output_zip, care_map_name, care_map_data,
1169                         compress_type=zipfile.ZIP_STORED)
1170    else:
1171      logger.warning("Cannot find care map file in target_file package")
1172
1173  # Copy apex_info.pb over to generated OTA package.
1174  try:
1175    apex_info_entry = target_zip.getinfo("META/apex_info.pb")
1176    with target_zip.open(apex_info_entry, "r") as zfp:
1177      common.ZipWriteStr(output_zip, "apex_info.pb", zfp.read(),
1178                         compress_type=zipfile.ZIP_STORED)
1179  except KeyError:
1180    logger.warning("target_file doesn't contain apex_info.pb %s", target_file)
1181
1182  common.ZipClose(target_zip)
1183
1184  CheckVintfIfTrebleEnabled(target_file, target_info)
1185
1186  # We haven't written the metadata entry yet, which will be handled in
1187  # FinalizeMetadata().
1188  common.ZipClose(output_zip)
1189
1190  # AbOtaPropertyFiles intends to replace StreamingPropertyFiles, as it covers
1191  # all the info of the latter. However, system updaters and OTA servers need to
  # take time to switch to the new flag. We keep both of the flags for the
  # P timeframe, and will remove StreamingPropertyFiles in a later release.
1194  needed_property_files = (
1195      AbOtaPropertyFiles(),
1196      StreamingPropertyFiles(),
1197  )
1198  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files)
1199
1200
1201def main(argv):
1202
1203  def option_handler(o, a):
1204    if o in ("-k", "--package_key"):
1205      OPTIONS.package_key = a
1206    elif o in ("-i", "--incremental_from"):
1207      OPTIONS.incremental_source = a
1208    elif o == "--full_radio":
1209      OPTIONS.full_radio = True
1210    elif o == "--full_bootloader":
1211      OPTIONS.full_bootloader = True
1212    elif o == "--wipe_user_data":
1213      OPTIONS.wipe_user_data = True
1214    elif o == "--downgrade":
1215      OPTIONS.downgrade = True
1216      OPTIONS.wipe_user_data = True
1217    elif o == "--override_timestamp":
1218      OPTIONS.downgrade = True
1219    elif o in ("-o", "--oem_settings"):
1220      OPTIONS.oem_source = a.split(',')
1221    elif o == "--oem_no_mount":
1222      OPTIONS.oem_no_mount = True
1223    elif o in ("-e", "--extra_script"):
1224      OPTIONS.extra_script = a
1225    elif o in ("-t", "--worker_threads"):
1226      if a.isdigit():
1227        OPTIONS.worker_threads = int(a)
1228      else:
1229        raise ValueError("Cannot parse value %r for option %r - only "
1230                         "integers are allowed." % (a, o))
1231    elif o in ("-2", "--two_step"):
1232      OPTIONS.two_step = True
1233    elif o == "--include_secondary":
1234      OPTIONS.include_secondary = True
1235    elif o == "--no_signing":
1236      OPTIONS.no_signing = True
1237    elif o == "--verify":
1238      OPTIONS.verify = True
1239    elif o == "--block":
1240      OPTIONS.block_based = True
1241    elif o in ("-b", "--binary"):
1242      OPTIONS.updater_binary = a
1243    elif o == "--stash_threshold":
1244      try:
1245        OPTIONS.stash_threshold = float(a)
1246      except ValueError:
1247        raise ValueError("Cannot parse value %r for option %r - expecting "
1248                         "a float" % (a, o))
1249    elif o == "--log_diff":
1250      OPTIONS.log_diff = a
1251    elif o == "--payload_signer":
1252      OPTIONS.payload_signer = a
1253    elif o == "--payload_signer_args":
1254      OPTIONS.payload_signer_args = shlex.split(a)
1255    elif o == "--payload_signer_maximum_signature_size":
1256      OPTIONS.payload_signer_maximum_signature_size = a
1257    elif o == "--payload_signer_key_size":
1258      # TODO(Xunchang) remove this option after cleaning up the callers.
1259      logger.warning("The option '--payload_signer_key_size' is deprecated."
1260                     " Use '--payload_signer_maximum_signature_size' instead.")
1261      OPTIONS.payload_signer_maximum_signature_size = a
1262    elif o == "--extracted_input_target_files":
1263      OPTIONS.extracted_input = a
1264    elif o == "--skip_postinstall":
1265      OPTIONS.skip_postinstall = True
1266    elif o == "--retrofit_dynamic_partitions":
1267      OPTIONS.retrofit_dynamic_partitions = True
1268    elif o == "--skip_compatibility_check":
1269      OPTIONS.skip_compatibility_check = True
1270    elif o == "--output_metadata_path":
1271      OPTIONS.output_metadata_path = a
1272    elif o == "--disable_fec_computation":
1273      OPTIONS.disable_fec_computation = True
1274    elif o == "--disable_verity_computation":
1275      OPTIONS.disable_verity_computation = True
1276    elif o == "--force_non_ab":
1277      OPTIONS.force_non_ab = True
1278    elif o == "--boot_variable_file":
1279      OPTIONS.boot_variable_file = a
1280    elif o == "--partial":
1281      partitions = a.split()
1282      if not partitions:
1283        raise ValueError("Cannot parse partitions in {}".format(a))
1284      OPTIONS.partial = partitions
1285    elif o == "--custom_image":
1286      custom_partition, custom_image = a.split("=")
1287      OPTIONS.custom_images[custom_partition] = custom_image
1288    elif o == "--disable_vabc":
1289      OPTIONS.disable_vabc = True
1290    elif o == "--spl_downgrade":
1291      OPTIONS.spl_downgrade = True
1292      OPTIONS.wipe_user_data = True
1293    elif o == "--vabc_downgrade":
1294      OPTIONS.vabc_downgrade = True
1295    else:
1296      return False
1297    return True
1298
  args = common.ParseOptions(argv, __doc__,
                             extra_opts="b:k:i:d:e:t:2o:",
                             extra_long_opts=[
                                 "package_key=",
                                 "incremental_from=",
                                 "full_radio",
                                 "full_bootloader",
                                 "wipe_user_data",
                                 "downgrade",
                                 "override_timestamp",
                                 "extra_script=",
                                 "worker_threads=",
                                 "two_step",
                                 "include_secondary",
                                 "no_signing",
                                 "block",
                                 "binary=",
                                 "oem_settings=",
                                 "oem_no_mount",
                                 "verify",
                                 "stash_threshold=",
                                 "log_diff=",
                                 "payload_signer=",
                                 "payload_signer_args=",
                                 "payload_signer_maximum_signature_size=",
                                 "payload_signer_key_size=",
                                 "extracted_input_target_files=",
                                 "skip_postinstall",
                                 "retrofit_dynamic_partitions",
                                 "skip_compatibility_check",
                                 "output_metadata_path=",
                                 "disable_fec_computation",
                                 "disable_verity_computation",
                                 "force_non_ab",
                                 "boot_variable_file=",
                                 "partial=",
                                 "custom_image=",
                                 "disable_vabc",
                                 "spl_downgrade",
                                 "vabc_downgrade",
                             ], extra_option_handler=option_handler)

  if len(args) != 2:
    common.Usage(__doc__)
    sys.exit(1)

  common.InitLogging()

  # Load the build info dicts from the zip directly or from the extracted input
  # directory. We don't need to unzip the entire target-files zip, because it
  # isn't needed for A/B OTAs (brillo_update_payload does its own unzipping).
  # When loading the info dicts, we also don't need to provide the second
  # parameter to common.LoadInfoDict(). Specifying it would replace some
  # properties with their actual paths, such as 'selinux_fc' and 'ramdisk_dir',
  # which aren't used during OTA generation.
  if OPTIONS.extracted_input is not None:
    OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input)
  else:
    OPTIONS.info_dict = ParseInfoDict(args[0])

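  # Data-wipe (and downgrade) OTAs disable Virtual A/B Compression by default
  # and must be built as incrementals; see the checks below.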
  if OPTIONS.wipe_user_data:
    if not OPTIONS.vabc_downgrade:
      logger.info("Detected downgrade/datawipe OTA. When wiping userdata, a "
                  "VABC OTA makes the user wait in recovery mode for the "
                  "merge to finish. Disabling VABC by default. If you really "
                  "want to do a VABC downgrade, pass --vabc_downgrade")
      OPTIONS.disable_vabc = True
    # We should only allow downgrading incrementals (as opposed to full OTAs).
    # Otherwise the device could be rolled back to an arbitrary build with
    # this full OTA package.
    if OPTIONS.incremental_source is None:
      raise ValueError("Cannot generate downgradable full OTAs")

  # TODO(xunchang) For retrofit and partial updates, maybe we should rebuild
  # the target-files zip and reload the info_dict, so the info stays consistent
  # with the modified target-files.

  logger.info("--- target info ---")
  common.DumpInfoDict(OPTIONS.info_dict)

  # Load the source build dict if applicable.
  if OPTIONS.incremental_source is not None:
    OPTIONS.target_info_dict = OPTIONS.info_dict
    OPTIONS.source_info_dict = ParseInfoDict(OPTIONS.incremental_source)

    logger.info("--- source info ---")
    common.DumpInfoDict(OPTIONS.source_info_dict)

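  # For partial updates, restrict the A/B partition lists in the target (and,
  # if present, the source) info dicts to the partitions requested via
  # --partial.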
  if OPTIONS.partial:
    OPTIONS.info_dict['ab_partitions'] = list(
        set(OPTIONS.info_dict['ab_partitions']) & set(OPTIONS.partial))
    if OPTIONS.source_info_dict:
      OPTIONS.source_info_dict['ab_partitions'] = list(
          set(OPTIONS.source_info_dict['ab_partitions']) &
          set(OPTIONS.partial))

  # Load OEM dicts if provided.
  OPTIONS.oem_dicts = _LoadOemDicts(OPTIONS.oem_source)

  # Assume retrofitting dynamic partitions when the base build does not set
  # use_dynamic_partitions but the target build does.
  if (OPTIONS.source_info_dict and
      OPTIONS.source_info_dict.get("use_dynamic_partitions") != "true" and
      OPTIONS.target_info_dict.get("use_dynamic_partitions") == "true"):
    if OPTIONS.target_info_dict.get("dynamic_partition_retrofit") != "true":
      raise common.ExternalError(
          "Expected to generate an incremental OTA for retrofitting dynamic "
          "partitions, but dynamic_partition_retrofit is not set in the "
          "target build.")
    logger.info("Implicitly generating retrofit incremental OTA.")
    OPTIONS.retrofit_dynamic_partitions = True

  # Skip postinstall for retrofitting dynamic partitions.
  if OPTIONS.retrofit_dynamic_partitions:
    OPTIONS.skip_postinstall = True

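  # Decide whether to generate an A/B or a non-A/B package, honoring
  # --force_non_ab.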
  ab_update = OPTIONS.info_dict.get("ab_update") == "true"
  allow_non_ab = OPTIONS.info_dict.get("allow_non_ab") == "true"
  if OPTIONS.force_non_ab:
    assert allow_non_ab, \
        "--force_non_ab only allowed on devices that support non-A/B"
    assert ab_update, "--force_non_ab only allowed on A/B devices"

  generate_ab = not OPTIONS.force_non_ab and ab_update

  # Use the default key to sign the package if not specified with --package_key.
  # A package key is always needed for A/B updates, so make sure one is defined
  # whenever an A/B update is being created.
  if not OPTIONS.no_signing or generate_ab:
    if OPTIONS.package_key is None:
      OPTIONS.package_key = OPTIONS.info_dict.get(
          "default_system_dev_certificate",
          "build/make/target/product/security/testkey")
    # Get signing keys
    OPTIONS.key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
    private_key_path = OPTIONS.package_key + OPTIONS.private_key_suffix
    if not os.path.exists(private_key_path):
      raise common.ExternalError(
          "Private key {} doesn't exist. Make sure you passed the correct "
          "key path through the -k option.".format(private_key_path))

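  # Guard against unintended security patch level (SPL) downgrades when
  # building an incremental OTA.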
  if OPTIONS.source_info_dict:
    source_build_prop = OPTIONS.source_info_dict["build.prop"]
    target_build_prop = OPTIONS.target_info_dict["build.prop"]
    source_spl = source_build_prop.GetProp(SECURITY_PATCH_LEVEL_PROP_NAME)
    target_spl = target_build_prop.GetProp(SECURITY_PATCH_LEVEL_PROP_NAME)
    is_spl_downgrade = target_spl < source_spl
    if is_spl_downgrade and not OPTIONS.spl_downgrade and not OPTIONS.downgrade:
      raise common.ExternalError(
          "Target security patch level {} is older than source SPL {}. "
          "Applying such an OTA will likely cause the device to fail to boot. "
          "Pass --spl_downgrade to override this check. This script expects "
          "security patch levels in the format yyyy-mm-dd (e.g. 2021-02-05); "
          "separators other than '-' may be used, as long as they are used "
          "consistently across all SPL dates.".format(target_spl, source_spl))
    elif not is_spl_downgrade and OPTIONS.spl_downgrade:
      raise ValueError("--spl_downgrade specified but no actual SPL downgrade"
                       " detected. Please only pass in this flag if you want an"
                       " SPL downgrade. Target SPL: {} Source SPL: {}"
                       .format(target_spl, source_spl))
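  # Generate the OTA package, using the A/B or non-A/B path decided above.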
  if generate_ab:
    GenerateAbOtaPackage(
        target_file=args[0],
        output_file=args[1],
        source_file=OPTIONS.incremental_source)
  else:
    GenerateNonAbOtaPackage(
        target_file=args[0],
        output_file=args[1],
        source_file=OPTIONS.incremental_source)

  # Post OTA generation work.
  if OPTIONS.incremental_source is not None and OPTIONS.log_diff:
    logger.info("Generating diff logs...")
    logger.info("Unzipping target-files for diffing...")
    target_dir = common.UnzipTemp(args[0], TARGET_DIFFING_UNZIP_PATTERN)
    source_dir = common.UnzipTemp(
        OPTIONS.incremental_source, TARGET_DIFFING_UNZIP_PATTERN)

    with open(OPTIONS.log_diff, 'w') as out_file:
      target_files_diff.recursiveDiff(
          '', source_dir, target_dir, out_file)

  logger.info("done.")


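# Script entry point: run main(), log any ExternalError, and always clean up
# temporary files on exit.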
if __name__ == '__main__':
  try:
    common.CloseInheritedPipes()
    main(sys.argv[1:])
  except common.ExternalError:
    logger.exception("\n   ERROR:\n")
    sys.exit(1)
  finally:
    common.Cleanup()