# Copyright 2017 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import json
import logging
import os
import update_engine_event as uee
import urlparse

from autotest_lib.client.common_lib import error
from autotest_lib.client.common_lib import lsbrelease_utils
from autotest_lib.client.common_lib import utils
from autotest_lib.client.common_lib.cros import dev_server
from autotest_lib.client.cros.update_engine import update_engine_util
from autotest_lib.server import autotest
from autotest_lib.server import test
from autotest_lib.server.cros.dynamic_suite import tools
from autotest_lib.server.cros.update_engine import omaha_devserver
from chromite.lib import retry_util
from datetime import datetime, timedelta
from update_engine_event import UpdateEngineEvent


class UpdateEngineTest(test.test, update_engine_util.UpdateEngineUtil):
    """Class for comparing expected update_engine events against actual ones.

    During a rootfs update, several events are fired (e.g. download_started,
    download_finished, update_started). Each event has properties associated
    with it that need to be verified.

    In this class we build a list of expected events (a list of
    UpdateEngineEvent objects) and compare it against a "hostlog" returned
    from update_engine for the update. This hostlog is a json list of the
    events fired during the update. It is accessed via the api/hostlog URL
    on the devserver during the update.

    We can also verify the hostlog of the one-time update event that is
    fired after rebooting into the new version.

    During a typical autoupdate we will check both of these hostlogs.
    """
    version = 1

    # Timeout periods, given in seconds.
    _INITIAL_CHECK_TIMEOUT = 12 * 60
    _DOWNLOAD_STARTED_TIMEOUT = 4 * 60
    # See https://crbug.com/731214 before changing _DOWNLOAD_FINISHED_TIMEOUT
    _DOWNLOAD_FINISHED_TIMEOUT = 20 * 60
    _UPDATE_COMPLETED_TIMEOUT = 4 * 60
    _POST_REBOOT_TIMEOUT = 15 * 60

    # The names of the two hostlog files we will be verifying.
    _DEVSERVER_HOSTLOG_ROOTFS = 'devserver_hostlog_rootfs'
    _DEVSERVER_HOSTLOG_REBOOT = 'devserver_hostlog_reboot'

    # Version we tell the DUT it is on before the update.
    _CUSTOM_LSB_VERSION = '0.0.0.0'

    # Expected number of hostlog events fired during the rootfs update.
    _ROOTFS_HOSTLOG_EVENTS = 4

    _CELLULAR_BUCKET = 'gs://chromeos-throw-away-bucket/CrOSPayloads/Cellular/'


    def initialize(self, host=None, hosts=None):
        """
        Sets default variables for the test.

        @param host: The DUT we will be running on.
        @param hosts: If we are running a test with multiple DUTs (e.g. P2P)
                      we will use hosts instead of host.

        """
        self._hostlog_filename = None
        self._hostlog_events = []
        self._num_consumed_events = 0
        self._current_timestamp = None
        self._expected_events = []
        self._omaha_devserver = None
        self._host = host
        # Some AU tests use multiple DUTs.
        self._hosts = hosts

        # Define functions used in update_engine_util.
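        # Note: these callables let the shared UpdateEngineUtil helpers
        # (for example _check_update_engine_log_for_entry, used later in
        # this file) run their commands on the DUT over SSH instead of on
        # the local machine.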
        self._run = self._host.run if self._host else None
        self._get_file = self._host.get_file if self._host else None


    def cleanup(self):
        """Clean up after the test: stop any devserver and save logs."""
        if self._omaha_devserver is not None:
            self._omaha_devserver.stop_devserver()
        if self._host:
            self._host.get_file(self._UPDATE_ENGINE_LOG, self.resultsdir)


    def _get_expected_events_for_rootfs_update(self, source_release):
        """Creates a list of expected events fired during a rootfs update.

        There are four events fired during a rootfs update. We create them
        in the correct order with the correct data, timeout, and error
        condition function.
        """
        initial_check = UpdateEngineEvent(
            version=source_release,
            on_error=self._error_initial_check)
        download_started = UpdateEngineEvent(
            event_type=uee.EVENT_TYPE_DOWNLOAD_STARTED,
            event_result=uee.EVENT_RESULT_SUCCESS,
            version=source_release,
            on_error=self._error_incorrect_event)
        download_finished = UpdateEngineEvent(
            event_type=uee.EVENT_TYPE_DOWNLOAD_FINISHED,
            event_result=uee.EVENT_RESULT_SUCCESS,
            version=source_release,
            on_error=self._error_incorrect_event)
        update_complete = UpdateEngineEvent(
            event_type=uee.EVENT_TYPE_UPDATE_COMPLETE,
            event_result=uee.EVENT_RESULT_SUCCESS,
            version=source_release,
            on_error=self._error_incorrect_event)

        # There is an error message if any of them takes too long to fire.
        initial_error = self._timeout_error_message(
            'an initial update check', self._INITIAL_CHECK_TIMEOUT)
        dls_error = self._timeout_error_message(
            'a download started notification',
            self._DOWNLOAD_STARTED_TIMEOUT,
            uee.EVENT_TYPE_DOWNLOAD_STARTED)
        dlf_error = self._timeout_error_message(
            'a download finished notification',
            self._DOWNLOAD_FINISHED_TIMEOUT,
            uee.EVENT_TYPE_DOWNLOAD_FINISHED)
        uc_error = self._timeout_error_message(
            'an update complete notification',
            self._UPDATE_COMPLETED_TIMEOUT,
            uee.EVENT_TYPE_UPDATE_COMPLETE)

        # Build a list of tuples (event, timeout, timeout_error_message).
        self._expected_events = [
            (initial_check, self._INITIAL_CHECK_TIMEOUT, initial_error),
            (download_started, self._DOWNLOAD_STARTED_TIMEOUT, dls_error),
            (download_finished, self._DOWNLOAD_FINISHED_TIMEOUT, dlf_error),
            (update_complete, self._UPDATE_COMPLETED_TIMEOUT, uc_error)
        ]


    def _get_expected_event_for_post_reboot_check(self, source_release,
                                                  target_release):
        """Creates the expected event fired during post-reboot update check."""
        post_reboot_check = UpdateEngineEvent(
            event_type=uee.EVENT_TYPE_REBOOTED_AFTER_UPDATE,
            event_result=uee.EVENT_RESULT_SUCCESS,
            version=target_release,
            previous_version=source_release,
            on_error=self._error_reboot_after_update)
        err = self._timeout_error_message(
            'a successful reboot notification',
            self._POST_REBOOT_TIMEOUT,
            uee.EVENT_TYPE_REBOOTED_AFTER_UPDATE)

        self._expected_events = [
            (post_reboot_check, self._POST_REBOOT_TIMEOUT, err)
        ]


    def _read_hostlog_events(self):
        """Read the list of events from the hostlog json file."""
        if len(self._hostlog_events) <= self._num_consumed_events:
            try:
                with open(self._hostlog_filename, 'r') as out_log:
                    self._hostlog_events = json.loads(out_log.read())
            except Exception as e:
                raise error.TestFail('Error while reading the hostlogs '
                                     'from devserver: %s' % e)


    def _get_next_hostlog_event(self):
        """Returns the next event from the hostlog json file.

        @return The next new event in the hostlog, or None if no such event
                was found or an error occurred.
        """
        self._read_hostlog_events()
        # Return the next new event, if one is found.
        if len(self._hostlog_events) > self._num_consumed_events:
            new_event = {
                key: str(val) for key, val
                in self._hostlog_events[self._num_consumed_events].iteritems()
            }
            self._num_consumed_events += 1
            logging.info('Consumed new event: %s', new_event)
            return new_event


    def _verify_event_with_timeout(self, expected_event, timeout, on_timeout):
        """Verify an expected event occurs within a given timeout.

        @param expected_event: an expected event.
        @param timeout: specified in seconds.
        @param on_timeout: A string to return if the timeout occurs, or None.

        @return None if the event complies, an error string otherwise.
        """
        actual_event = self._get_next_hostlog_event()
        if actual_event:
            # If this is the first event, use its timestamp as the current
            # time.
            if self._current_timestamp is None:
                self._current_timestamp = datetime.strptime(
                    actual_event['timestamp'], '%Y-%m-%d %H:%M:%S')

            # Get the timestamp of the current event and convert to datetime.
            timestamp = actual_event['timestamp']
            event_timestamp = datetime.strptime(timestamp,
                                                '%Y-%m-%d %H:%M:%S')

            # Add the timeout onto the timestamp to get its expiry.
            event_timeout = self._current_timestamp + timedelta(
                seconds=timeout)

            # If the event happened before the timeout.
            if event_timestamp < event_timeout:
                difference = event_timestamp - self._current_timestamp
                logging.info('Event took %s seconds to fire during the '
                             'update', difference.seconds)
                result = expected_event.equals(actual_event)
                self._current_timestamp = event_timestamp
                return result

        logging.error('The expected event was not found in the hostlog: %s',
                      expected_event)
        return on_timeout


    def _error_initial_check(self, expected, actual, mismatched_attrs):
        """Error message for when an update fails at the initial check."""
        err_msg = ('The update test appears to have completed successfully '
                   'but we found a problem while verifying the hostlog of '
                   'events returned from the update. Some attributes '
                   'reported for the initial update check event are not '
                   'what we expected: %s. ' % mismatched_attrs)
        if 'version' in mismatched_attrs:
            err_msg += ('The expected version was (%s) but the reported '
                        'version was (%s). ' % (expected['version'],
                                                actual['version']))
            err_msg += ('If the reported version equals the target version, '
                        'it is likely we retried the update because the '
                        'test thought the first attempt failed but it '
                        'actually succeeded (e.g. due to SSH disconnect, '
                        'DUT not reachable by hostname, applying stateful '
                        'failed after rootfs succeeded). This second update '
                        'attempt is then started from the target version '
                        'instead of the source version, so our hostlog '
                        'verification is invalid.')
        err_msg += ('Check the full hostlog for this update in the %s file '
                    'in the %s directory.'
                    % (self._DEVSERVER_HOSTLOG_ROOTFS,
                       dev_server.AUTO_UPDATE_LOG_DIR))
        return err_msg


    def _error_incorrect_event(self, expected, actual, mismatched_attrs):
        """Error message for when an event is not what we expect."""
        return ('The update appears to have completed successfully but '
                'when analysing the update events in the hostlog we have '
                'found that one of the events is incorrect. This should '
                'never happen. The mismatched attributes are: %s. We '
                'expected %s, but got %s.' % (mismatched_attrs, expected,
                                              actual))


    def _error_reboot_after_update(self, expected, actual, mismatched_attrs):
        """Error message for problems in the post-reboot update check."""
        err_msg = ('The update completed successfully but there was a '
                   'problem with the post-reboot update check. After a '
                   'successful update, we do a final update check to parse '
                   'a unique omaha request. The mismatched attributes for '
                   'this update check were %s. ' % mismatched_attrs)
        if 'event_result' in mismatched_attrs:
            err_msg += ('The event_result was expected to be (%s:%s) but '
                        'reported (%s:%s). ' %
                        (expected['event_result'],
                         uee.get_event_result(expected['event_result']),
                         actual.get('event_result'),
                         uee.get_event_result(actual.get('event_result'))))
        if 'event_type' in mismatched_attrs:
            err_msg += ('The event_type was expected to be (%s:%s) but '
                        'reported (%s:%s). ' %
                        (expected['event_type'],
                         uee.get_event_type(expected['event_type']),
                         actual.get('event_type'),
                         uee.get_event_type(actual.get('event_type'))))
        if 'version' in mismatched_attrs:
            err_msg += ('The version was expected to be (%s) but reported '
                        '(%s). This probably means that the payload we '
                        'applied was incorrect or corrupt. ' %
                        (expected['version'], actual['version']))
        if 'previous_version' in mismatched_attrs:
            err_msg += ('The previous version was expected to be (%s) but '
                        'reported (%s). This can happen if we retried the '
                        'update because something failed after the rootfs '
                        'update completed on the first attempt, or if '
                        'stateful got wiped and '
                        '/var/lib/update_engine/prefs/previous-version was '
                        'deleted. ' % (expected['previous_version'],
                                       actual['previous_version']))
        err_msg += ('You can see the full hostlog for this update check in '
                    'the %s file within the %s directory. ' %
                    (self._DEVSERVER_HOSTLOG_REBOOT,
                     dev_server.AUTO_UPDATE_LOG_DIR))
        return err_msg


    def _timeout_error_message(self, desc, timeout, event_type=None):
        """Error message for when an event takes too long to fire."""
        if event_type is not None:
            desc += ' (%s)' % uee.get_event_type(event_type)
        return ('The update completed successfully but one of the steps of '
                'the update took longer than we would like. We failed to '
                'receive %s within %d seconds.' % (desc, timeout))


    def _stage_payload_by_uri(self, payload_uri):
        """Stage a payload based on its GS URI.

        This infers the build's label, filename and GS archive from the
        provided GS URI.

        @param payload_uri: The full GS URI of the payload.

        @return URL of the staged payload on the server.

        @raise error.TestError if there's a problem with staging.
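
        For example (illustrative values only), a payload URI such as
            gs://chromeos-image-archive/samus-release/R65-10225.0.0/<payload>
        is split into the archive URL (everything before the final '/'), the
        build name samus-release/R65-10225.0.0 and the payload filename.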

        """
        archive_url, _, filename = payload_uri.rpartition('/')
        build_name = urlparse.urlsplit(archive_url).path.strip('/')
        return self._stage_payload(build_name, filename,
                                   archive_url=archive_url)


    def _stage_payload(self, build_name, filename, archive_url=None):
        """Stage the given payload onto the devserver.

        Works for either a stateful or full/delta test payload. Expects the
        gs_path or a combo of build_name + filename.

        @param build_name: The build name e.g. x86-mario-release/<version>.
                           If set, assumes the default gs archive bucket and
                           requires filename to be specified.
        @param filename: In conjunction with build_name, this is the file
                         you are downloading.
        @param archive_url: An optional GS archive location, if not using
                            the devserver's default.

        @return URL of the staged payload on the server.

        @raise error.TestError if there's a problem with staging.

        """
        try:
            self._autotest_devserver.stage_artifacts(image=build_name,
                                                     files=[filename],
                                                     archive_url=archive_url)
            return self._autotest_devserver.get_staged_file_url(filename,
                                                                build_name)
        except dev_server.DevServerException as e:
            raise error.TestError('Failed to stage payload: %s' % e)


    def _get_payload_url(self, build=None, full_payload=True):
        """
        Gets the Google Storage URL of the full or delta payload for this
        build.

        @param build: build string e.g. samus-release/R65-10225.0.0.
        @param full_payload: True for a full payload. False for delta.

        @returns the payload URL.

        """
        if build is None:
            if self._job_repo_url is None:
                self._job_repo_url = self._get_job_repo_url()
            ds_url, build = tools.get_devserver_build_from_package_url(
                self._job_repo_url)
            self._autotest_devserver = dev_server.ImageServer(ds_url)

        gs = dev_server._get_image_storage_server()
        if full_payload:
            # Example: chromeos_R65-10225.0.0_samus_full_dev.bin
            regex = 'chromeos_%s*_full_*' % build.rpartition('/')[2]
        else:
            # Example: chromeos_R65-10225.0.0_R65-10225.0.0_samus_delta_dev.bin
            regex = 'chromeos_%s*_delta_*' % build.rpartition('/')[2]
        payload_url_regex = gs + build + '/' + regex
        logging.debug('Trying to find payloads at %s', payload_url_regex)
        payloads = utils.gs_ls(payload_url_regex)
        if not payloads:
            raise error.TestFail('Could not find a payload for %s' % build)
        logging.debug('Payloads found: %s', payloads)
        return payloads[0]


    def _get_staged_file_info(self, staged_url, retries=5):
        """
        Gets the staged file's info, which includes its SHA256 and size.

        @param staged_url: the staged file url.
        @param retries: Number of times to try to get the file info.

        @returns file info (SHA256 and size).

        """
        split_url = staged_url.rpartition('/static/')
        file_info_url = os.path.join(split_url[0], 'api/fileinfo',
                                     split_url[2])
        logging.info('file info url: %s', file_info_url)
        devserver_hostname = urlparse.urlparse(file_info_url).hostname
        cmd = 'ssh %s \'curl "%s"\'' % (devserver_hostname,
                                        utils.sh_escape(file_info_url))
        for i in range(retries):
            try:
                result = utils.run(cmd).stdout
                return json.loads(result)
            except error.CmdError as e:
                logging.error('Failed to read file info: %s', e)
        raise error.TestError('Could not reach fileinfo API on devserver.')


    def _get_job_repo_url(self):
        """Gets the job_repo_url argument supplied to the test by the lab."""
        if self._hosts is not None:
            self._host = self._hosts[0]
        if self._host is None:
            raise error.TestFail('No host specified by AU test.')
        info = self._host.host_info_store.get()
        return info.attributes.get(self._host.job_repo_url_attribute, '')


    def _copy_payload_to_public_bucket(self, payload_url):
        """
        Copy the payload and make the link public.

        @param payload_url: Payload URL on Google Storage.

        @returns The payload URL that is now publicly accessible.

        """
        payload_filename = payload_url.rpartition('/')[2]
        utils.run('gsutil cp %s %s' % (payload_url, self._CELLULAR_BUCKET))
        new_gs_url = self._CELLULAR_BUCKET + payload_filename
        utils.run('gsutil acl ch -u AllUsers:R %s' % new_gs_url)
        return new_gs_url.replace('gs://', 'https://storage.googleapis.com/')


    def _get_chromeos_version(self):
        """Read the ChromeOS version from /etc/lsb-release."""
        lsb = self._host.run('cat /etc/lsb-release').stdout
        return lsbrelease_utils.get_chromeos_release_version(lsb)


    def _check_for_cellular_entries_in_update_log(self,
                                                  update_engine_log=None):
        """
        Check update_engine.log for log entries about cellular.

        @param update_engine_log: The text of an update_engine.log file.

        """
        logging.info('Making sure we have cellular entries in update_engine '
                     'log.')
        line1 = ('Allowing updates over cellular as permission preference '
                 'is set to true.')
        line2 = 'We are connected via cellular, Updates allowed: Yes'
        for line in [line1, line2]:
            self._check_update_engine_log_for_entry(
                line, raise_error=True, update_engine_log=update_engine_log)


    def _disconnect_then_reconnect_network(self, update_url):
        """
        Disconnects the network for a couple of minutes then reconnects.

        @param update_url: A URL to use to check we are online.

        """
        self._run_client_test_and_check_result(
            'autoupdate_DisconnectReconnectNetwork', update_url=update_url)


    def _suspend_then_resume(self):
        """Suspends and resumes the host DUT."""
        try:
            self._host.suspend(suspend_time=30)
        except error.AutoservSuspendError:
            logging.exception('Suspend did not last the entire time.')


    def _run_client_test_and_check_result(self, test_name, **kwargs):
        """
        Kicks off a client autotest and checks that it didn't fail.

        @param test_name: client test name.
        @param **kwargs: key-value arguments to pass to the test.

        """
        client_at = autotest.Autotest(self._host)
        client_at.run_test(test_name, **kwargs)
        client_at._check_client_test_result(self._host, test_name)


    def _create_hostlog_files(self):
        """Create the two hostlog files for the update.

        To ensure the update was successful we need to compare the update
        events against the expected update events. There is one hostlog for
        the rootfs update and one for the post-reboot update check.
        """
        hostlog = self._omaha_devserver.get_hostlog(
            self._host.ip, wait_for_reboot_events=True)
        logging.info('Hostlog: %s', hostlog)

        # File names to save the hostlog events to.
        rootfs_hostlog = os.path.join(self.resultsdir, 'hostlog_rootfs')
        reboot_hostlog = os.path.join(self.resultsdir, 'hostlog_reboot')

        # Each time we reboot in the middle of an update we ping omaha again
        # for each update event. So parse the list backwards to get the
        # final events.
        with open(reboot_hostlog, 'w') as outfile:
            json.dump(hostlog[-1:], outfile)
        with open(rootfs_hostlog, 'w') as outfile:
            json.dump(
                hostlog[len(hostlog) - 1 - self._ROOTFS_HOSTLOG_EVENTS:-1],
                outfile)

        return rootfs_hostlog, reboot_hostlog


    def _set_active_p2p_host(self, host):
        """
        Choose which p2p host device to run commands on.

        For P2P tests with multiple DUTs we need to be able to choose which
        host within self._hosts we want to issue commands on.

        @param host: The host to run commands on.

        """
        self._create_update_engine_variables(host.run, host.get_file)


    def _change_cellular_setting_in_update_engine(self,
                                                  update_over_cellular=True):
        """
        Toggles the update_over_cellular setting in update_engine.

        @param update_over_cellular: True to enable, False to disable.

        """
        answer = 'yes' if update_over_cellular else 'no'
        cmd = 'update_engine_client --update_over_cellular=%s' % answer
        retry_util.RetryException(error.AutoservRunError, 2, self._run, cmd)


    def verify_update_events(self, source_release, hostlog_filename,
                             target_release=None):
        """Compares a hostlog file against a set of expected events.

        This is the main function of this class. It takes an expected source
        (and optionally target) version along with a hostlog file location.
        It generates the expected events based on that data and compares
        them against the events listed in the hostlog json file.
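
        @param source_release: the ChromeOS version the DUT was updating
                               from.
        @param hostlog_filename: path to the hostlog json file to verify.
        @param target_release: the version the DUT was updating to. If set,
                               we verify the single post-reboot event
                               instead of the rootfs update events.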
        """
        self._hostlog_events = []
        self._num_consumed_events = 0
        self._current_timestamp = None
        if target_release is not None:
            self._get_expected_event_for_post_reboot_check(source_release,
                                                           target_release)
        else:
            self._get_expected_events_for_rootfs_update(source_release)

        self._hostlog_filename = hostlog_filename
        logging.info('Checking update steps with hostlog file: %s',
                     self._hostlog_filename)

        for expected_event, timeout, on_timeout in self._expected_events:
            logging.info('Expecting %s within %s seconds', expected_event,
                         timeout)
            err_msg = self._verify_event_with_timeout(
                expected_event, timeout, on_timeout)
            if err_msg is not None:
                logging.error('Failed expected event: %s', err_msg)
                raise UpdateEngineEventMissing(err_msg)


    def get_update_url_for_test(self, job_repo_url, full_payload=True,
                                critical_update=False, max_updates=1,
                                public=False):
        """
        Get the correct update URL for autoupdate tests to use.

        There are a bunch of different update configurations required by AU
        tests. Some tests need a full payload, some need a delta payload.
        Some require the omaha response to be critical or to be able to
        handle multiple DUTs, etc. This function returns the correct update
        URL for the test based on the input parameters.

        Ideally all updates would use an existing lab devserver to handle
        the updates. However, the lab devservers' default setup does not
        work for all test needs, so we sometimes also kick off our own
        omaha_devserver for the test run.

        This function expects the test to have set self._host or
        self._hosts.

        @param job_repo_url: string url containing the current build.
        @param full_payload: bool whether we want a full payload.
        @param critical_update: bool whether we need a critical update.
        @param max_updates: int number of updates the test will perform.
                            This is passed to src/platform/dev/devserver.py
                            if we create our own devserver.
        @param public: url needs to be publicly accessible.

        @returns an update url string.

        """
        if job_repo_url is None:
            self._job_repo_url = self._get_job_repo_url()
        else:
            self._job_repo_url = job_repo_url
        if not self._job_repo_url:
            raise error.TestFail('There was no job_repo_url so we cannot '
                                 'get a payload to use.')
        ds_url, build = tools.get_devserver_build_from_package_url(
            self._job_repo_url)

        # We always stage the payloads on the existing lab devservers.
        self._autotest_devserver = dev_server.ImageServer(ds_url)

        if public:
            # Get the google storage url of the payload. We will be copying
            # the payload to a public google storage bucket (similar
            # location to updates via the autest command).
            payload_url = self._get_payload_url(build,
                                                full_payload=full_payload)
            url = self._copy_payload_to_public_bucket(payload_url)
            logging.info('Public update URL: %s', url)
            return url

        if full_payload:
            self._autotest_devserver.stage_artifacts(build, ['full_payload'])
            if not critical_update:
                # We can use the same lab devserver to handle the update.
                url = self._autotest_devserver.get_update_url(build)
                logging.info('Full payload, non-critical update URL: %s',
                             url)
                return url
            else:
                staged_url = self._autotest_devserver._get_image_url(build)
        else:
            # We need to stage the delta ourselves due to crbug.com/793434.
            delta_payload = self._get_payload_url(build, full_payload=False)
            staged_url = self._stage_payload_by_uri(delta_payload)

        # We need to start our own devserver for the rest of the cases.
        self._omaha_devserver = omaha_devserver.OmahaDevserver(
            self._autotest_devserver.hostname, staged_url,
            max_updates=max_updates, critical_update=critical_update)
        self._omaha_devserver.start_devserver()
        url = self._omaha_devserver.get_update_url()
        logging.info('Update URL: %s', url)
        return url


class UpdateEngineEventMissing(error.TestFail):
    """Raised if the hostlog is missing an expected event."""
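

# A rough usage sketch (illustrative only; the flow below is an assumption
# about how a typical server-side AU test drives this class, not part of
# this module):
#
#     update_url = self.get_update_url_for_test(job_repo_url,
#                                               full_payload=True,
#                                               critical_update=True)
#     # ...perform the update against update_url and reboot the DUT...
#     rootfs_hostlog, reboot_hostlog = self._create_hostlog_files()
#     self.verify_update_events(source_release, rootfs_hostlog)
#     self.verify_update_events(source_release, reboot_hostlog,
#                               target_release)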