1#!/usr/bin/python 2 3import logging, os, signal, unittest 4import common 5from autotest_lib.client.common_lib import enum, global_config, host_protections 6from autotest_lib.database import database_connection 7from autotest_lib.frontend import setup_django_environment 8from autotest_lib.frontend.afe import frontend_test_utils, models 9from autotest_lib.frontend.afe import model_attributes 10from autotest_lib.scheduler import drone_manager, email_manager 11from autotest_lib.scheduler import monitor_db, scheduler_models 12from autotest_lib.scheduler import scheduler_config 13from autotest_lib.scheduler import scheduler_lib 14 15HqeStatus = models.HostQueueEntry.Status 16HostStatus = models.Host.Status 17 18class NullMethodObject(object): 19 _NULL_METHODS = () 20 21 def __init__(self): 22 def null_method(*args, **kwargs): 23 pass 24 25 for method_name in self._NULL_METHODS: 26 setattr(self, method_name, null_method) 27 28# the SpecialTask names here must match the suffixes used on the SpecialTask 29# results directories 30_PidfileType = enum.Enum('verify', 'cleanup', 'repair', 'job', 'gather', 31 'parse', 'archive', 'reset', 'provision') 32 33 34_PIDFILE_TO_PIDFILE_TYPE = { 35 drone_manager.AUTOSERV_PID_FILE: _PidfileType.JOB, 36 drone_manager.CRASHINFO_PID_FILE: _PidfileType.GATHER, 37 drone_manager.PARSER_PID_FILE: _PidfileType.PARSE, 38 drone_manager.ARCHIVER_PID_FILE: _PidfileType.ARCHIVE, 39 } 40 41 42_PIDFILE_TYPE_TO_PIDFILE = dict((value, key) for key, value 43 in _PIDFILE_TO_PIDFILE_TYPE.iteritems()) 44 45 46class MockConnectionManager(object): 47 """docstring for MockConnectionManager""" 48 49 db = None 50 51 def __init__(self): 52 super(MockConnectionManager, self).__init__() 53 54 def get_connection(self): 55 assert MockConnectionManager.db 56 return MockConnectionManager.db 57 58 59class MockDroneManager(NullMethodObject): 60 """ 61 Public attributes: 62 max_runnable_processes_value: value returned by max_runnable_processes(). 63 tests can change this to activate throttling. 64 """ 65 _NULL_METHODS = ('reinitialize_drones', 'copy_to_results_repository', 66 'copy_results_on_drone', 'trigger_refresh', 'sync_refresh') 67 68 class _DummyPidfileId(object): 69 """ 70 Object to represent pidfile IDs that is opaque to the scheduler code but 71 still debugging-friendly for us. 72 """ 73 def __init__(self, working_directory, pidfile_name, num_processes=None): 74 self._working_directory = working_directory 75 self._pidfile_name = pidfile_name 76 self._num_processes = num_processes 77 self._paired_with_pidfile = None 78 79 80 def key(self): 81 """Key for MockDroneManager._pidfile_index""" 82 return (self._working_directory, self._pidfile_name) 83 84 85 def __str__(self): 86 return os.path.join(self._working_directory, self._pidfile_name) 87 88 89 def __repr__(self): 90 return '<_DummyPidfileId: %s>' % str(self) 91 92 93 def __init__(self): 94 super(MockDroneManager, self).__init__() 95 self.process_capacity = 100 96 97 # maps result_dir to set of tuples (file_path, file_contents) 98 self._attached_files = {} 99 # maps pidfile IDs to PidfileContents 100 self._pidfiles = {} 101 # pidfile IDs that haven't been created yet 102 self._future_pidfiles = [] 103 # maps _PidfileType to the most recently created pidfile ID of that type 104 self._last_pidfile_id = {} 105 # maps (working_directory, pidfile_name) to pidfile IDs 106 self._pidfile_index = {} 107 # maps process to pidfile IDs 108 self._process_index = {} 109 # tracks pidfiles of processes that have been killed 110 self._pids_to_signals_received = {} 111 # pidfile IDs that have just been unregistered (so will disappear on the 112 # next cycle) 113 self._unregistered_pidfiles = set() 114 # Pids to write exit status for at end of tick 115 self._set_pidfile_exit_status_queue = [] 116 117 # utility APIs for use by the test 118 119 def finish_process(self, pidfile_type, exit_status=0): 120 pidfile_id = self._last_pidfile_id[pidfile_type] 121 self._set_pidfile_exit_status(pidfile_id, exit_status) 122 123 124 def finish_specific_process(self, working_directory, pidfile_name): 125 pidfile_id = self.pidfile_from_path(working_directory, pidfile_name) 126 self._set_pidfile_exit_status(pidfile_id, 0) 127 128 def finish_active_process_on_host(self, host_id): 129 match = 'hosts/host%d/' % host_id 130 for pidfile_id in self.nonfinished_pidfile_ids(): 131 if pidfile_id._working_directory.startswith(match): 132 self._set_pidfile_exit_status(pidfile_id, 0) 133 break 134 else: 135 raise KeyError('No active process matched %s' % match) 136 137 def _set_pidfile_exit_status(self, pidfile_id, exit_status): 138 assert pidfile_id is not None 139 contents = self._pidfiles[pidfile_id] 140 contents.exit_status = exit_status 141 contents.num_tests_failed = 0 142 143 144 def was_last_process_killed(self, pidfile_type, sigs): 145 pidfile_id = self._last_pidfile_id[pidfile_type] 146 return sigs == self._pids_to_signals_received[pidfile_id] 147 148 149 def nonfinished_pidfile_ids(self): 150 return [pidfile_id for pidfile_id, pidfile_contents 151 in self._pidfiles.iteritems() 152 if pidfile_contents.exit_status is None] 153 154 155 def running_pidfile_ids(self): 156 return [pidfile_id for pidfile_id in self.nonfinished_pidfile_ids() 157 if self._pidfiles[pidfile_id].process is not None] 158 159 160 def pidfile_from_path(self, working_directory, pidfile_name): 161 return self._pidfile_index[(working_directory, pidfile_name)] 162 163 164 def attached_files(self, working_directory): 165 """ 166 Return dict mapping path to contents for attached files with specified 167 paths. 168 """ 169 return dict((path, contents) for path, contents 170 in self._attached_files.get(working_directory, []) 171 if path is not None) 172 173 174 # DroneManager emulation APIs for use by monitor_db 175 176 def get_orphaned_autoserv_processes(self): 177 return set() 178 179 180 def total_running_processes(self): 181 return sum(pidfile_id._num_processes 182 for pidfile_id in self.nonfinished_pidfile_ids()) 183 184 185 def max_runnable_processes(self, username, drone_hostnames_allowed): 186 return self.process_capacity - self.total_running_processes() 187 188 189 def refresh(self): 190 for pidfile_id in self._unregistered_pidfiles: 191 # intentionally handle non-registered pidfiles silently 192 self._pidfiles.pop(pidfile_id, None) 193 self._unregistered_pidfiles = set() 194 195 196 def execute_actions(self): 197 # executing an "execute_command" causes a pidfile to be created 198 for pidfile_id in self._future_pidfiles: 199 # Process objects are opaque to monitor_db 200 process = object() 201 self._pidfiles[pidfile_id].process = process 202 self._process_index[process] = pidfile_id 203 self._future_pidfiles = [] 204 205 for pidfile_id in self._set_pidfile_exit_status_queue: 206 self._set_pidfile_exit_status(pidfile_id, 271) 207 self._set_pidfile_exit_status_queue = [] 208 209 210 def attach_file_to_execution(self, result_dir, file_contents, 211 file_path=None): 212 self._attached_files.setdefault(result_dir, set()).add((file_path, 213 file_contents)) 214 return 'attach_path' 215 216 217 def _initialize_pidfile(self, pidfile_id): 218 if pidfile_id not in self._pidfiles: 219 assert pidfile_id.key() not in self._pidfile_index 220 self._pidfiles[pidfile_id] = drone_manager.PidfileContents() 221 self._pidfile_index[pidfile_id.key()] = pidfile_id 222 223 224 def _set_last_pidfile(self, pidfile_id, working_directory, pidfile_name): 225 if working_directory.startswith('hosts/'): 226 # such paths look like hosts/host1/1-verify, we'll grab the end 227 type_string = working_directory.rsplit('-', 1)[1] 228 pidfile_type = _PidfileType.get_value(type_string) 229 else: 230 pidfile_type = _PIDFILE_TO_PIDFILE_TYPE[pidfile_name] 231 self._last_pidfile_id[pidfile_type] = pidfile_id 232 233 234 def execute_command(self, command, working_directory, pidfile_name, 235 num_processes, log_file=None, paired_with_pidfile=None, 236 username=None, drone_hostnames_allowed=None): 237 logging.debug('Executing %s in %s', command, working_directory) 238 pidfile_id = self._DummyPidfileId(working_directory, pidfile_name) 239 if pidfile_id.key() in self._pidfile_index: 240 pidfile_id = self._pidfile_index[pidfile_id.key()] 241 pidfile_id._num_processes = num_processes 242 pidfile_id._paired_with_pidfile = paired_with_pidfile 243 244 self._future_pidfiles.append(pidfile_id) 245 self._initialize_pidfile(pidfile_id) 246 self._pidfile_index[(working_directory, pidfile_name)] = pidfile_id 247 self._set_last_pidfile(pidfile_id, working_directory, pidfile_name) 248 return pidfile_id 249 250 251 def get_pidfile_contents(self, pidfile_id, use_second_read=False): 252 if pidfile_id not in self._pidfiles: 253 logging.debug('Request for nonexistent pidfile %s' % pidfile_id) 254 return self._pidfiles.get(pidfile_id, drone_manager.PidfileContents()) 255 256 257 def is_process_running(self, process): 258 return True 259 260 261 def register_pidfile(self, pidfile_id): 262 self._initialize_pidfile(pidfile_id) 263 264 265 def unregister_pidfile(self, pidfile_id): 266 self._unregistered_pidfiles.add(pidfile_id) 267 268 269 def declare_process_count(self, pidfile_id, num_processes): 270 pidfile_id.num_processes = num_processes 271 272 273 def absolute_path(self, path): 274 return 'absolute/' + path 275 276 277 def write_lines_to_file(self, file_path, lines, paired_with_process=None): 278 # TODO: record this 279 pass 280 281 282 def get_pidfile_id_from(self, execution_tag, pidfile_name): 283 default_pidfile = self._DummyPidfileId(execution_tag, pidfile_name, 284 num_processes=0) 285 return self._pidfile_index.get((execution_tag, pidfile_name), 286 default_pidfile) 287 288 289 def kill_process(self, process, sig=signal.SIGKILL): 290 pidfile_id = self._process_index[process] 291 292 if pidfile_id not in self._pids_to_signals_received: 293 self._pids_to_signals_received[pidfile_id] = set() 294 self._pids_to_signals_received[pidfile_id].add(sig) 295 296 if signal.SIGKILL == sig: 297 self._set_pidfile_exit_status_queue.append(pidfile_id) 298 299 300class MockEmailManager(NullMethodObject): 301 _NULL_METHODS = ('send_queued_emails', 'send_email') 302 303 def enqueue_notify_email(self, subject, message): 304 logging.warning('enqueue_notify_email: %s', subject) 305 logging.warning(message) 306 307 308class SchedulerFunctionalTest(unittest.TestCase, 309 frontend_test_utils.FrontendTestMixin): 310 # some number of ticks after which the scheduler is presumed to have 311 # stabilized, given no external changes 312 _A_LOT_OF_TICKS = 10 313 314 def setUp(self): 315 self._frontend_common_setup() 316 self._set_stubs() 317 self._set_global_config_values() 318 self._create_dispatcher() 319 320 logging.basicConfig(level=logging.DEBUG) 321 322 323 def _create_dispatcher(self): 324 self.dispatcher = monitor_db.Dispatcher() 325 326 327 def tearDown(self): 328 self._database.disconnect() 329 self._frontend_common_teardown() 330 331 332 def _set_stubs(self): 333 self.mock_config = global_config.FakeGlobalConfig() 334 self.god.stub_with(global_config, 'global_config', self.mock_config) 335 336 self.mock_drone_manager = MockDroneManager() 337 drone_manager._set_instance(self.mock_drone_manager) 338 339 self.mock_email_manager = MockEmailManager() 340 self.god.stub_with(email_manager, 'manager', self.mock_email_manager) 341 342 self._database = ( 343 database_connection.TranslatingDatabase.get_test_database( 344 translators=scheduler_lib._DB_TRANSLATORS)) 345 self._database.connect(db_type='django') 346 self.god.stub_with(monitor_db, '_db', self._database) 347 self.god.stub_with(scheduler_models, '_db', self._database) 348 349 MockConnectionManager.db = self._database 350 scheduler_lib.ConnectionManager = MockConnectionManager 351 352 monitor_db.initialize_globals() 353 scheduler_models.initialize_globals() 354 355 356 def _set_global_config_values(self): 357 self.mock_config.set_config_value('SCHEDULER', 'pidfile_timeout_mins', 358 1) 359 self.mock_config.set_config_value('SCHEDULER', 'gc_stats_interval_mins', 360 999999) 361 self.mock_config.set_config_value('SCHEDULER', 'enable_archiving', True) 362 self.mock_config.set_config_value('SCHEDULER', 363 'clean_interval_minutes', 60) 364 self.mock_config.set_config_value('SCHEDULER', 365 'max_parse_processes', 50) 366 self.mock_config.set_config_value('SCHEDULER', 367 'max_transfer_processes', 50) 368 self.mock_config.set_config_value('SCHEDULER', 369 'clean_interval_minutes', 50) 370 self.mock_config.set_config_value('SCHEDULER', 371 'max_provision_retries', 1) 372 self.mock_config.set_config_value('SCHEDULER', 'max_repair_limit', 1) 373 self.mock_config.set_config_value( 374 'SCHEDULER', 'secs_to_wait_for_atomic_group_hosts', 600) 375 self.mock_config.set_config_value( 376 'SCHEDULER', 'inline_host_acquisition', True) 377 scheduler_config.config.read_config() 378 379 380 def _initialize_test(self): 381 self.dispatcher.initialize() 382 383 384 def _run_dispatcher(self): 385 for _ in xrange(self._A_LOT_OF_TICKS): 386 self.dispatcher.tick() 387 388 389 def test_idle(self): 390 self._initialize_test() 391 self._run_dispatcher() 392 393 394 def _assert_process_executed(self, working_directory, pidfile_name): 395 process_was_executed = self.mock_drone_manager.was_process_executed( 396 'hosts/host1/1-verify', drone_manager.AUTOSERV_PID_FILE) 397 self.assert_(process_was_executed, 398 '%s/%s not executed' % (working_directory, pidfile_name)) 399 400 401 def _update_instance(self, model_instance): 402 return type(model_instance).objects.get(pk=model_instance.pk) 403 404 405 def _check_statuses(self, queue_entry, queue_entry_status, 406 host_status=None): 407 self._check_entry_status(queue_entry, queue_entry_status) 408 if host_status: 409 self._check_host_status(queue_entry.host, host_status) 410 411 412 def _check_entry_status(self, queue_entry, status): 413 # update from DB 414 queue_entry = self._update_instance(queue_entry) 415 self.assertEquals(queue_entry.status, status) 416 417 418 def _check_host_status(self, host, status): 419 # update from DB 420 host = self._update_instance(host) 421 self.assertEquals(host.status, status) 422 423 424 def _run_pre_job_verify(self, queue_entry): 425 self._run_dispatcher() # launches verify 426 self._check_statuses(queue_entry, HqeStatus.VERIFYING, 427 HostStatus.VERIFYING) 428 self.mock_drone_manager.finish_process(_PidfileType.VERIFY) 429 430 431 def test_simple_job(self): 432 self._initialize_test() 433 job, queue_entry = self._make_job_and_queue_entry() 434 self._run_pre_job_verify(queue_entry) 435 self._run_dispatcher() # launches job 436 self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING) 437 self._finish_job(queue_entry) 438 self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY) 439 self._assert_nothing_is_running() 440 441 442 def _setup_for_pre_job_reset(self): 443 self._initialize_test() 444 job, queue_entry = self._make_job_and_queue_entry() 445 job.reboot_before = model_attributes.RebootBefore.ALWAYS 446 job.save() 447 return queue_entry 448 449 450 def _run_pre_job_reset_job(self, queue_entry): 451 self._run_dispatcher() # reset 452 self._check_statuses(queue_entry, HqeStatus.RESETTING, 453 HostStatus.RESETTING) 454 self.mock_drone_manager.finish_process(_PidfileType.RESET) 455 self._run_dispatcher() # job 456 self._finish_job(queue_entry) 457 458 459 def test_pre_job_reset(self): 460 queue_entry = self._setup_for_pre_job_reset() 461 self._run_pre_job_reset_job(queue_entry) 462 463 464 def _run_pre_job_reset_one_failure(self): 465 queue_entry = self._setup_for_pre_job_reset() 466 self._run_dispatcher() # reset 467 self.mock_drone_manager.finish_process(_PidfileType.RESET, 468 exit_status=256) 469 self._run_dispatcher() # repair 470 self._check_statuses(queue_entry, HqeStatus.QUEUED, 471 HostStatus.REPAIRING) 472 self.mock_drone_manager.finish_process(_PidfileType.REPAIR) 473 return queue_entry 474 475 476 def test_pre_job_reset_failure(self): 477 queue_entry = self._run_pre_job_reset_one_failure() 478 # from here the job should run as normal 479 self._run_pre_job_reset_job(queue_entry) 480 481 482 def test_pre_job_reset_double_failure(self): 483 # TODO (showard): this test isn't perfect. in reality, when the second 484 # reset fails, it copies its results over to the job directory using 485 # copy_results_on_drone() and then parses them. since we don't handle 486 # that, there appear to be no results at the job directory. the 487 # scheduler handles this gracefully, parsing gets effectively skipped, 488 # and this test passes as is. but we ought to properly test that 489 # behavior. 490 queue_entry = self._run_pre_job_reset_one_failure() 491 self._run_dispatcher() # second reset 492 self.mock_drone_manager.finish_process(_PidfileType.RESET, 493 exit_status=256) 494 self._run_dispatcher() 495 self._check_statuses(queue_entry, HqeStatus.FAILED, 496 HostStatus.REPAIR_FAILED) 497 # nothing else should run 498 self._assert_nothing_is_running() 499 500 501 def _assert_nothing_is_running(self): 502 self.assertEquals(self.mock_drone_manager.running_pidfile_ids(), []) 503 504 505 def _setup_for_post_job_cleanup(self): 506 self._initialize_test() 507 job, queue_entry = self._make_job_and_queue_entry() 508 job.reboot_after = model_attributes.RebootAfter.ALWAYS 509 job.save() 510 return queue_entry 511 512 513 def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry, 514 include_verify=True): 515 if include_verify: 516 self._run_pre_job_verify(queue_entry) 517 self._run_dispatcher() # job 518 self.mock_drone_manager.finish_process(_PidfileType.JOB) 519 self._run_dispatcher() # parsing + cleanup 520 self.mock_drone_manager.finish_process(_PidfileType.PARSE) 521 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP, 522 exit_status=256) 523 self._run_dispatcher() # repair, HQE unaffected 524 self.mock_drone_manager.finish_process(_PidfileType.ARCHIVE) 525 self._run_dispatcher() 526 return queue_entry 527 528 529 def test_post_job_cleanup_failure(self): 530 queue_entry = self._setup_for_post_job_cleanup() 531 self._run_post_job_cleanup_failure_up_to_repair(queue_entry) 532 self._check_statuses(queue_entry, HqeStatus.COMPLETED, 533 HostStatus.REPAIRING) 534 self.mock_drone_manager.finish_process(_PidfileType.REPAIR) 535 self._run_dispatcher() 536 self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY) 537 538 539 def test_post_job_cleanup_failure_repair_failure(self): 540 queue_entry = self._setup_for_post_job_cleanup() 541 self._run_post_job_cleanup_failure_up_to_repair(queue_entry) 542 self.mock_drone_manager.finish_process(_PidfileType.REPAIR, 543 exit_status=256) 544 self._run_dispatcher() 545 self._check_statuses(queue_entry, HqeStatus.COMPLETED, 546 HostStatus.REPAIR_FAILED) 547 548 549 def _ensure_post_job_process_is_paired(self, queue_entry, pidfile_type): 550 pidfile_name = _PIDFILE_TYPE_TO_PIDFILE[pidfile_type] 551 queue_entry = self._update_instance(queue_entry) 552 pidfile_id = self.mock_drone_manager.pidfile_from_path( 553 queue_entry.execution_path(), pidfile_name) 554 self.assert_(pidfile_id._paired_with_pidfile) 555 556 557 def _finish_job(self, queue_entry): 558 self._check_statuses(queue_entry, HqeStatus.RUNNING) 559 self.mock_drone_manager.finish_process(_PidfileType.JOB) 560 self._run_dispatcher() # launches parsing 561 self._check_statuses(queue_entry, HqeStatus.PARSING) 562 self._ensure_post_job_process_is_paired(queue_entry, _PidfileType.PARSE) 563 self._finish_parsing(queue_entry) 564 565 566 def _finish_parsing(self, queue_entry): 567 self.mock_drone_manager.finish_process(_PidfileType.PARSE) 568 self._run_dispatcher() 569 570 self._check_entry_status(queue_entry, HqeStatus.ARCHIVING) 571 self.mock_drone_manager.finish_process(_PidfileType.ARCHIVE) 572 self._run_dispatcher() 573 574 575 def _create_reverify_request(self): 576 host = self.hosts[0] 577 models.SpecialTask.schedule_special_task( 578 host=host, task=models.SpecialTask.Task.VERIFY) 579 return host 580 581 582 def test_requested_reverify(self): 583 host = self._create_reverify_request() 584 self._run_dispatcher() 585 self._check_host_status(host, HostStatus.VERIFYING) 586 self.mock_drone_manager.finish_process(_PidfileType.VERIFY) 587 self._run_dispatcher() 588 self._check_host_status(host, HostStatus.READY) 589 590 591 def test_requested_reverify_failure(self): 592 host = self._create_reverify_request() 593 self._run_dispatcher() 594 self.mock_drone_manager.finish_process(_PidfileType.VERIFY, 595 exit_status=256) 596 self._run_dispatcher() # repair 597 self._check_host_status(host, HostStatus.REPAIRING) 598 self.mock_drone_manager.finish_process(_PidfileType.REPAIR) 599 self._run_dispatcher() 600 self._check_host_status(host, HostStatus.READY) 601 602 603 def _setup_for_do_not_verify(self): 604 self._initialize_test() 605 job, queue_entry = self._make_job_and_queue_entry() 606 queue_entry.host.protection = host_protections.Protection.DO_NOT_VERIFY 607 queue_entry.host.save() 608 return queue_entry 609 610 611 def test_do_not_verify_job(self): 612 queue_entry = self._setup_for_do_not_verify() 613 self._run_dispatcher() # runs job directly 614 self._finish_job(queue_entry) 615 616 617 def test_do_not_verify_job_with_cleanup(self): 618 queue_entry = self._setup_for_do_not_verify() 619 queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS 620 queue_entry.job.save() 621 622 self._run_dispatcher() # cleanup 623 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP) 624 self._run_dispatcher() # job 625 self._finish_job(queue_entry) 626 627 628 def test_do_not_verify_pre_job_cleanup_failure(self): 629 queue_entry = self._setup_for_do_not_verify() 630 queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS 631 queue_entry.job.save() 632 633 self._run_dispatcher() # cleanup 634 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP, 635 exit_status=256) 636 self._run_dispatcher() # failure ignored; job runs 637 self._finish_job(queue_entry) 638 639 640 def test_do_not_verify_post_job_cleanup_failure(self): 641 queue_entry = self._setup_for_do_not_verify() 642 queue_entry.job.reboot_after = model_attributes.RebootAfter.ALWAYS 643 queue_entry.job.save() 644 645 self._run_post_job_cleanup_failure_up_to_repair(queue_entry, 646 include_verify=False) 647 # failure ignored, host still set to Ready 648 self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY) 649 self._run_dispatcher() # nothing else runs 650 self._assert_nothing_is_running() 651 652 653 def test_do_not_verify_requested_reverify_failure(self): 654 host = self._create_reverify_request() 655 host.protection = host_protections.Protection.DO_NOT_VERIFY 656 host.save() 657 658 self._run_dispatcher() 659 self.mock_drone_manager.finish_process(_PidfileType.VERIFY, 660 exit_status=256) 661 self._run_dispatcher() 662 self._check_host_status(host, HostStatus.READY) # ignore failure 663 self._assert_nothing_is_running() 664 665 666 def test_job_abort_in_verify(self): 667 self._initialize_test() 668 job = self._create_job(hosts=[1]) 669 queue_entries = list(job.hostqueueentry_set.all()) 670 self._run_dispatcher() # launches verify 671 self._check_statuses(queue_entries[0], HqeStatus.VERIFYING) 672 job.hostqueueentry_set.update(aborted=True) 673 self._run_dispatcher() # kills verify, launches cleanup 674 self.assert_(self.mock_drone_manager.was_last_process_killed( 675 _PidfileType.VERIFY, set([signal.SIGKILL]))) 676 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP) 677 self._run_dispatcher() 678 679 680 def test_job_abort(self): 681 self._initialize_test() 682 job = self._create_job(hosts=[1]) 683 job.run_reset = False 684 job.save() 685 queue_entries = list(job.hostqueueentry_set.all()) 686 687 self._run_dispatcher() # launches job 688 689 self._check_statuses(queue_entries[0], HqeStatus.RUNNING) 690 691 job.hostqueueentry_set.update(aborted=True) 692 693 self._run_dispatcher() # kills job, launches gathering 694 695 self._check_statuses(queue_entries[0], HqeStatus.GATHERING) 696 self.mock_drone_manager.finish_process(_PidfileType.GATHER) 697 self._run_dispatcher() # launches parsing + cleanup 698 queue_entry = job.hostqueueentry_set.all()[0] 699 self._finish_parsing(queue_entry) 700 # The abort will cause gathering to launch a cleanup. 701 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP) 702 self._run_dispatcher() 703 704 705 def test_job_abort_queued_synchronous(self): 706 self._initialize_test() 707 job = self._create_job(hosts=[1,2]) 708 job.synch_count = 2 709 job.save() 710 711 job.hostqueueentry_set.update(aborted=True) 712 self._run_dispatcher() 713 for host_queue_entry in job.hostqueueentry_set.all(): 714 self.assertEqual(host_queue_entry.status, 715 HqeStatus.ABORTED) 716 717 718 def test_no_pidfile_leaking(self): 719 self._initialize_test() 720 721 self.test_simple_job() 722 self.mock_drone_manager.refresh() 723 self.assertEquals(self.mock_drone_manager._pidfiles, {}) 724 725 self.test_job_abort_in_verify() 726 self.mock_drone_manager.refresh() 727 self.assertEquals(self.mock_drone_manager._pidfiles, {}) 728 729 self.test_job_abort() 730 self.mock_drone_manager.refresh() 731 self.assertEquals(self.mock_drone_manager._pidfiles, {}) 732 733 734 def _make_job_and_queue_entry(self): 735 job = self._create_job(hosts=[1]) 736 queue_entry = job.hostqueueentry_set.all()[0] 737 return job, queue_entry 738 739 740 def test_recover_running_no_process(self): 741 # recovery should re-execute a Running HQE if no process is found 742 _, queue_entry = self._make_job_and_queue_entry() 743 queue_entry.status = HqeStatus.RUNNING 744 queue_entry.execution_subdir = '1-myuser/host1' 745 queue_entry.save() 746 queue_entry.host.status = HostStatus.RUNNING 747 queue_entry.host.save() 748 749 self._initialize_test() 750 self._run_dispatcher() 751 self._finish_job(queue_entry) 752 753 754 def test_recover_verifying_hqe_no_special_task(self): 755 # recovery should move a Resetting HQE with no corresponding 756 # Verify or Reset SpecialTask back to Queued. 757 _, queue_entry = self._make_job_and_queue_entry() 758 queue_entry.status = HqeStatus.RESETTING 759 queue_entry.save() 760 761 # make some dummy SpecialTasks that shouldn't count 762 models.SpecialTask.objects.create( 763 host=queue_entry.host, 764 task=models.SpecialTask.Task.RESET, 765 requested_by=models.User.current_user()) 766 models.SpecialTask.objects.create( 767 host=queue_entry.host, 768 task=models.SpecialTask.Task.CLEANUP, 769 queue_entry=queue_entry, 770 is_complete=True, 771 requested_by=models.User.current_user()) 772 773 self._initialize_test() 774 self._check_statuses(queue_entry, HqeStatus.QUEUED) 775 776 777 def _test_recover_verifying_hqe_helper(self, task, pidfile_type): 778 _, queue_entry = self._make_job_and_queue_entry() 779 queue_entry.status = HqeStatus.VERIFYING 780 queue_entry.save() 781 782 special_task = models.SpecialTask.objects.create( 783 host=queue_entry.host, task=task, queue_entry=queue_entry) 784 785 self._initialize_test() 786 self._run_dispatcher() 787 self.mock_drone_manager.finish_process(pidfile_type) 788 self._run_dispatcher() 789 # don't bother checking the rest of the job execution, as long as the 790 # SpecialTask ran 791 792 793 def test_recover_verifying_hqe_with_cleanup(self): 794 # recover an HQE that was in pre-job cleanup 795 self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.CLEANUP, 796 _PidfileType.CLEANUP) 797 798 799 def test_recover_verifying_hqe_with_verify(self): 800 # recover an HQE that was in pre-job verify 801 self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.VERIFY, 802 _PidfileType.VERIFY) 803 804 805 def test_recover_parsing(self): 806 self._initialize_test() 807 job, queue_entry = self._make_job_and_queue_entry() 808 job.run_verify = False 809 job.run_reset = False 810 job.reboot_after = model_attributes.RebootAfter.NEVER 811 job.save() 812 813 self._run_dispatcher() # launches job 814 self.mock_drone_manager.finish_process(_PidfileType.JOB) 815 self._run_dispatcher() # launches parsing 816 817 # now "restart" the scheduler 818 self._create_dispatcher() 819 self._initialize_test() 820 self._run_dispatcher() 821 self.mock_drone_manager.finish_process(_PidfileType.PARSE) 822 self._run_dispatcher() 823 824 825 def test_recover_parsing__no_process_already_aborted(self): 826 _, queue_entry = self._make_job_and_queue_entry() 827 queue_entry.execution_subdir = 'host1' 828 queue_entry.status = HqeStatus.PARSING 829 queue_entry.aborted = True 830 queue_entry.save() 831 832 self._initialize_test() 833 self._run_dispatcher() 834 835 836 def test_job_scheduled_just_after_abort(self): 837 # test a pretty obscure corner case where a job is aborted while queued, 838 # another job is ready to run, and throttling is active. the post-abort 839 # cleanup must not be pre-empted by the second job. 840 # This test kind of doesn't make sense anymore after verify+cleanup 841 # were merged into reset. It should maybe just be removed. 842 job1, queue_entry1 = self._make_job_and_queue_entry() 843 queue_entry1.save() 844 job2, queue_entry2 = self._make_job_and_queue_entry() 845 job2.reboot_before = model_attributes.RebootBefore.IF_DIRTY 846 job2.save() 847 848 self.mock_drone_manager.process_capacity = 0 849 self._run_dispatcher() # schedule job1, but won't start verify 850 job1.hostqueueentry_set.update(aborted=True) 851 self.mock_drone_manager.process_capacity = 100 852 self._run_dispatcher() # reset must run here, not verify for job2 853 self._check_statuses(queue_entry1, HqeStatus.ABORTED, 854 HostStatus.RESETTING) 855 self.mock_drone_manager.finish_process(_PidfileType.RESET) 856 self._run_dispatcher() # now verify starts for job2 857 self._check_statuses(queue_entry2, HqeStatus.RUNNING, 858 HostStatus.RUNNING) 859 860 861 def test_reverify_interrupting_pre_job(self): 862 # ensure things behave sanely if a reverify is scheduled in the middle 863 # of pre-job actions 864 _, queue_entry = self._make_job_and_queue_entry() 865 866 self._run_dispatcher() # pre-job verify 867 self._create_reverify_request() 868 self.mock_drone_manager.finish_process(_PidfileType.VERIFY, 869 exit_status=256) 870 self._run_dispatcher() # repair 871 self.mock_drone_manager.finish_process(_PidfileType.REPAIR) 872 self._run_dispatcher() # reverify runs now 873 self.mock_drone_manager.finish_process(_PidfileType.VERIFY) 874 self._run_dispatcher() # pre-job verify 875 self.mock_drone_manager.finish_process(_PidfileType.VERIFY) 876 self._run_dispatcher() # and job runs... 877 self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING) 878 self._finish_job(queue_entry) # reverify has been deleted 879 self._check_statuses(queue_entry, HqeStatus.COMPLETED, 880 HostStatus.READY) 881 self._assert_nothing_is_running() 882 883 884 def test_reverify_while_job_running(self): 885 # once a job is running, a reverify must not be allowed to preempt 886 # Gathering 887 _, queue_entry = self._make_job_and_queue_entry() 888 self._run_pre_job_verify(queue_entry) 889 self._run_dispatcher() # job runs 890 self._create_reverify_request() 891 # make job end with a signal, so gathering will run 892 self.mock_drone_manager.finish_process(_PidfileType.JOB, 893 exit_status=271) 894 self._run_dispatcher() # gathering must start 895 self.mock_drone_manager.finish_process(_PidfileType.GATHER) 896 self._run_dispatcher() # parsing and cleanup 897 self._finish_parsing(queue_entry) 898 self._run_dispatcher() # now reverify runs 899 self._check_statuses(queue_entry, HqeStatus.FAILED, 900 HostStatus.VERIFYING) 901 self.mock_drone_manager.finish_process(_PidfileType.VERIFY) 902 self._run_dispatcher() 903 self._check_host_status(queue_entry.host, HostStatus.READY) 904 905 906 def test_reverify_while_host_pending(self): 907 # ensure that if a reverify is scheduled while a host is in Pending, it 908 # won't run until the host is actually free 909 job = self._create_job(hosts=[1,2]) 910 queue_entry = job.hostqueueentry_set.get(host__hostname='host1') 911 job.synch_count = 2 912 job.save() 913 914 host2 = self.hosts[1] 915 host2.locked = True 916 host2.save() 917 918 self._run_dispatcher() # verify host1 919 self.mock_drone_manager.finish_process(_PidfileType.VERIFY) 920 self._run_dispatcher() # host1 Pending 921 self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING) 922 self._create_reverify_request() 923 self._run_dispatcher() # nothing should happen here 924 self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING) 925 926 # now let the job run 927 host2.locked = False 928 host2.save() 929 self._run_dispatcher() # verify host2 930 self.mock_drone_manager.finish_process(_PidfileType.VERIFY) 931 self._run_dispatcher() # run job 932 self._finish_job(queue_entry) 933 # the reverify should now be running 934 self._check_statuses(queue_entry, HqeStatus.COMPLETED, 935 HostStatus.VERIFYING) 936 self.mock_drone_manager.finish_process(_PidfileType.VERIFY) 937 self._run_dispatcher() 938 self._check_host_status(queue_entry.host, HostStatus.READY) 939 940 941 def test_throttling(self): 942 job = self._create_job(hosts=[1,2,3]) 943 job.synch_count = 3 944 job.save() 945 946 queue_entries = list(job.hostqueueentry_set.all()) 947 def _check_hqe_statuses(*statuses): 948 for queue_entry, status in zip(queue_entries, statuses): 949 self._check_statuses(queue_entry, status) 950 951 self.mock_drone_manager.process_capacity = 2 952 self._run_dispatcher() # verify runs on 1 and 2 953 queue_entries = list(job.hostqueueentry_set.all()) 954 _check_hqe_statuses(HqeStatus.QUEUED, 955 HqeStatus.VERIFYING, HqeStatus.VERIFYING) 956 self.assertEquals(len(self.mock_drone_manager.running_pidfile_ids()), 2) 957 958 self.mock_drone_manager.finish_specific_process( 959 'hosts/host3/1-verify', drone_manager.AUTOSERV_PID_FILE) 960 self.mock_drone_manager.finish_process(_PidfileType.VERIFY) 961 self._run_dispatcher() # verify runs on 3 962 _check_hqe_statuses(HqeStatus.VERIFYING, HqeStatus.PENDING, 963 HqeStatus.PENDING) 964 965 self.mock_drone_manager.finish_process(_PidfileType.VERIFY) 966 self._run_dispatcher() # job won't run due to throttling 967 _check_hqe_statuses(HqeStatus.STARTING, HqeStatus.STARTING, 968 HqeStatus.STARTING) 969 self._assert_nothing_is_running() 970 971 self.mock_drone_manager.process_capacity = 3 972 self._run_dispatcher() # now job runs 973 _check_hqe_statuses(HqeStatus.RUNNING, HqeStatus.RUNNING, 974 HqeStatus.RUNNING) 975 976 self.mock_drone_manager.process_capacity = 2 977 self.mock_drone_manager.finish_process(_PidfileType.JOB, 978 exit_status=271) 979 self._run_dispatcher() # gathering won't run due to throttling 980 _check_hqe_statuses(HqeStatus.GATHERING, HqeStatus.GATHERING, 981 HqeStatus.GATHERING) 982 self._assert_nothing_is_running() 983 984 self.mock_drone_manager.process_capacity = 3 985 self._run_dispatcher() # now gathering runs 986 987 self.mock_drone_manager.process_capacity = 0 988 self.mock_drone_manager.finish_process(_PidfileType.GATHER) 989 self._run_dispatcher() # parsing runs despite throttling 990 _check_hqe_statuses(HqeStatus.PARSING, HqeStatus.PARSING, 991 HqeStatus.PARSING) 992 993 994 def test_abort_starting_while_throttling(self): 995 self._initialize_test() 996 job = self._create_job(hosts=[1,2], synchronous=True) 997 queue_entry = job.hostqueueentry_set.all()[0] 998 job.run_verify = False 999 job.run_reset = False 1000 job.reboot_after = model_attributes.RebootAfter.NEVER 1001 job.save() 1002 1003 self.mock_drone_manager.process_capacity = 0 1004 self._run_dispatcher() # go to starting, but don't start job 1005 self._check_statuses(queue_entry, HqeStatus.STARTING, 1006 HostStatus.PENDING) 1007 1008 job.hostqueueentry_set.update(aborted=True) 1009 self._run_dispatcher() 1010 self._check_statuses(queue_entry, HqeStatus.GATHERING, 1011 HostStatus.RUNNING) 1012 1013 self.mock_drone_manager.process_capacity = 5 1014 self._run_dispatcher() 1015 self._check_statuses(queue_entry, HqeStatus.ABORTED, 1016 HostStatus.CLEANING) 1017 1018 1019 def test_simple_metahost_assignment(self): 1020 job = self._create_job(metahosts=[1]) 1021 self._run_dispatcher() 1022 entry = job.hostqueueentry_set.all()[0] 1023 self.assertEquals(entry.host.hostname, 'host1') 1024 self._check_statuses(entry, HqeStatus.VERIFYING, HostStatus.VERIFYING) 1025 self.mock_drone_manager.finish_process(_PidfileType.VERIFY) 1026 self._run_dispatcher() 1027 self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING) 1028 # rest of job proceeds normally 1029 1030 1031 def test_metahost_fail_verify(self): 1032 self.hosts[1].labels.add(self.labels[0]) # put label1 also on host2 1033 job = self._create_job(metahosts=[1]) 1034 self._run_dispatcher() # assigned to host1 1035 self.mock_drone_manager.finish_process(_PidfileType.VERIFY, 1036 exit_status=256) 1037 self._run_dispatcher() # host1 failed, gets reassigned to host2 1038 entry = job.hostqueueentry_set.all()[0] 1039 self.assertEquals(entry.host.hostname, 'host2') 1040 self._check_statuses(entry, HqeStatus.VERIFYING, HostStatus.VERIFYING) 1041 self._check_host_status(self.hosts[0], HostStatus.REPAIRING) 1042 1043 self.mock_drone_manager.finish_process(_PidfileType.VERIFY) 1044 self._run_dispatcher() 1045 self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING) 1046 1047 1048 def test_hostless_job(self): 1049 job = self._create_job(hostless=True) 1050 entry = job.hostqueueentry_set.all()[0] 1051 1052 self._run_dispatcher() 1053 self._check_entry_status(entry, HqeStatus.RUNNING) 1054 1055 self.mock_drone_manager.finish_process(_PidfileType.JOB) 1056 self._run_dispatcher() 1057 self._check_entry_status(entry, HqeStatus.PARSING) 1058 self.mock_drone_manager.finish_process(_PidfileType.PARSE) 1059 self._run_dispatcher() 1060 self._check_entry_status(entry, HqeStatus.ARCHIVING) 1061 self.mock_drone_manager.finish_process(_PidfileType.ARCHIVE) 1062 self._run_dispatcher() 1063 self._check_entry_status(entry, HqeStatus.COMPLETED) 1064 1065 1066 def test_pre_job_keyvals(self): 1067 job = self._create_job(hosts=[1]) 1068 job.run_verify = False 1069 job.run_reset = False 1070 job.reboot_before = model_attributes.RebootBefore.NEVER 1071 job.save() 1072 models.JobKeyval.objects.create(job=job, key='mykey', value='myvalue') 1073 1074 self._run_dispatcher() 1075 self._finish_job(job.hostqueueentry_set.all()[0]) 1076 1077 attached_files = self.mock_drone_manager.attached_files( 1078 '1-autotest_system/host1') 1079 job_keyval_path = '1-autotest_system/host1/keyval' 1080 self.assert_(job_keyval_path in attached_files, attached_files) 1081 keyval_contents = attached_files[job_keyval_path] 1082 keyval_dict = dict(line.strip().split('=', 1) 1083 for line in keyval_contents.splitlines()) 1084 self.assert_('job_queued' in keyval_dict, keyval_dict) 1085 self.assertEquals(keyval_dict['mykey'], 'myvalue') 1086 1087 1088# This tests the scheduler functions with archiving step disabled 1089class SchedulerFunctionalTestNoArchiving(SchedulerFunctionalTest): 1090 def _set_global_config_values(self): 1091 super(SchedulerFunctionalTestNoArchiving, self 1092 )._set_global_config_values() 1093 self.mock_config.set_config_value('SCHEDULER', 'enable_archiving', 1094 False) 1095 1096 1097 def _finish_parsing(self, queue_entry): 1098 self.mock_drone_manager.finish_process(_PidfileType.PARSE) 1099 self._run_dispatcher() 1100 1101 1102 def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry, 1103 include_verify=True): 1104 if include_verify: 1105 self._run_pre_job_verify(queue_entry) 1106 self._run_dispatcher() # job 1107 self.mock_drone_manager.finish_process(_PidfileType.JOB) 1108 self._run_dispatcher() # parsing + cleanup 1109 self.mock_drone_manager.finish_process(_PidfileType.PARSE) 1110 self.mock_drone_manager.finish_process(_PidfileType.CLEANUP, 1111 exit_status=256) 1112 self._run_dispatcher() # repair, HQE unaffected 1113 return queue_entry 1114 1115 1116 def test_hostless_job(self): 1117 job = self._create_job(hostless=True) 1118 entry = job.hostqueueentry_set.all()[0] 1119 1120 self._run_dispatcher() 1121 self._check_entry_status(entry, HqeStatus.RUNNING) 1122 1123 self.mock_drone_manager.finish_process(_PidfileType.JOB) 1124 self._run_dispatcher() 1125 self._check_entry_status(entry, HqeStatus.PARSING) 1126 self.mock_drone_manager.finish_process(_PidfileType.PARSE) 1127 self._run_dispatcher() 1128 self._check_entry_status(entry, HqeStatus.COMPLETED) 1129 1130 def test_synchronous_with_reset(self): 1131 # For crbug/621257. 1132 job = self._create_job(hosts=[1, 2]) 1133 job.synch_count = 2 1134 job.reboot_before = model_attributes.RebootBefore.ALWAYS 1135 job.save() 1136 1137 hqe1 = job.hostqueueentry_set.get(host__hostname='host1') 1138 hqe2 = job.hostqueueentry_set.get(host__hostname='host2') 1139 1140 self._run_dispatcher() 1141 1142 self._check_statuses(hqe1, HqeStatus.RESETTING, HostStatus.RESETTING) 1143 self._check_statuses(hqe2, HqeStatus.RESETTING, HostStatus.RESETTING) 1144 1145 self.mock_drone_manager.finish_active_process_on_host(1) 1146 self._run_dispatcher() 1147 1148 self._check_statuses(hqe1, HqeStatus.PENDING, HostStatus.PENDING) 1149 self._check_statuses(hqe2, HqeStatus.RESETTING, HostStatus.RESETTING) 1150 1151 self.mock_drone_manager.finish_active_process_on_host(2) 1152 self._run_dispatcher() 1153 1154 self._check_statuses(hqe1, HqeStatus.RUNNING, HostStatus.RUNNING) 1155 self._check_statuses(hqe2, HqeStatus.RUNNING, HostStatus.RUNNING) 1156 1157 1158if __name__ == '__main__': 1159 unittest.main() 1160