#!/usr/bin/python

"""Functional tests for the Autotest scheduler (monitor_db), driven against a
test database with the drone manager, email manager and global config replaced
by mocks."""

import logging, os, signal, unittest
import common
from autotest_lib.client.common_lib import enum, global_config, host_protections
from autotest_lib.database import database_connection
from autotest_lib.frontend import setup_django_environment
from autotest_lib.frontend.afe import frontend_test_utils, models
from autotest_lib.frontend.afe import model_attributes
from autotest_lib.scheduler import drone_manager, email_manager, host_scheduler
from autotest_lib.scheduler import monitor_db, scheduler_models
from autotest_lib.scheduler import scheduler_config
from autotest_lib.scheduler import scheduler_lib

HqeStatus = models.HostQueueEntry.Status
HostStatus = models.Host.Status


class NullMethodObject(object):
    _NULL_METHODS = ()

    def __init__(self):
        def null_method(*args, **kwargs):
            pass

        for method_name in self._NULL_METHODS:
            setattr(self, method_name, null_method)


class MockGlobalConfig(object):
    def __init__(self):
        self._config_info = {}


    def set_config_value(self, section, key, value):
        self._config_info[(section, key)] = value


    def get_config_value(self, section, key, type=str,
                         default=None, allow_blank=False):
        identifier = (section, key)
        if identifier not in self._config_info:
            return default
        return self._config_info[identifier]


    def parse_config_file(self):
        pass


# the SpecialTask names here must match the suffixes used on the SpecialTask
# results directories
_PidfileType = enum.Enum('verify', 'cleanup', 'repair', 'job', 'gather',
                         'parse', 'archive', 'reset', 'provision')


_PIDFILE_TO_PIDFILE_TYPE = {
        drone_manager.AUTOSERV_PID_FILE: _PidfileType.JOB,
        drone_manager.CRASHINFO_PID_FILE: _PidfileType.GATHER,
        drone_manager.PARSER_PID_FILE: _PidfileType.PARSE,
        drone_manager.ARCHIVER_PID_FILE: _PidfileType.ARCHIVE,
        }


_PIDFILE_TYPE_TO_PIDFILE = dict((value, key) for key, value
                                in _PIDFILE_TO_PIDFILE_TYPE.iteritems())


class MockConnectionManager(object):
    """Mock of scheduler_lib.ConnectionManager that hands out the test DB."""

    db = None

    def __init__(self):
        super(MockConnectionManager, self).__init__()

    def get_connection(self):
        assert MockConnectionManager.db
        return MockConnectionManager.db


class MockDroneManager(NullMethodObject):
    """
    Public attributes:
    process_capacity: maximum number of running processes allowed by
            max_runnable_processes(). Tests can lower this to activate
            throttling.
    """
    _NULL_METHODS = ('reinitialize_drones', 'copy_to_results_repository',
                     'copy_results_on_drone', 'trigger_refresh', 'sync_refresh')

    class _DummyPidfileId(object):
        """
        Object representing a pidfile ID; opaque to the scheduler code but
        still debugging-friendly for us.
        """
        def __init__(self, working_directory, pidfile_name, num_processes=None):
            self._working_directory = working_directory
            self._pidfile_name = pidfile_name
            self._num_processes = num_processes
            self._paired_with_pidfile = None


        def key(self):
            """Key for MockDroneManager._pidfile_index"""
            return (self._working_directory, self._pidfile_name)


        def __str__(self):
            return os.path.join(self._working_directory, self._pidfile_name)


        def __repr__(self):
            return '<_DummyPidfileId: %s>' % str(self)


    def __init__(self):
        super(MockDroneManager, self).__init__()
        self.process_capacity = 100

        # maps result_dir to set of tuples (file_path, file_contents)
        self._attached_files = {}
        # maps pidfile IDs to PidfileContents
        self._pidfiles = {}
        # pidfile IDs that haven't been created yet
        self._future_pidfiles = []
        # maps _PidfileType to the most recently created pidfile ID of that type
        self._last_pidfile_id = {}
        # maps (working_directory, pidfile_name) to pidfile IDs
        self._pidfile_index = {}
        # maps process to pidfile IDs
        self._process_index = {}
        # maps pidfile IDs to the set of signals received by their processes
        self._pids_to_signals_received = {}
        # pidfile IDs that have just been unregistered (so will disappear on the
        # next cycle)
        self._unregistered_pidfiles = set()
        # pidfile IDs to write an exit status for at the end of the tick
        self._set_pidfile_exit_status_queue = []

    # utility APIs for use by the test

    def finish_process(self, pidfile_type, exit_status=0):
        pidfile_id = self._last_pidfile_id[pidfile_type]
        self._set_pidfile_exit_status(pidfile_id, exit_status)


    def finish_specific_process(self, working_directory, pidfile_name):
        pidfile_id = self.pidfile_from_path(working_directory, pidfile_name)
        self._set_pidfile_exit_status(pidfile_id, 0)


    def _set_pidfile_exit_status(self, pidfile_id, exit_status):
        assert pidfile_id is not None
        contents = self._pidfiles[pidfile_id]
        contents.exit_status = exit_status
        contents.num_tests_failed = 0


    def was_last_process_killed(self, pidfile_type, sigs):
        pidfile_id = self._last_pidfile_id[pidfile_type]
        return sigs == self._pids_to_signals_received[pidfile_id]


    def nonfinished_pidfile_ids(self):
        return [pidfile_id for pidfile_id, pidfile_contents
                in self._pidfiles.iteritems()
                if pidfile_contents.exit_status is None]


    def running_pidfile_ids(self):
        return [pidfile_id for pidfile_id in self.nonfinished_pidfile_ids()
                if self._pidfiles[pidfile_id].process is not None]


    def pidfile_from_path(self, working_directory, pidfile_name):
        return self._pidfile_index[(working_directory, pidfile_name)]


    def attached_files(self, working_directory):
        """
        Return a dict mapping file path to contents for files attached to the
        given working directory.
        """
        return dict((path, contents) for path, contents
                    in self._attached_files.get(working_directory, [])
                    if path is not None)


    # DroneManager emulation APIs for use by monitor_db

    def get_orphaned_autoserv_processes(self):
        return set()


    def total_running_processes(self):
        return sum(pidfile_id._num_processes
                   for pidfile_id in self.nonfinished_pidfile_ids())


    def max_runnable_processes(self, username, drone_hostnames_allowed):
        return self.process_capacity - self.total_running_processes()


    def refresh(self):
        for pidfile_id in self._unregistered_pidfiles:
            # intentionally handle non-registered pidfiles silently
            self._pidfiles.pop(pidfile_id, None)
        self._unregistered_pidfiles = set()


    def execute_actions(self):
        # executing an "execute_command" causes a pidfile to be created
        for pidfile_id in self._future_pidfiles:
            # Process objects are opaque to monitor_db
            process = object()
            self._pidfiles[pidfile_id].process = process
            self._process_index[process] = pidfile_id
        self._future_pidfiles = []

        for pidfile_id in self._set_pidfile_exit_status_queue:
            self._set_pidfile_exit_status(pidfile_id, 271)
        self._set_pidfile_exit_status_queue = []


    def attach_file_to_execution(self, result_dir, file_contents,
                                 file_path=None):
        self._attached_files.setdefault(result_dir, set()).add((file_path,
                                                                file_contents))
        return 'attach_path'


    def _initialize_pidfile(self, pidfile_id):
        if pidfile_id not in self._pidfiles:
            assert pidfile_id.key() not in self._pidfile_index
            self._pidfiles[pidfile_id] = drone_manager.PidfileContents()
            self._pidfile_index[pidfile_id.key()] = pidfile_id


    def _set_last_pidfile(self, pidfile_id, working_directory, pidfile_name):
        if working_directory.startswith('hosts/'):
            # such paths look like hosts/host1/1-verify, we'll grab the end
            type_string = working_directory.rsplit('-', 1)[1]
            pidfile_type = _PidfileType.get_value(type_string)
        else:
            pidfile_type = _PIDFILE_TO_PIDFILE_TYPE[pidfile_name]
        self._last_pidfile_id[pidfile_type] = pidfile_id


    def execute_command(self, command, working_directory, pidfile_name,
                        num_processes, log_file=None, paired_with_pidfile=None,
                        username=None, drone_hostnames_allowed=None):
        logging.debug('Executing %s in %s', command, working_directory)
        pidfile_id = self._DummyPidfileId(working_directory, pidfile_name)
        if pidfile_id.key() in self._pidfile_index:
            pidfile_id = self._pidfile_index[pidfile_id.key()]
        pidfile_id._num_processes = num_processes
        pidfile_id._paired_with_pidfile = paired_with_pidfile

        self._future_pidfiles.append(pidfile_id)
        self._initialize_pidfile(pidfile_id)
        self._pidfile_index[(working_directory, pidfile_name)] = pidfile_id
        self._set_last_pidfile(pidfile_id, working_directory, pidfile_name)
        return pidfile_id


    def get_pidfile_contents(self, pidfile_id, use_second_read=False):
        if pidfile_id not in self._pidfiles:
            logging.debug('Request for nonexistent pidfile %s', pidfile_id)
        return self._pidfiles.get(pidfile_id, drone_manager.PidfileContents())


    def is_process_running(self, process):
        return True


    def register_pidfile(self, pidfile_id):
        self._initialize_pidfile(pidfile_id)


    def unregister_pidfile(self, pidfile_id):
        self._unregistered_pidfiles.add(pidfile_id)


    def declare_process_count(self, pidfile_id, num_processes):
        pidfile_id._num_processes = num_processes


    def absolute_path(self, path):
        return 'absolute/' + path


    def write_lines_to_file(self, file_path, lines, paired_with_process=None):
        # TODO: record this
        pass


    def get_pidfile_id_from(self, execution_tag, pidfile_name):
        default_pidfile = self._DummyPidfileId(execution_tag, pidfile_name,
                                               num_processes=0)
        return self._pidfile_index.get((execution_tag, pidfile_name),
                                       default_pidfile)


    def kill_process(self, process, sig=signal.SIGKILL):
        pidfile_id = self._process_index[process]

        if pidfile_id not in self._pids_to_signals_received:
            self._pids_to_signals_received[pidfile_id] = set()
        self._pids_to_signals_received[pidfile_id].add(sig)

        if sig == signal.SIGKILL:
            self._set_pidfile_exit_status_queue.append(pidfile_id)


class MockEmailManager(NullMethodObject):
    _NULL_METHODS = ('send_queued_emails', 'send_email')

    def enqueue_notify_email(self, subject, message):
        logging.warning('enqueue_notify_email: %s', subject)
        logging.warning(message)


class SchedulerFunctionalTest(unittest.TestCase,
                              frontend_test_utils.FrontendTestMixin):
    # some number of ticks after which the scheduler is presumed to have
    # stabilized, given no external changes
    _A_LOT_OF_TICKS = 10

    def setUp(self):
        self._frontend_common_setup()
        self._set_stubs()
        self._set_global_config_values()
        self._create_dispatcher()

        logging.basicConfig(level=logging.DEBUG)


    def _create_dispatcher(self):
        self.dispatcher = monitor_db.Dispatcher()


    def tearDown(self):
        self._database.disconnect()
        self._frontend_common_teardown()


    def _set_stubs(self):
        self.mock_config = MockGlobalConfig()
        self.god.stub_with(global_config, 'global_config', self.mock_config)

        self.mock_drone_manager = MockDroneManager()
        drone_manager._set_instance(self.mock_drone_manager)

        self.mock_email_manager = MockEmailManager()
        self.god.stub_with(email_manager, 'manager', self.mock_email_manager)

        self._database = (
                database_connection.TranslatingDatabase.get_test_database(
                        translators=scheduler_lib._DB_TRANSLATORS))
        self._database.connect(db_type='django')
        self.god.stub_with(monitor_db, '_db', self._database)
        self.god.stub_with(scheduler_models, '_db', self._database)

        MockConnectionManager.db = self._database
        scheduler_lib.ConnectionManager = MockConnectionManager

        monitor_db.initialize_globals()
        scheduler_models.initialize_globals()


    def _set_global_config_values(self):
        self.mock_config.set_config_value('SCHEDULER', 'pidfile_timeout_mins',
                                          1)
        self.mock_config.set_config_value('SCHEDULER', 'gc_stats_interval_mins',
                                          999999)
        self.mock_config.set_config_value('SCHEDULER', 'enable_archiving', True)
        self.mock_config.set_config_value('SCHEDULER',
                                          'max_parse_processes', 50)
        self.mock_config.set_config_value('SCHEDULER',
                                          'max_transfer_processes', 50)
        self.mock_config.set_config_value('SCHEDULER',
                                          'clean_interval_minutes', 50)
        self.mock_config.set_config_value('SCHEDULER',
                                          'max_provision_retries', 1)
        self.mock_config.set_config_value('SCHEDULER', 'max_repair_limit', 1)
        self.mock_config.set_config_value(
                'SCHEDULER',
                'secs_to_wait_for_atomic_group_hosts', 600)
        scheduler_config.config.read_config()


    def _initialize_test(self):
        self.dispatcher.initialize()


    def _run_dispatcher(self):
        for _ in xrange(self._A_LOT_OF_TICKS):
            self.dispatcher.tick()


    def test_idle(self):
        self._initialize_test()
        self._run_dispatcher()


    def _assert_process_executed(self, working_directory, pidfile_name):
        process_was_executed = self.mock_drone_manager.was_process_executed(
                working_directory, pidfile_name)
        self.assert_(process_was_executed,
                     '%s/%s not executed' % (working_directory, pidfile_name))


    def _update_instance(self, model_instance):
        return type(model_instance).objects.get(pk=model_instance.pk)


    def _check_statuses(self, queue_entry, queue_entry_status,
                        host_status=None):
        self._check_entry_status(queue_entry, queue_entry_status)
        if host_status:
            self._check_host_status(queue_entry.host, host_status)


    def _check_entry_status(self, queue_entry, status):
        # update from DB
        queue_entry = self._update_instance(queue_entry)
        self.assertEquals(queue_entry.status, status)


    def _check_host_status(self, host, status):
        # update from DB
        host = self._update_instance(host)
        self.assertEquals(host.status, status)


    def _run_pre_job_verify(self, queue_entry):
        self._run_dispatcher() # launches verify
        self._check_statuses(queue_entry, HqeStatus.VERIFYING,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)


    def test_simple_job(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # launches job
        self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        self._finish_job(queue_entry)
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
        self._assert_nothing_is_running()


    def _setup_for_pre_job_reset(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.reboot_before = model_attributes.RebootBefore.ALWAYS
        job.save()
        return queue_entry


    def _run_pre_job_reset_job(self, queue_entry):
        self._run_dispatcher() # reset
        self._check_statuses(queue_entry, HqeStatus.RESETTING,
                             HostStatus.RESETTING)
        self.mock_drone_manager.finish_process(_PidfileType.RESET)
        self._run_dispatcher() # job
        self._finish_job(queue_entry)


    def test_pre_job_reset(self):
        queue_entry = self._setup_for_pre_job_reset()
        self._run_pre_job_reset_job(queue_entry)


    def _run_pre_job_reset_one_failure(self):
        queue_entry = self._setup_for_pre_job_reset()
        self._run_dispatcher() # reset
        self.mock_drone_manager.finish_process(_PidfileType.RESET,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self._check_statuses(queue_entry, HqeStatus.QUEUED,
                             HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        return queue_entry


    def test_pre_job_reset_failure(self):
        queue_entry = self._run_pre_job_reset_one_failure()
        # from here the job should run as normal
        self._run_pre_job_reset_job(queue_entry)


    def test_pre_job_reset_double_failure(self):
        # TODO (showard): this test isn't perfect.
        # in reality, when the second
        # reset fails, it copies its results over to the job directory using
        # copy_results_on_drone() and then parses them. since we don't handle
        # that, there appear to be no results at the job directory. the
        # scheduler handles this gracefully, parsing gets effectively skipped,
        # and this test passes as is. but we ought to properly test that
        # behavior.
        queue_entry = self._run_pre_job_reset_one_failure()
        self._run_dispatcher() # second reset
        self.mock_drone_manager.finish_process(_PidfileType.RESET,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.FAILED,
                             HostStatus.REPAIR_FAILED)
        # nothing else should run
        self._assert_nothing_is_running()


    def _assert_nothing_is_running(self):
        self.assertEquals(self.mock_drone_manager.running_pidfile_ids(), [])


    def _setup_for_post_job_cleanup(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.reboot_after = model_attributes.RebootAfter.ALWAYS
        job.save()
        return queue_entry


    def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,
                                                   include_verify=True):
        if include_verify:
            self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # parsing + cleanup
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # repair, HQE unaffected
        self.mock_drone_manager.finish_process(_PidfileType.ARCHIVE)
        self._run_dispatcher()
        return queue_entry


    def test_post_job_cleanup_failure(self):
        queue_entry = self._setup_for_post_job_cleanup()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)


    def test_post_job_cleanup_failure_repair_failure(self):
        queue_entry = self._setup_for_post_job_cleanup()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.REPAIR_FAILED)


    def _ensure_post_job_process_is_paired(self, queue_entry, pidfile_type):
        pidfile_name = _PIDFILE_TYPE_TO_PIDFILE[pidfile_type]
        queue_entry = self._update_instance(queue_entry)
        pidfile_id = self.mock_drone_manager.pidfile_from_path(
                queue_entry.execution_path(), pidfile_name)
        self.assert_(pidfile_id._paired_with_pidfile)


    def _finish_job(self, queue_entry):
        self._check_statuses(queue_entry, HqeStatus.RUNNING)
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # launches parsing
        self._check_statuses(queue_entry, HqeStatus.PARSING)
        self._ensure_post_job_process_is_paired(queue_entry, _PidfileType.PARSE)
        self._finish_parsing(queue_entry)


    def _finish_parsing(self, queue_entry):
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()

        self._check_entry_status(queue_entry, HqeStatus.ARCHIVING)
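        # The ARCHIVING step runs here because enable_archiving is True in the
        # mock global config; SchedulerFunctionalTestNoArchiving overrides
        # _finish_parsing to skip it.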
        self.mock_drone_manager.finish_process(_PidfileType.ARCHIVE)
        self._run_dispatcher()


    def _create_reverify_request(self):
        host = self.hosts[0]
        models.SpecialTask.schedule_special_task(
                host=host, task=models.SpecialTask.Task.VERIFY)
        return host


    def test_requested_reverify(self):
        host = self._create_reverify_request()
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY)


    def test_requested_reverify_failure(self):
        host = self._create_reverify_request()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self._check_host_status(host, HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY)


    def _setup_for_do_not_verify(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        queue_entry.host.protection = host_protections.Protection.DO_NOT_VERIFY
        queue_entry.host.save()
        return queue_entry


    def test_do_not_verify_job(self):
        queue_entry = self._setup_for_do_not_verify()
        self._run_dispatcher() # runs job directly
        self._finish_job(queue_entry)


    def test_do_not_verify_job_with_cleanup(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS
        queue_entry.job.save()

        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher() # job
        self._finish_job(queue_entry)


    def test_do_not_verify_pre_job_cleanup_failure(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS
        queue_entry.job.save()

        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # failure ignored; job runs
        self._finish_job(queue_entry)


    def test_do_not_verify_post_job_cleanup_failure(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_after = model_attributes.RebootAfter.ALWAYS
        queue_entry.job.save()

        self._run_post_job_cleanup_failure_up_to_repair(queue_entry,
                                                        include_verify=False)
        # failure ignored, host still set to Ready
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
        self._run_dispatcher() # nothing else runs
        self._assert_nothing_is_running()


    def test_do_not_verify_requested_reverify_failure(self):
        host = self._create_reverify_request()
        host.protection = host_protections.Protection.DO_NOT_VERIFY
        host.save()

        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY) # ignore failure
        self._assert_nothing_is_running()


    def test_job_abort_in_verify(self):
        self._initialize_test()
        job = self._create_job(hosts=[1])
        queue_entries = list(job.hostqueueentry_set.all())
        self._run_dispatcher() # launches verify
        self._check_statuses(queue_entries[0], HqeStatus.VERIFYING)
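        # simulate a user abort by flipping the aborted flag directly in the
        # DB; the dispatcher notices it on the next tick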
        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher() # kills verify, launches cleanup
        self.assert_(self.mock_drone_manager.was_last_process_killed(
                _PidfileType.VERIFY, set([signal.SIGKILL])))
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher()


    def test_job_abort(self):
        self._initialize_test()
        job = self._create_job(hosts=[1])
        job.run_reset = False
        job.save()
        queue_entries = list(job.hostqueueentry_set.all())

        self._run_dispatcher() # launches job

        self._check_statuses(queue_entries[0], HqeStatus.RUNNING)

        job.hostqueueentry_set.update(aborted=True)

        self._run_dispatcher() # kills job, launches gathering

        self._check_statuses(queue_entries[0], HqeStatus.GATHERING)
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # launches parsing + cleanup
        queue_entry = job.hostqueueentry_set.all()[0]
        self._finish_parsing(queue_entry)
        # The abort will cause gathering to launch a cleanup.
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher()


    def test_job_abort_queued_synchronous(self):
        self._initialize_test()
        job = self._create_job(hosts=[1, 2])
        job.synch_count = 2
        job.save()

        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher()
        for host_queue_entry in job.hostqueueentry_set.all():
            self.assertEqual(host_queue_entry.status,
                             HqeStatus.ABORTED)


    def test_no_pidfile_leaking(self):
        self._initialize_test()

        self.test_simple_job()
        self.mock_drone_manager.refresh()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})

        self.test_job_abort_in_verify()
        self.mock_drone_manager.refresh()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})

        self.test_job_abort()
        self.mock_drone_manager.refresh()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})


    def _make_job_and_queue_entry(self):
        job = self._create_job(hosts=[1])
        queue_entry = job.hostqueueentry_set.all()[0]
        return job, queue_entry


    def test_recover_running_no_process(self):
        # recovery should re-execute a Running HQE if no process is found
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.RUNNING
        queue_entry.execution_subdir = '1-myuser/host1'
        queue_entry.save()
        queue_entry.host.status = HostStatus.RUNNING
        queue_entry.host.save()

        self._initialize_test()
        self._run_dispatcher()
        self._finish_job(queue_entry)


    def test_recover_verifying_hqe_no_special_task(self):
        # recovery should move a Resetting HQE with no corresponding
        # Verify or Reset SpecialTask back to Queued.
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.RESETTING
        queue_entry.save()

        # make some dummy SpecialTasks that shouldn't count
        models.SpecialTask.objects.create(
                host=queue_entry.host,
                task=models.SpecialTask.Task.RESET,
                requested_by=models.User.current_user())
        models.SpecialTask.objects.create(
                host=queue_entry.host,
                task=models.SpecialTask.Task.CLEANUP,
                queue_entry=queue_entry,
                is_complete=True,
                requested_by=models.User.current_user())

        self._initialize_test()
        self._check_statuses(queue_entry, HqeStatus.QUEUED)


    def _test_recover_verifying_hqe_helper(self, task, pidfile_type):
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.VERIFYING
        queue_entry.save()

        special_task = models.SpecialTask.objects.create(
                host=queue_entry.host, task=task, queue_entry=queue_entry)

        self._initialize_test()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(pidfile_type)
        self._run_dispatcher()
        # don't bother checking the rest of the job execution, as long as the
        # SpecialTask ran


    def test_recover_verifying_hqe_with_cleanup(self):
        # recover an HQE that was in pre-job cleanup
        self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.CLEANUP,
                                                _PidfileType.CLEANUP)


    def test_recover_verifying_hqe_with_verify(self):
        # recover an HQE that was in pre-job verify
        self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.VERIFY,
                                                _PidfileType.VERIFY)


    def test_recover_pending_hqes_with_group(self):
        # recover a group of HQEs that are in Pending, in the same group (e.g.,
        # in a job with atomic hosts)
        job = self._create_job(hosts=[1, 2], atomic_group=1)
        job.save()

        job.hostqueueentry_set.all().update(status=HqeStatus.PENDING)

        self._initialize_test()
        for queue_entry in job.hostqueueentry_set.all():
            self.assertEquals(queue_entry.status, HqeStatus.STARTING)


    def test_recover_parsing(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.run_verify = False
        job.run_reset = False
        job.reboot_after = model_attributes.RebootAfter.NEVER
        job.save()

        self._run_dispatcher() # launches job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # launches parsing

        # now "restart" the scheduler
        self._create_dispatcher()
        self._initialize_test()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()


    def test_recover_parsing__no_process_already_aborted(self):
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.execution_subdir = 'host1'
        queue_entry.status = HqeStatus.PARSING
        queue_entry.aborted = True
        queue_entry.save()

        self._initialize_test()
        self._run_dispatcher()


    def test_job_scheduled_just_after_abort(self):
        # test a pretty obscure corner case where a job is aborted while queued,
        # another job is ready to run, and throttling is active. the post-abort
        # cleanup must not be pre-empted by the second job.
        # This test kind of doesn't make sense anymore after verify+cleanup
        # were merged into reset. It should maybe just be removed.
        job1, queue_entry1 = self._make_job_and_queue_entry()
        queue_entry1.save()
        job2, queue_entry2 = self._make_job_and_queue_entry()
        job2.reboot_before = model_attributes.RebootBefore.IF_DIRTY
        job2.save()

        self.mock_drone_manager.process_capacity = 0
        self._run_dispatcher() # schedule job1, but won't start verify
        job1.hostqueueentry_set.update(aborted=True)
        self.mock_drone_manager.process_capacity = 100
        self._run_dispatcher() # reset must run here, not verify for job2
        self._check_statuses(queue_entry1, HqeStatus.ABORTED,
                             HostStatus.RESETTING)
        self.mock_drone_manager.finish_process(_PidfileType.RESET)
        self._run_dispatcher() # now verify starts for job2
        self._check_statuses(queue_entry2, HqeStatus.RUNNING,
                             HostStatus.RUNNING)


    def test_reverify_interrupting_pre_job(self):
        # ensure things behave sanely if a reverify is scheduled in the middle
        # of pre-job actions
        _, queue_entry = self._make_job_and_queue_entry()

        self._run_dispatcher() # pre-job verify
        self._create_reverify_request()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher() # reverify runs now
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # pre-job verify
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # and job runs...
        self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        self._finish_job(queue_entry) # reverify has been deleted
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.READY)
        self._assert_nothing_is_running()


    def test_reverify_while_job_running(self):
        # once a job is running, a reverify must not be allowed to preempt
        # Gathering
        _, queue_entry = self._make_job_and_queue_entry()
        self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job runs
        self._create_reverify_request()
        # make job end with a signal, so gathering will run
        self.mock_drone_manager.finish_process(_PidfileType.JOB,
                                               exit_status=271)
        self._run_dispatcher() # gathering must start
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # parsing and cleanup
        self._finish_parsing(queue_entry)
        self._run_dispatcher() # now reverify runs
        self._check_statuses(queue_entry, HqeStatus.FAILED,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(queue_entry.host, HostStatus.READY)


    def test_reverify_while_host_pending(self):
        # ensure that if a reverify is scheduled while a host is in Pending, it
        # won't run until the host is actually free
        job = self._create_job(hosts=[1, 2])
        queue_entry = job.hostqueueentry_set.get(host__hostname='host1')
        job.synch_count = 2
        job.save()

        host2 = self.hosts[1]
        host2.locked = True
        host2.save()

        self._run_dispatcher() # verify host1
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # host1 Pending
        self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)
        self._create_reverify_request()
        self._run_dispatcher() # nothing should happen here
        self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)

        # now let the job run
        host2.locked = False
        host2.save()
        self._run_dispatcher() # verify host2
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # run job
        self._finish_job(queue_entry)
        # the reverify should now be running
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(queue_entry.host, HostStatus.READY)


    def test_throttling(self):
        job = self._create_job(hosts=[1, 2, 3])
        job.synch_count = 3
        job.save()

        queue_entries = list(job.hostqueueentry_set.all())
        def _check_hqe_statuses(*statuses):
            for queue_entry, status in zip(queue_entries, statuses):
                self._check_statuses(queue_entry, status)

        self.mock_drone_manager.process_capacity = 2
        self._run_dispatcher() # verify runs on 1 and 2
        queue_entries = list(job.hostqueueentry_set.all())
        _check_hqe_statuses(HqeStatus.QUEUED,
                            HqeStatus.VERIFYING, HqeStatus.VERIFYING)
        self.assertEquals(len(self.mock_drone_manager.running_pidfile_ids()), 2)

        self.mock_drone_manager.finish_specific_process(
                'hosts/host3/1-verify', drone_manager.AUTOSERV_PID_FILE)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # verify runs on 3
        _check_hqe_statuses(HqeStatus.VERIFYING, HqeStatus.PENDING,
                            HqeStatus.PENDING)

        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # job won't run due to throttling
        _check_hqe_statuses(HqeStatus.STARTING, HqeStatus.STARTING,
                            HqeStatus.STARTING)
        self._assert_nothing_is_running()

        self.mock_drone_manager.process_capacity = 3
        self._run_dispatcher() # now job runs
        _check_hqe_statuses(HqeStatus.RUNNING, HqeStatus.RUNNING,
                            HqeStatus.RUNNING)

        self.mock_drone_manager.process_capacity = 2
        self.mock_drone_manager.finish_process(_PidfileType.JOB,
                                               exit_status=271)
        self._run_dispatcher() # gathering won't run due to throttling
        _check_hqe_statuses(HqeStatus.GATHERING, HqeStatus.GATHERING,
                            HqeStatus.GATHERING)
        self._assert_nothing_is_running()

        self.mock_drone_manager.process_capacity = 3
        self._run_dispatcher() # now gathering runs

        self.mock_drone_manager.process_capacity = 0
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # parsing runs despite throttling
        _check_hqe_statuses(HqeStatus.PARSING, HqeStatus.PARSING,
                            HqeStatus.PARSING)


    def test_abort_starting_while_throttling(self):
        self._initialize_test()
        job = self._create_job(hosts=[1, 2], synchronous=True)
        queue_entry = job.hostqueueentry_set.all()[0]
        job.run_verify = False
        job.run_reset = False
        job.reboot_after = model_attributes.RebootAfter.NEVER
        job.save()

        self.mock_drone_manager.process_capacity = 0
        self._run_dispatcher() # go to starting, but don't start job
        self._check_statuses(queue_entry, HqeStatus.STARTING,
                             HostStatus.PENDING)

        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.GATHERING,
                             HostStatus.RUNNING)

        self.mock_drone_manager.process_capacity = 5
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.ABORTED,
                             HostStatus.CLEANING)


    def test_simple_metahost_assignment(self):
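        # metahosts=[1] asks for any host carrying label1; in the default test
        # fixtures only host1 has that label (test_metahost_fail_verify below
        # adds it to host2 explicitly), so the scheduler should pick host1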
        job = self._create_job(metahosts=[1])
        self._run_dispatcher()
        entry = job.hostqueueentry_set.all()[0]
        self.assertEquals(entry.host.hostname, 'host1')
        self._check_statuses(entry, HqeStatus.VERIFYING, HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        # rest of job proceeds normally


    def test_metahost_fail_verify(self):
        self.hosts[1].labels.add(self.labels[0]) # put label1 also on host2
        job = self._create_job(metahosts=[1])
        self._run_dispatcher() # assigned to host1
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # host1 failed, gets reassigned to host2
        entry = job.hostqueueentry_set.all()[0]
        self.assertEquals(entry.host.hostname, 'host2')
        self._check_statuses(entry, HqeStatus.VERIFYING, HostStatus.VERIFYING)
        self._check_host_status(self.hosts[0], HostStatus.REPAIRING)

        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)


    def test_hostless_job(self):
        job = self._create_job(hostless=True)
        entry = job.hostqueueentry_set.all()[0]

        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.RUNNING)

        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.PARSING)
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.ARCHIVING)
        self.mock_drone_manager.finish_process(_PidfileType.ARCHIVE)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.COMPLETED)


    def test_pre_job_keyvals(self):
        job = self._create_job(hosts=[1])
        job.run_verify = False
        job.run_reset = False
        job.reboot_before = model_attributes.RebootBefore.NEVER
        job.save()
        models.JobKeyval.objects.create(job=job, key='mykey', value='myvalue')

        self._run_dispatcher()
        self._finish_job(job.hostqueueentry_set.all()[0])

        attached_files = self.mock_drone_manager.attached_files(
                '1-autotest_system/host1')
        job_keyval_path = '1-autotest_system/host1/keyval'
        self.assert_(job_keyval_path in attached_files, attached_files)
        keyval_contents = attached_files[job_keyval_path]
        keyval_dict = dict(line.strip().split('=', 1)
                           for line in keyval_contents.splitlines())
        self.assert_('job_queued' in keyval_dict, keyval_dict)
        self.assertEquals(keyval_dict['mykey'], 'myvalue')


# This tests the scheduler functions with the archiving step disabled
class SchedulerFunctionalTestNoArchiving(SchedulerFunctionalTest):
    def _set_global_config_values(self):
        super(SchedulerFunctionalTestNoArchiving,
              self)._set_global_config_values()
        self.mock_config.set_config_value('SCHEDULER', 'enable_archiving',
                                          False)


    def _finish_parsing(self, queue_entry):
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()


    def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,
                                                   include_verify=True):
        if include_verify:
            self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
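        # unlike the base-class version, no ARCHIVE process is finished below,
        # since archiving is disabled for this subclass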
        self._run_dispatcher() # parsing + cleanup
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # repair, HQE unaffected
        return queue_entry


    def test_hostless_job(self):
        job = self._create_job(hostless=True)
        entry = job.hostqueueentry_set.all()[0]

        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.RUNNING)

        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.PARSING)
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.COMPLETED)


if __name__ == '__main__':
    unittest.main()