Best Python code snippet using autotest_python
Source: monitor_db_functional_test.py
...

    def _update_instance(self, model_instance):
        return type(model_instance).objects.get(pk=model_instance.pk)

    def _check_statuses(self, queue_entry, queue_entry_status,
                        host_status=None):
        self._check_entry_status(queue_entry, queue_entry_status)
        if host_status:
            self._check_host_status(queue_entry.host, host_status)

    def _check_entry_status(self, queue_entry, status):
        # update from DB
        queue_entry = self._update_instance(queue_entry)
        self.assertEquals(queue_entry.status, status)

    def _check_host_status(self, host, status):
        # update from DB
        host = self._update_instance(host)
        self.assertEquals(host.status, status)

    def _run_pre_job_verify(self, queue_entry):
        self._run_dispatcher() # launches verify
        self._check_statuses(queue_entry, HqeStatus.VERIFYING,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)

    def test_simple_job(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # launches job
        self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        self._finish_job(queue_entry)
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
        self._assert_nothing_is_running()

    def _setup_for_pre_job_reset(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.reboot_before = model_attributes.RebootBefore.ALWAYS
        job.save()
        return queue_entry

    def _run_pre_job_reset_job(self, queue_entry):
        self._run_dispatcher() # reset
        self._check_statuses(queue_entry, HqeStatus.RESETTING,
                             HostStatus.RESETTING)
        self.mock_drone_manager.finish_process(_PidfileType.RESET)
        self._run_dispatcher() # job
        self._finish_job(queue_entry)

    def test_pre_job_reset(self):
        queue_entry = self._setup_for_pre_job_reset()
        self._run_pre_job_reset_job(queue_entry)

    def _run_pre_job_reset_one_failure(self):
        queue_entry = self._setup_for_pre_job_reset()
        self._run_dispatcher() # reset
        self.mock_drone_manager.finish_process(_PidfileType.RESET,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self._check_statuses(queue_entry, HqeStatus.QUEUED,
                             HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        return queue_entry

    def test_pre_job_reset_failure(self):
        queue_entry = self._run_pre_job_reset_one_failure()
        # from here the job should run as normal
        self._run_pre_job_reset_job(queue_entry)

    def test_pre_job_reset_double_failure(self):
        # TODO (showard): this test isn't perfect. in reality, when the second
        # reset fails, it copies its results over to the job directory using
        # copy_results_on_drone() and then parses them. since we don't handle
        # that, there appear to be no results at the job directory. the
        # scheduler handles this gracefully, parsing gets effectively skipped,
        # and this test passes as is. but we ought to properly test that
        # behavior.
        queue_entry = self._run_pre_job_reset_one_failure()
        self._run_dispatcher() # second reset
        self.mock_drone_manager.finish_process(_PidfileType.RESET,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.FAILED,
                             HostStatus.REPAIR_FAILED)
        # nothing else should run
        self._assert_nothing_is_running()

    def _assert_nothing_is_running(self):
        self.assertEquals(self.mock_drone_manager.running_pidfile_ids(), [])

    def _setup_for_post_job_cleanup(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.reboot_after = model_attributes.RebootAfter.ALWAYS
        job.save()
        return queue_entry

    def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,
                                                   include_verify=True):
        if include_verify:
            self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # parsing + cleanup
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # repair, HQE unaffected
        self.mock_drone_manager.finish_process(_PidfileType.ARCHIVE)
        self._run_dispatcher()
        return queue_entry

    def test_post_job_cleanup_failure(self):
        queue_entry = self._setup_for_post_job_cleanup()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)

    def test_post_job_cleanup_failure_repair_failure(self):
        queue_entry = self._setup_for_post_job_cleanup()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.REPAIR_FAILED)

    def _ensure_post_job_process_is_paired(self, queue_entry, pidfile_type):
        pidfile_name = _PIDFILE_TYPE_TO_PIDFILE[pidfile_type]
        queue_entry = self._update_instance(queue_entry)
        pidfile_id = self.mock_drone_manager.pidfile_from_path(
                queue_entry.execution_path(), pidfile_name)
        self.assert_(pidfile_id._paired_with_pidfile)

    def _finish_job(self, queue_entry):
        self._check_statuses(queue_entry, HqeStatus.RUNNING)
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # launches parsing
        self._check_statuses(queue_entry, HqeStatus.PARSING)
        self._ensure_post_job_process_is_paired(queue_entry, _PidfileType.PARSE)
        self._finish_parsing(queue_entry)

    def _finish_parsing(self, queue_entry):
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()
        self._check_entry_status(queue_entry, HqeStatus.ARCHIVING)
        self.mock_drone_manager.finish_process(_PidfileType.ARCHIVE)
        self._run_dispatcher()

    def _create_reverify_request(self):
        host = self.hosts[0]
        models.SpecialTask.schedule_special_task(
                host=host, task=models.SpecialTask.Task.VERIFY)
        return host

    def test_requested_reverify(self):
        host = self._create_reverify_request()
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY)

    def test_requested_reverify_failure(self):
        host = self._create_reverify_request()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self._check_host_status(host, HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY)

    def _setup_for_do_not_verify(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        queue_entry.host.protection = host_protections.Protection.DO_NOT_VERIFY
        queue_entry.host.save()
        return queue_entry

    def test_do_not_verify_job(self):
        queue_entry = self._setup_for_do_not_verify()
        self._run_dispatcher() # runs job directly
        self._finish_job(queue_entry)

    def test_do_not_verify_job_with_cleanup(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS
        queue_entry.job.save()
        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher() # job
        self._finish_job(queue_entry)

    def test_do_not_verify_pre_job_cleanup_failure(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_before = model_attributes.RebootBefore.ALWAYS
        queue_entry.job.save()
        self._run_dispatcher() # cleanup
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # failure ignored; job runs
        self._finish_job(queue_entry)

    def test_do_not_verify_post_job_cleanup_failure(self):
        queue_entry = self._setup_for_do_not_verify()
        queue_entry.job.reboot_after = model_attributes.RebootAfter.ALWAYS
        queue_entry.job.save()
        self._run_post_job_cleanup_failure_up_to_repair(queue_entry,
                                                        include_verify=False)
        # failure ignored, host still set to Ready
        self._check_statuses(queue_entry, HqeStatus.COMPLETED, HostStatus.READY)
        self._run_dispatcher() # nothing else runs
        self._assert_nothing_is_running()

    def test_do_not_verify_requested_reverify_failure(self):
        host = self._create_reverify_request()
        host.protection = host_protections.Protection.DO_NOT_VERIFY
        host.save()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher()
        self._check_host_status(host, HostStatus.READY) # ignore failure
        self._assert_nothing_is_running()

    def test_job_abort_in_verify(self):
        self._initialize_test()
        job = self._create_job(hosts=[1])
        queue_entries = list(job.hostqueueentry_set.all())
        self._run_dispatcher() # launches verify
        self._check_statuses(queue_entries[0], HqeStatus.VERIFYING)
        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher() # kills verify, launches cleanup
        self.assert_(self.mock_drone_manager.was_last_process_killed(
                _PidfileType.VERIFY, set([signal.SIGKILL])))
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher()

    def test_job_abort(self):
        self._initialize_test()
        job = self._create_job(hosts=[1])
        job.run_reset = False
        job.save()
        queue_entries = list(job.hostqueueentry_set.all())
        self._run_dispatcher() # launches job
        self._check_statuses(queue_entries[0], HqeStatus.RUNNING)
        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher() # kills job, launches gathering
        self._check_statuses(queue_entries[0], HqeStatus.GATHERING)
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # launches parsing + cleanup
        queue_entry = job.hostqueueentry_set.all()[0]
        self._finish_parsing(queue_entry)
        # The abort will cause gathering to launch a cleanup.
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP)
        self._run_dispatcher()

    def test_job_abort_queued_synchronous(self):
        self._initialize_test()
        job = self._create_job(hosts=[1,2])
        job.synch_count = 2
        job.save()
        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher()
        for host_queue_entry in job.hostqueueentry_set.all():
            self.assertEqual(host_queue_entry.status,
                             HqeStatus.ABORTED)

    def test_no_pidfile_leaking(self):
        self._initialize_test()
        self.test_simple_job()
        self.mock_drone_manager.refresh()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})
        self.test_job_abort_in_verify()
        self.mock_drone_manager.refresh()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})
        self.test_job_abort()
        self.mock_drone_manager.refresh()
        self.assertEquals(self.mock_drone_manager._pidfiles, {})

    def _make_job_and_queue_entry(self):
        job = self._create_job(hosts=[1])
        queue_entry = job.hostqueueentry_set.all()[0]
        return job, queue_entry

    def test_recover_running_no_process(self):
        # recovery should re-execute a Running HQE if no process is found
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.RUNNING
        queue_entry.execution_subdir = '1-myuser/host1'
        queue_entry.save()
        queue_entry.host.status = HostStatus.RUNNING
        queue_entry.host.save()
        self._initialize_test()
        self._run_dispatcher()
        self._finish_job(queue_entry)

    def test_recover_verifying_hqe_no_special_task(self):
        # recovery should move a Resetting HQE with no corresponding
        # Verify or Reset SpecialTask back to Queued.
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.RESETTING
        queue_entry.save()
        # make some dummy SpecialTasks that shouldn't count
        models.SpecialTask.objects.create(
                host=queue_entry.host,
                task=models.SpecialTask.Task.RESET,
                requested_by=models.User.current_user())
        models.SpecialTask.objects.create(
                host=queue_entry.host,
                task=models.SpecialTask.Task.CLEANUP,
                queue_entry=queue_entry,
                is_complete=True,
                requested_by=models.User.current_user())
        self._initialize_test()
        self._check_statuses(queue_entry, HqeStatus.QUEUED)

    def _test_recover_verifying_hqe_helper(self, task, pidfile_type):
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.status = HqeStatus.VERIFYING
        queue_entry.save()
        special_task = models.SpecialTask.objects.create(
                host=queue_entry.host, task=task, queue_entry=queue_entry)
        self._initialize_test()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(pidfile_type)
        self._run_dispatcher()
        # don't bother checking the rest of the job execution, as long as the
        # SpecialTask ran

    def test_recover_verifying_hqe_with_cleanup(self):
        # recover an HQE that was in pre-job cleanup
        self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.CLEANUP,
                                                _PidfileType.CLEANUP)

    def test_recover_verifying_hqe_with_verify(self):
        # recover an HQE that was in pre-job verify
        self._test_recover_verifying_hqe_helper(models.SpecialTask.Task.VERIFY,
                                                _PidfileType.VERIFY)

    def test_recover_pending_hqes_with_group(self):
        # recover a group of HQEs that are in Pending, in the same group (e.g.,
        # in a job with atomic hosts)
        job = self._create_job(hosts=[1,2], atomic_group=1)
        job.save()
        job.hostqueueentry_set.all().update(status=HqeStatus.PENDING)
        self._initialize_test()
        for queue_entry in job.hostqueueentry_set.all():
            self.assertEquals(queue_entry.status, HqeStatus.STARTING)

    def test_recover_parsing(self):
        self._initialize_test()
        job, queue_entry = self._make_job_and_queue_entry()
        job.run_verify = False
        job.run_reset = False
        job.reboot_after = model_attributes.RebootAfter.NEVER
        job.save()
        self._run_dispatcher() # launches job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # launches parsing
        # now "restart" the scheduler
        self._create_dispatcher()
        self._initialize_test()
        self._run_dispatcher()
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()

    def test_recover_parsing__no_process_already_aborted(self):
        _, queue_entry = self._make_job_and_queue_entry()
        queue_entry.execution_subdir = 'host1'
        queue_entry.status = HqeStatus.PARSING
        queue_entry.aborted = True
        queue_entry.save()
        self._initialize_test()
        self._run_dispatcher()

    def test_job_scheduled_just_after_abort(self):
        # test a pretty obscure corner case where a job is aborted while queued,
        # another job is ready to run, and throttling is active. the post-abort
        # cleanup must not be pre-empted by the second job.
        # This test kind of doesn't make sense anymore after verify+cleanup
        # were merged into reset. It should maybe just be removed.
        job1, queue_entry1 = self._make_job_and_queue_entry()
        queue_entry1.save()
        job2, queue_entry2 = self._make_job_and_queue_entry()
        job2.reboot_before = model_attributes.RebootBefore.IF_DIRTY
        job2.save()
        self.mock_drone_manager.process_capacity = 0
        self._run_dispatcher() # schedule job1, but won't start verify
        job1.hostqueueentry_set.update(aborted=True)
        self.mock_drone_manager.process_capacity = 100
        self._run_dispatcher() # reset must run here, not verify for job2
        self._check_statuses(queue_entry1, HqeStatus.ABORTED,
                             HostStatus.RESETTING)
        self.mock_drone_manager.finish_process(_PidfileType.RESET)
        self._run_dispatcher() # now verify starts for job2
        self._check_statuses(queue_entry2, HqeStatus.RUNNING,
                             HostStatus.RUNNING)

    def test_reverify_interrupting_pre_job(self):
        # ensure things behave sanely if a reverify is scheduled in the middle
        # of pre-job actions
        _, queue_entry = self._make_job_and_queue_entry()
        self._run_dispatcher() # pre-job verify
        self._create_reverify_request()
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # repair
        self.mock_drone_manager.finish_process(_PidfileType.REPAIR)
        self._run_dispatcher() # reverify runs now
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # pre-job verify
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # and job runs...
        self._check_statuses(queue_entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        self._finish_job(queue_entry) # reverify has been deleted
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.READY)
        self._assert_nothing_is_running()

    def test_reverify_while_job_running(self):
        # once a job is running, a reverify must not be allowed to preempt
        # Gathering
        _, queue_entry = self._make_job_and_queue_entry()
        self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job runs
        self._create_reverify_request()
        # make job end with a signal, so gathering will run
        self.mock_drone_manager.finish_process(_PidfileType.JOB,
                                               exit_status=271)
        self._run_dispatcher() # gathering must start
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # parsing and cleanup
        self._finish_parsing(queue_entry)
        self._run_dispatcher() # now reverify runs
        self._check_statuses(queue_entry, HqeStatus.FAILED,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(queue_entry.host, HostStatus.READY)

    def test_reverify_while_host_pending(self):
        # ensure that if a reverify is scheduled while a host is in Pending, it
        # won't run until the host is actually free
        job = self._create_job(hosts=[1,2])
        queue_entry = job.hostqueueentry_set.get(host__hostname='host1')
        job.synch_count = 2
        job.save()
        host2 = self.hosts[1]
        host2.locked = True
        host2.save()
        self._run_dispatcher() # verify host1
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # host1 Pending
        self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)
        self._create_reverify_request()
        self._run_dispatcher() # nothing should happen here
        self._check_statuses(queue_entry, HqeStatus.PENDING, HostStatus.PENDING)
        # now let the job run
        host2.locked = False
        host2.save()
        self._run_dispatcher() # verify host2
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # run job
        self._finish_job(queue_entry)
        # the reverify should now be running
        self._check_statuses(queue_entry, HqeStatus.COMPLETED,
                             HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_host_status(queue_entry.host, HostStatus.READY)

    def test_throttling(self):
        job = self._create_job(hosts=[1,2,3])
        job.synch_count = 3
        job.save()
        queue_entries = list(job.hostqueueentry_set.all())
        def _check_hqe_statuses(*statuses):
            for queue_entry, status in zip(queue_entries, statuses):
                self._check_statuses(queue_entry, status)
        self.mock_drone_manager.process_capacity = 2
        self._run_dispatcher() # verify runs on 1 and 2
        queue_entries = list(job.hostqueueentry_set.all())
        _check_hqe_statuses(HqeStatus.QUEUED,
                            HqeStatus.VERIFYING, HqeStatus.VERIFYING)
        self.assertEquals(len(self.mock_drone_manager.running_pidfile_ids()), 2)
        self.mock_drone_manager.finish_specific_process(
                'hosts/host3/1-verify', drone_manager.AUTOSERV_PID_FILE)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # verify runs on 3
        _check_hqe_statuses(HqeStatus.VERIFYING, HqeStatus.PENDING,
                            HqeStatus.PENDING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher() # job won't run due to throttling
        _check_hqe_statuses(HqeStatus.STARTING, HqeStatus.STARTING,
                            HqeStatus.STARTING)
        self._assert_nothing_is_running()
        self.mock_drone_manager.process_capacity = 3
        self._run_dispatcher() # now job runs
        _check_hqe_statuses(HqeStatus.RUNNING, HqeStatus.RUNNING,
                            HqeStatus.RUNNING)
        self.mock_drone_manager.process_capacity = 2
        self.mock_drone_manager.finish_process(_PidfileType.JOB,
                                               exit_status=271)
        self._run_dispatcher() # gathering won't run due to throttling
        _check_hqe_statuses(HqeStatus.GATHERING, HqeStatus.GATHERING,
                            HqeStatus.GATHERING)
        self._assert_nothing_is_running()
        self.mock_drone_manager.process_capacity = 3
        self._run_dispatcher() # now gathering runs
        self.mock_drone_manager.process_capacity = 0
        self.mock_drone_manager.finish_process(_PidfileType.GATHER)
        self._run_dispatcher() # parsing runs despite throttling
        _check_hqe_statuses(HqeStatus.PARSING, HqeStatus.PARSING,
                            HqeStatus.PARSING)

    def test_abort_starting_while_throttling(self):
        self._initialize_test()
        job = self._create_job(hosts=[1,2], synchronous=True)
        queue_entry = job.hostqueueentry_set.all()[0]
        job.run_verify = False
        job.run_reset = False
        job.reboot_after = model_attributes.RebootAfter.NEVER
        job.save()
        self.mock_drone_manager.process_capacity = 0
        self._run_dispatcher() # go to starting, but don't start job
        self._check_statuses(queue_entry, HqeStatus.STARTING,
                             HostStatus.PENDING)
        job.hostqueueentry_set.update(aborted=True)
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.GATHERING,
                             HostStatus.RUNNING)
        self.mock_drone_manager.process_capacity = 5
        self._run_dispatcher()
        self._check_statuses(queue_entry, HqeStatus.ABORTED,
                             HostStatus.CLEANING)

    def test_simple_metahost_assignment(self):
        job = self._create_job(metahosts=[1])
        self._run_dispatcher()
        entry = job.hostqueueentry_set.all()[0]
        self.assertEquals(entry.host.hostname, 'host1')
        self._check_statuses(entry, HqeStatus.VERIFYING, HostStatus.VERIFYING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)
        # rest of job proceeds normally

    def test_metahost_fail_verify(self):
        self.hosts[1].labels.add(self.labels[0]) # put label1 also on host2
        job = self._create_job(metahosts=[1])
        self._run_dispatcher() # assigned to host1
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY,
                                               exit_status=256)
        self._run_dispatcher() # host1 failed, gets reassigned to host2
        entry = job.hostqueueentry_set.all()[0]
        self.assertEquals(entry.host.hostname, 'host2')
        self._check_statuses(entry, HqeStatus.VERIFYING, HostStatus.VERIFYING)
        self._check_host_status(self.hosts[0], HostStatus.REPAIRING)
        self.mock_drone_manager.finish_process(_PidfileType.VERIFY)
        self._run_dispatcher()
        self._check_statuses(entry, HqeStatus.RUNNING, HostStatus.RUNNING)

    def test_hostless_job(self):
        job = self._create_job(hostless=True)
        entry = job.hostqueueentry_set.all()[0]
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.RUNNING)
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.PARSING)
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.ARCHIVING)
        self.mock_drone_manager.finish_process(_PidfileType.ARCHIVE)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.COMPLETED)

    def test_pre_job_keyvals(self):
        job = self._create_job(hosts=[1])
        job.run_verify = False
        job.run_reset = False
        job.reboot_before = model_attributes.RebootBefore.NEVER
        job.save()
        models.JobKeyval.objects.create(job=job, key='mykey', value='myvalue')
        self._run_dispatcher()
        self._finish_job(job.hostqueueentry_set.all()[0])
        attached_files = self.mock_drone_manager.attached_files(
                '1-autotest_system/host1')
        job_keyval_path = '1-autotest_system/host1/keyval'
        self.assert_(job_keyval_path in attached_files, attached_files)
        keyval_contents = attached_files[job_keyval_path]
        keyval_dict = dict(line.strip().split('=', 1)
                           for line in keyval_contents.splitlines())
        self.assert_('job_queued' in keyval_dict, keyval_dict)
        self.assertEquals(keyval_dict['mykey'], 'myvalue')


# This tests the scheduler functions with archiving step disabled
class SchedulerFunctionalTestNoArchiving(SchedulerFunctionalTest):
    def _set_global_config_values(self):
        super(SchedulerFunctionalTestNoArchiving, self
              )._set_global_config_values()
        self.mock_config.set_config_value('SCHEDULER', 'enable_archiving',
                                          False)

    def _finish_parsing(self, queue_entry):
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()

    def _run_post_job_cleanup_failure_up_to_repair(self, queue_entry,
                                                   include_verify=True):
        if include_verify:
            self._run_pre_job_verify(queue_entry)
        self._run_dispatcher() # job
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher() # parsing + cleanup
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self.mock_drone_manager.finish_process(_PidfileType.CLEANUP,
                                               exit_status=256)
        self._run_dispatcher() # repair, HQE unaffected
        return queue_entry

    def test_hostless_job(self):
        job = self._create_job(hostless=True)
        entry = job.hostqueueentry_set.all()[0]
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.RUNNING)
        self.mock_drone_manager.finish_process(_PidfileType.JOB)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.PARSING)
        self.mock_drone_manager.finish_process(_PidfileType.PARSE)
        self._run_dispatcher()
        self._check_entry_status(entry, HqeStatus.COMPLETED)


if __name__ == '__main__':
    ...
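As an aside, the keyval assertion in test_pre_job_keyvals relies on the attached keyval file being plain key=value lines. A minimal, self-contained sketch of that parsing is shown below; the sample contents are hypothetical and not taken from the listing above.

# Standalone sketch of the keyval parsing used in test_pre_job_keyvals.
# The contents below are hypothetical example data, not real scheduler output.
keyval_contents = "job_queued=1700000000\nmykey=myvalue\n"
keyval_dict = dict(line.strip().split('=', 1)
                   for line in keyval_contents.splitlines())
assert 'job_queued' in keyval_dict
assert keyval_dict['mykey'] == 'myvalue'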