#!/usr/bin/python
#pylint: disable-msg=C0111

import datetime
import common

from autotest_lib.frontend import setup_django_environment
from autotest_lib.frontend.afe import frontend_test_utils
from autotest_lib.frontend.afe import models, rpc_interface
from autotest_lib.frontend.afe import model_logic, model_attributes
from autotest_lib.client.common_lib import global_config
from autotest_lib.client.common_lib import control_data
from autotest_lib.client.common_lib import error
from autotest_lib.client.common_lib import priorities
from autotest_lib.client.common_lib.test_utils import mock
from autotest_lib.client.common_lib.test_utils import unittest
from autotest_lib.server import frontend
from autotest_lib.server import utils as server_utils
from autotest_lib.server.cros.dynamic_suite import frontend_wrappers

CLIENT = control_data.CONTROL_TYPE_NAMES.CLIENT
SERVER = control_data.CONTROL_TYPE_NAMES.SERVER

_hqe_status = models.HostQueueEntry.Status


class RpcInterfaceTest(unittest.TestCase,
                       frontend_test_utils.FrontendTestMixin):
    def setUp(self):
        self._frontend_common_setup()
        self.god = mock.mock_god()


    def tearDown(self):
        self.god.unstub_all()
        self._frontend_common_teardown()
        global_config.global_config.reset_config_values()


    def test_validation(self):
        # non-number for a numeric field
        self.assertRaises(model_logic.ValidationError,
                          rpc_interface.add_atomic_group, name='foo',
                          max_number_of_machines='bar')
        # omit a required field
        self.assertRaises(model_logic.ValidationError, rpc_interface.add_label,
                          name=None)
        # violate uniqueness constraint
        self.assertRaises(model_logic.ValidationError, rpc_interface.add_host,
                          hostname='host1')


    def test_multiple_platforms(self):
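        """Adding a second platform label to a host is rejected."""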
        platform2 = models.Label.objects.create(name='platform2', platform=True)
        self.assertRaises(model_logic.ValidationError,
                          rpc_interface.label_add_hosts, id='platform2',
                          hosts=['host1', 'host2'])
        self.assertRaises(model_logic.ValidationError,
                          rpc_interface.host_add_labels,
                          id='host1', labels=['platform2'])
        # make sure the platform didn't get added
        platforms = rpc_interface.get_labels(
            host__hostname__in=['host1', 'host2'], platform=True)
        self.assertEquals(len(platforms), 1)
        self.assertEquals(platforms[0]['name'], 'myplatform')


    def _check_hostnames(self, hosts, expected_hostnames):
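        """Assert |hosts| contains exactly |expected_hostnames| (as sets)."""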
        self.assertEquals(set(host['hostname'] for host in hosts),
                          set(expected_hostnames))


    def test_get_hosts(self):
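        """get_hosts() lists all hosts and returns full host details."""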
        hosts = rpc_interface.get_hosts()
        self._check_hostnames(hosts, [host.hostname for host in self.hosts])

        hosts = rpc_interface.get_hosts(hostname='host1')
        self._check_hostnames(hosts, ['host1'])
        host = hosts[0]
        self.assertEquals(sorted(host['labels']), ['label1', 'myplatform'])
        self.assertEquals(host['platform'], 'myplatform')
        self.assertEquals(host['atomic_group'], None)
        self.assertEquals(host['acls'], ['my_acl'])
        self.assertEquals(host['attributes'], {})


    def test_get_hosts_multiple_labels(self):
        hosts = rpc_interface.get_hosts(
                multiple_labels=['myplatform', 'label1'])
        self._check_hostnames(hosts, ['host1'])


    def test_get_hosts_exclude_only_if_needed(self):
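        """exclude_only_if_needed_labels drops hosts carrying such labels."""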
        self.hosts[0].labels.add(self.label3)

        hosts = rpc_interface.get_hosts(hostname__in=['host1', 'host2'],
                                        exclude_only_if_needed_labels=True)
        self._check_hostnames(hosts, ['host2'])


    def test_get_hosts_exclude_atomic_group_hosts(self):
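        """exclude_atomic_group_hosts drops hosts assigned to atomic groups."""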
        hosts = rpc_interface.get_hosts(
                exclude_atomic_group_hosts=True,
                hostname__in=['host4', 'host5', 'host6'])
        self._check_hostnames(hosts, ['host4'])


    def test_get_hosts_exclude_both(self):
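        """Both exclude filters can be applied in a single query."""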
        self.hosts[0].labels.add(self.label3)

        hosts = rpc_interface.get_hosts(
                hostname__in=['host1', 'host2', 'host5'],
                exclude_only_if_needed_labels=True,
                exclude_atomic_group_hosts=True)
        self._check_hostnames(hosts, ['host2'])


    def test_job_keyvals(self):
        keyval_dict = {'mykey': 'myvalue'}
        job_id = rpc_interface.create_job(name='test', priority='Medium',
                                          control_file='foo',
                                          control_type=CLIENT,
                                          hosts=['host1'],
                                          keyvals=keyval_dict)
        jobs = rpc_interface.get_jobs(id=job_id)
        self.assertEquals(len(jobs), 1)
        self.assertEquals(jobs[0]['keyvals'], keyval_dict)


    def test_test_retry(self):
        job_id = rpc_interface.create_job(name='flake', priority='Medium',
                                          control_file='foo',
                                          control_type=CLIENT,
                                          hosts=['host1'],
                                          test_retry=10)
        jobs = rpc_interface.get_jobs(id=job_id)
        self.assertEquals(len(jobs), 1)
        self.assertEquals(jobs[0]['test_retry'], 10)


    def test_get_jobs_summary(self):
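        """get_jobs_summary() reports per-job HQE status counts."""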
        job = self._create_job(hosts=xrange(1, 4))
        entries = list(job.hostqueueentry_set.all())
        entries[1].status = _hqe_status.FAILED
        entries[1].save()
        entries[2].status = _hqe_status.FAILED
        entries[2].aborted = True
        entries[2].save()

        # Mock up tko_rpc_interface.get_status_counts.
        self.god.stub_function_to_return(rpc_interface.tko_rpc_interface,
                                         'get_status_counts',
                                         None)

        job_summaries = rpc_interface.get_jobs_summary(id=job.id)
        self.assertEquals(len(job_summaries), 1)
        summary = job_summaries[0]
        self.assertEquals(summary['status_counts'], {'Queued': 1,
                                                     'Failed': 2})


    def _check_job_ids(self, actual_job_dicts, expected_jobs):
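        """Assert |actual_job_dicts| has exactly the ids of |expected_jobs|."""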
        self.assertEquals(
                set(job_dict['id'] for job_dict in actual_job_dicts),
                set(job.id for job in expected_jobs))


    def test_get_jobs_status_filters(self):
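        """not_yet_run/running/finished filters select the expected jobs."""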
        HqeStatus = models.HostQueueEntry.Status
        def create_two_host_job():
            return self._create_job(hosts=[1, 2])
        def set_hqe_statuses(job, first_status, second_status):
            entries = job.hostqueueentry_set.all()
            entries[0].update_object(status=first_status)
            entries[1].update_object(status=second_status)

        queued = create_two_host_job()

        queued_and_running = create_two_host_job()
        set_hqe_statuses(queued_and_running, HqeStatus.QUEUED,
                         HqeStatus.RUNNING)

        running_and_complete = create_two_host_job()
        set_hqe_statuses(running_and_complete, HqeStatus.RUNNING,
                         HqeStatus.COMPLETED)

        complete = create_two_host_job()
        set_hqe_statuses(complete, HqeStatus.COMPLETED, HqeStatus.COMPLETED)

        started_but_inactive = create_two_host_job()
        set_hqe_statuses(started_but_inactive, HqeStatus.QUEUED,
                         HqeStatus.COMPLETED)

        parsing = create_two_host_job()
        set_hqe_statuses(parsing, HqeStatus.PARSING, HqeStatus.PARSING)

        self._check_job_ids(rpc_interface.get_jobs(not_yet_run=True), [queued])
        self._check_job_ids(rpc_interface.get_jobs(running=True),
                            [queued_and_running, running_and_complete,
                             started_but_inactive, parsing])
        self._check_job_ids(rpc_interface.get_jobs(finished=True), [complete])


    def test_get_jobs_type_filters(self):
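        """suite/sub/standalone filters are exclusive and correct."""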
        self.assertRaises(AssertionError, rpc_interface.get_jobs,
                          suite=True, sub=True)
        self.assertRaises(AssertionError, rpc_interface.get_jobs,
                          suite=True, standalone=True)
        self.assertRaises(AssertionError, rpc_interface.get_jobs,
                          standalone=True, sub=True)

        parent_job = self._create_job(hosts=[1])
        child_jobs = self._create_job(hosts=[1, 2],
                                      parent_job_id=parent_job.id)
        standalone_job = self._create_job(hosts=[1])

        self._check_job_ids(rpc_interface.get_jobs(suite=True), [parent_job])
        self._check_job_ids(rpc_interface.get_jobs(sub=True), [child_jobs])
        self._check_job_ids(rpc_interface.get_jobs(standalone=True),
                            [standalone_job])


    def _create_job_helper(self, **kwargs):
        return rpc_interface.create_job(name='test', priority='Medium',
                                        control_file='control file',
                                        control_type=SERVER, **kwargs)


    def test_one_time_hosts(self):
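        """One-time hosts are created invalid with no labels or ACLs."""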
        job = self._create_job_helper(one_time_hosts=['testhost'])
        host = models.Host.objects.get(hostname='testhost')
        self.assertEquals(host.invalid, True)
        self.assertEquals(host.labels.count(), 0)
        self.assertEquals(host.aclgroup_set.count(), 0)


    def test_create_job_duplicate_hosts(self):
        self.assertRaises(model_logic.ValidationError, self._create_job_helper,
                          hosts=[1, 1])


    def test_create_unrunnable_metahost_job(self):
        self.assertRaises(error.NoEligibleHostException,
                          self._create_job_helper, meta_hosts=['unused'])


    def test_create_hostless_job(self):
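        """A hostless job gets one queue entry with no host or meta_host."""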
        job_id = self._create_job_helper(hostless=True)
        job = models.Job.objects.get(pk=job_id)
        queue_entries = job.hostqueueentry_set.all()
        self.assertEquals(len(queue_entries), 1)
        self.assertEquals(queue_entries[0].host, None)
        self.assertEquals(queue_entries[0].meta_host, None)
        self.assertEquals(queue_entries[0].atomic_group, None)


    def _setup_special_tasks(self):
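        """Create two jobs against host1 and three verify special tasks."""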
        host = self.hosts[0]

        job1 = self._create_job(hosts=[1])
        job2 = self._create_job(hosts=[1])

        entry1 = job1.hostqueueentry_set.all()[0]
        entry1.update_object(started_on=datetime.datetime(2009, 1, 2),
                             execution_subdir='host1')
        entry2 = job2.hostqueueentry_set.all()[0]
        entry2.update_object(started_on=datetime.datetime(2009, 1, 3),
                             execution_subdir='host1')

        self.task1 = models.SpecialTask.objects.create(
                host=host, task=models.SpecialTask.Task.VERIFY,
                time_started=datetime.datetime(2009, 1, 1), # ran before job 1
                is_complete=True, requested_by=models.User.current_user())
        self.task2 = models.SpecialTask.objects.create(
                host=host, task=models.SpecialTask.Task.VERIFY,
                queue_entry=entry2, # ran with job 2
                is_active=True, requested_by=models.User.current_user())
        self.task3 = models.SpecialTask.objects.create(
                host=host, task=models.SpecialTask.Task.VERIFY,
                requested_by=models.User.current_user()) # not yet run


    def test_get_special_tasks(self):
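        """get_special_tasks() honors host and queue_entry filters."""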
        self._setup_special_tasks()
        tasks = rpc_interface.get_special_tasks(host__hostname='host1',
                                                queue_entry__isnull=True)
        self.assertEquals(len(tasks), 2)
        self.assertEquals(tasks[0]['task'], models.SpecialTask.Task.VERIFY)
        self.assertEquals(tasks[0]['is_active'], False)
        self.assertEquals(tasks[0]['is_complete'], True)


    def test_get_latest_special_task(self):
        # a particular usage of get_special_tasks()
        self._setup_special_tasks()
        self.task2.time_started = datetime.datetime(2009, 1, 2)
        self.task2.save()

        tasks = rpc_interface.get_special_tasks(
                host__hostname='host1', task=models.SpecialTask.Task.VERIFY,
                time_started__isnull=False, sort_by=['-time_started'],
                query_limit=1)
        self.assertEquals(len(tasks), 1)
        self.assertEquals(tasks[0]['id'], 2)


    def _common_entry_check(self, entry_dict):
        self.assertEquals(entry_dict['host']['hostname'], 'host1')
        self.assertEquals(entry_dict['job']['id'], 2)


    def test_get_host_queue_entries_and_special_tasks(self):
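        """Queue entries and special tasks interleave, newest first."""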
        self._setup_special_tasks()

        host = self.hosts[0].id
        entries_and_tasks = (
                rpc_interface.get_host_queue_entries_and_special_tasks(host))

        paths = [entry['execution_path'] for entry in entries_and_tasks]
        self.assertEquals(paths, ['hosts/host1/3-verify',
                                  '2-autotest_system/host1',
                                  'hosts/host1/2-verify',
                                  '1-autotest_system/host1',
                                  'hosts/host1/1-verify'])

        verify2 = entries_and_tasks[2]
        self._common_entry_check(verify2)
        self.assertEquals(verify2['type'], 'Verify')
        self.assertEquals(verify2['status'], 'Running')
        self.assertEquals(verify2['execution_path'], 'hosts/host1/2-verify')

        entry2 = entries_and_tasks[1]
        self._common_entry_check(entry2)
        self.assertEquals(entry2['type'], 'Job')
        self.assertEquals(entry2['status'], 'Queued')
        self.assertEquals(entry2['started_on'], '2009-01-03 00:00:00')


    def test_view_invalid_host(self):
        # RPCs used by the View Host page should work for invalid hosts.
        self._create_job_helper(hosts=[1])
        host = self.hosts[0]
        host.delete()

        self.assertEquals(1, rpc_interface.get_num_hosts(hostname='host1',
                                                         valid_only=False))
        data = rpc_interface.get_hosts(hostname='host1', valid_only=False)
        self.assertEquals(1, len(data))

        self.assertEquals(1, rpc_interface.get_num_host_queue_entries(
                host__hostname='host1'))
        data = rpc_interface.get_host_queue_entries(host__hostname='host1')
        self.assertEquals(1, len(data))

        count = rpc_interface.get_num_host_queue_entries_and_special_tasks(
                host=host.id)
        self.assertEquals(1, count)
        data = rpc_interface.get_host_queue_entries_and_special_tasks(
                host=host.id)
        self.assertEquals(1, len(data))


    def test_reverify_hosts(self):
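        """reverify_hosts() creates a verify task for each matched host."""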
        hostname_list = rpc_interface.reverify_hosts(id__in=[1, 2])
        self.assertEquals(hostname_list, ['host1', 'host2'])
        tasks = rpc_interface.get_special_tasks()
        self.assertEquals(len(tasks), 2)
        self.assertEquals(set(task['host']['id'] for task in tasks),
                          set([1, 2]))

        task = tasks[0]
        self.assertEquals(task['task'], models.SpecialTask.Task.VERIFY)
        self.assertEquals(task['requested_by'], 'autotest_system')


    def test_repair_hosts(self):
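        """repair_hosts() creates a repair task for each matched host."""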
        hostname_list = rpc_interface.repair_hosts(id__in=[1, 2])
        self.assertEquals(hostname_list, ['host1', 'host2'])
        tasks = rpc_interface.get_special_tasks()
        self.assertEquals(len(tasks), 2)
        self.assertEquals(set(task['host']['id'] for task in tasks),
                          set([1, 2]))

        task = tasks[0]
        self.assertEquals(task['task'], models.SpecialTask.Task.REPAIR)
        self.assertEquals(task['requested_by'], 'autotest_system')


    def test_parameterized_job(self):
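        """create_parameterized_job() persists all parameter rows."""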
        global_config.global_config.override_config_value(
                'AUTOTEST_WEB', 'parameterized_jobs', 'True')

        string_type = model_attributes.ParameterTypes.STRING

        test = models.Test.objects.create(
                name='test', test_type=control_data.CONTROL_TYPE.SERVER)
        test_parameter = test.testparameter_set.create(name='key')
        profiler = models.Profiler.objects.create(name='profiler')

        kernels = ({'version': 'version', 'cmdline': 'cmdline'},)
        profilers = ('profiler',)
        profiler_parameters = {'profiler': {'key': ('value', string_type)}}
        job_parameters = {'key': ('value', string_type)}

        job_id = rpc_interface.create_parameterized_job(
                name='job', priority=priorities.Priority.DEFAULT, test='test',
                parameters=job_parameters, kernel=kernels, label='label1',
                profilers=profilers, profiler_parameters=profiler_parameters,
                profile_only=False, hosts=('host1',))
        parameterized_job = models.Job.smart_get(job_id).parameterized_job

        self.assertEqual(parameterized_job.test, test)
        self.assertEqual(parameterized_job.label, self.labels[0])
        self.assertEqual(parameterized_job.kernels.count(), 1)
        self.assertEqual(parameterized_job.profilers.count(), 1)

        kernel = models.Kernel.objects.get(**kernels[0])
        self.assertEqual(parameterized_job.kernels.all()[0], kernel)
        self.assertEqual(parameterized_job.profilers.all()[0], profiler)

        parameterized_profiler = models.ParameterizedJobProfiler.objects.get(
                parameterized_job=parameterized_job, profiler=profiler)
        profiler_parameters_obj = (
                models.ParameterizedJobProfilerParameter.objects.get(
                parameterized_job_profiler=parameterized_profiler))
        self.assertEqual(profiler_parameters_obj.parameter_name, 'key')
        self.assertEqual(profiler_parameters_obj.parameter_value, 'value')
        self.assertEqual(profiler_parameters_obj.parameter_type, string_type)

        self.assertEqual(
                parameterized_job.parameterizedjobparameter_set.count(), 1)
        parameters_obj = (
                parameterized_job.parameterizedjobparameter_set.all()[0])
        self.assertEqual(parameters_obj.test_parameter, test_parameter)
        self.assertEqual(parameters_obj.parameter_value, 'value')
        self.assertEqual(parameters_obj.parameter_type, string_type)


    def _modify_host_helper(self, on_shard=False, host_on_shard=False):
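        """Lock a host via modify_host() and verify shard RPC forwarding.

        @param on_shard: True to simulate running on the shard itself.
        @param host_on_shard: True to place the host on a shard first.
        """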
        shard_hostname = 'shard1'
        if on_shard:
            global_config.global_config.override_config_value(
                'SHARD', 'shard_hostname', shard_hostname)

        host = models.Host.objects.all()[0]
        if host_on_shard:
            shard = models.Shard.objects.create(hostname=shard_hostname)
            host.shard = shard
            host.save()

        self.assertFalse(host.locked)

        self.god.stub_class_method(frontend.AFE, 'run')

        if host_on_shard and not on_shard:
            mock_afe = self.god.create_mock_class_obj(
                    frontend_wrappers.RetryingAFE, 'MockAFE')
            self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)

            mock_afe2 = frontend_wrappers.RetryingAFE.expect_new(
                    server=shard_hostname, user=None)
            mock_afe2.run.expect_call('modify_host_local', id=host.id,
                    locked=True, lock_reason='_modify_host_helper lock',
                    lock_time=datetime.datetime(2015, 12, 15))
        elif on_shard:
            mock_afe = self.god.create_mock_class_obj(
                    frontend_wrappers.RetryingAFE, 'MockAFE')
            self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)

            mock_afe2 = frontend_wrappers.RetryingAFE.expect_new(
                    server=server_utils.get_global_afe_hostname(), user=None)
            mock_afe2.run.expect_call('modify_host', id=host.id,
                    locked=True, lock_reason='_modify_host_helper lock',
                    lock_time=datetime.datetime(2015, 12, 15))

        rpc_interface.modify_host(id=host.id, locked=True,
                                  lock_reason='_modify_host_helper lock',
                                  lock_time=datetime.datetime(2015, 12, 15))

        host = models.Host.objects.get(pk=host.id)
        if on_shard:
            # modify_host on a shard does nothing but route the RPC to the
            # master.
            self.assertFalse(host.locked)
        else:
            self.assertTrue(host.locked)
        self.god.check_playback()


    def test_modify_host_on_master_host_on_master(self):
        """Call modify_host on the master for a host on the master."""
        self._modify_host_helper()


    def test_modify_host_on_master_host_on_shard(self):
        """Call modify_host on the master for a host on a shard."""
        self._modify_host_helper(host_on_shard=True)


    def test_modify_host_on_shard(self):
        """Call modify_host on a shard for a host on that shard."""
        self._modify_host_helper(on_shard=True, host_on_shard=True)


    def test_modify_hosts_on_master_host_on_shard(self):
        """Ensure calls to modify_hosts are correctly forwarded to shards."""
        host1 = models.Host.objects.all()[0]
        host2 = models.Host.objects.all()[1]

        shard1 = models.Shard.objects.create(hostname='shard1')
        host1.shard = shard1
        host1.save()

        shard2 = models.Shard.objects.create(hostname='shard2')
        host2.shard = shard2
        host2.save()

        self.assertFalse(host1.locked)
        self.assertFalse(host2.locked)

        mock_afe = self.god.create_mock_class_obj(frontend_wrappers.RetryingAFE,
                                                  'MockAFE')
        self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)

        # A host's status may differ between the master and its shard.
        # Filters are always applied on the master, so the host on the shard
        # is affected regardless of its status there.
        filters_to_use = {'status': 'Ready'}

        mock_afe2 = frontend_wrappers.RetryingAFE.expect_new(
                server='shard2', user=None)
        mock_afe2.run.expect_call(
            'modify_hosts_local',
            host_filter_data={'id__in': [shard1.id, shard2.id]},
            update_data={'locked': True,
                         'lock_reason': 'Testing forward to shard',
                         'lock_time' : datetime.datetime(2015, 12, 15) })

        mock_afe1 = frontend_wrappers.RetryingAFE.expect_new(
                server='shard1', user=None)
        mock_afe1.run.expect_call(
            'modify_hosts_local',
            host_filter_data={'id__in': [shard1.id, shard2.id]},
            update_data={'locked': True,
                         'lock_reason': 'Testing forward to shard',
                         'lock_time' : datetime.datetime(2015, 12, 15)})

        rpc_interface.modify_hosts(
                host_filter_data=filters_to_use,
                update_data={'locked': True,
                             'lock_reason': 'Testing forward to shard',
                             'lock_time' : datetime.datetime(2015, 12, 15) })

        host1 = models.Host.objects.get(pk=host1.id)
        self.assertTrue(host1.locked)
        host2 = models.Host.objects.get(pk=host2.id)
        self.assertTrue(host2.locked)
        self.god.check_playback()


    def test_delete_host(self):
        """Ensure an RPC is forwarded when deleting a host on a shard."""
        host1 = models.Host.objects.all()[0]
        shard1 = models.Shard.objects.create(hostname='shard1')
        host1.shard = shard1
        host1.save()
        host1_id = host1.id

        mock_afe = self.god.create_mock_class_obj(frontend_wrappers.RetryingAFE,
                                                  'MockAFE')
        self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)

        mock_afe1 = frontend_wrappers.RetryingAFE.expect_new(
                server='shard1', user=None)
        mock_afe1.run.expect_call('delete_host', id=host1.id)

        rpc_interface.delete_host(id=host1.id)

        self.assertRaises(models.Host.DoesNotExist,
                          models.Host.smart_get, host1_id)

        self.god.check_playback()


    def test_modify_label(self):
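        """modify_label() applies locally and forwards the RPC to shards."""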
        label1 = models.Label.objects.all()[0]
        self.assertEqual(label1.invalid, 0)

        host2 = models.Host.objects.all()[1]
        shard1 = models.Shard.objects.create(hostname='shard1')
        host2.shard = shard1
        host2.labels.add(label1)
        host2.save()

        mock_afe = self.god.create_mock_class_obj(frontend_wrappers.RetryingAFE,
                                                  'MockAFE')
        self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)

        mock_afe1 = frontend_wrappers.RetryingAFE.expect_new(
                server='shard1', user=None)
        mock_afe1.run.expect_call('modify_label', id=label1.id, invalid=1)

        rpc_interface.modify_label(label1.id, invalid=1)

        self.assertEqual(models.Label.objects.all()[0].invalid, 1)
        self.god.check_playback()


    def test_delete_label(self):
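        """delete_label() deletes locally and forwards the RPC to shards."""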
        label1 = models.Label.objects.all()[0]

        host2 = models.Host.objects.all()[1]
        shard1 = models.Shard.objects.create(hostname='shard1')
        host2.shard = shard1
        host2.labels.add(label1)
        host2.save()

        mock_afe = self.god.create_mock_class_obj(frontend_wrappers.RetryingAFE,
                                                  'MockAFE')
        self.god.stub_with(frontend_wrappers, 'RetryingAFE', mock_afe)

        mock_afe1 = frontend_wrappers.RetryingAFE.expect_new(
                server='shard1', user=None)
        mock_afe1.run.expect_call('delete_label', id=label1.id)

        rpc_interface.delete_label(id=label1.id)

        self.assertRaises(models.Label.DoesNotExist,
                          models.Label.smart_get, label1.id)
        self.god.check_playback()


if __name__ == '__main__':
    unittest.main()