def test_host_summary_generation_with_deleted_hosts():
    """Hosts deleted mid-run still get a JobHostSummary row: host_id is null
    (asserted via the ``or -1`` sentinel) but host_name is preserved."""
    hostnames = [f'Host {i}' for i in range(10)]
    inv = Inventory()
    inv.save()
    Host.objects.bulk_create([Host(created=now(), modified=now(), name=h, inventory_id=inv.id) for h in hostnames])
    j = Job(inventory=inv)
    j.save()
    host_map = dict((host.name, host.id) for host in inv.hosts.all())
    # delete half of the hosts during the playbook run, remembering which ones
    deleted_names = set()
    for h in inv.hosts.all()[:5]:
        deleted_names.add(h.name)
        h.delete()
    JobEvent.create_from_data(
        job_id=j.pk,
        parent_uuid='abc123',
        event='playbook_on_stats',
        event_data={
            'ok': dict((hostname, len(hostname)) for hostname in hostnames),
            'changed': {},
            'dark': {},
            'failures': {},
            'ignored': {},
            'processed': {},
            'rescued': {},
            'skipped': {},
        },
        host_map=host_map,
    ).save()
    ids = sorted([s.host_id or -1 for s in j.job_host_summaries.order_by('id').all()])
    names = sorted([s.host_name for s in j.job_host_summaries.all()])
    # NOTE(review): the original asserted literal pks [-1 x5, 6, 7, 8, 9, 10],
    # which assumes the Host table's ID sequence starts at 1 and that the
    # unordered queryset slice deletes exactly Hosts 0-4.  Deriving the
    # expectation from host_map and the recorded deletions keeps the test
    # correct regardless of sequence state or deletion order.
    expected_ids = sorted(-1 if name in deleted_names else host_map[name] for name in hostnames)
    assert ids == expected_ids
    assert names == sorted(hostnames)
def test_job_event_websocket_notifications(emit):
    """Creating a job event emits exactly one websocket message on the
    job-scoped channel, carrying the job's id."""
    job = Job(id=123)
    job.save()
    JobEvent.create_from_data(job_id=job.pk)
    assert len(emit.call_args_list) == 1
    channel, message = emit.call_args_list[0][0]
    assert channel == 'job_events-123'
    assert message['job'] == 123
def setup_environment(inventory, project, machine_credential, host, notification_template, label):
    '''
    Create old jobs and new jobs, with various other objects to hit the
    related fields of Jobs. This makes sure on_delete() effects are tested
    properly.
    '''
    stale_jobs = []
    fresh_jobs = []
    age_days = 10

    template = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project)
    template.credentials.add(machine_credential)
    runner = User.objects.create(username='******')
    template.execute_role.members.add(runner)

    note = Notification()
    note.notification_template = notification_template
    note.save()

    for _ in range(3):
        # one job stamped with the current time ...
        recent = template.create_job()
        recent.created = datetime.now(tz=timezone('UTC'))
        recent.save()
        JobEvent.create_from_data(job_id=recent.pk, uuid='abc123', event='runner_on_start', stdout='a' * 1025).save()
        fresh_jobs.append(recent)

        # ... and one back-dated by `age_days` days
        aged = template.create_job()
        aged.created = datetime.now(tz=timezone('UTC')) - timedelta(days=age_days)
        aged.save()
        aged.dependent_jobs.add(recent)
        JobEvent.create_from_data(job_id=aged.pk, uuid='abc123', event='runner_on_start', stdout='a' * 1025).save()
        stale_jobs.append(aged)

        # wire the aged job into every related field so on_delete() paths fire
        template.last_job = aged
        template.current_job = aged
        template.save()
        host.last_job = aged
        host.save()
        note.unifiedjob_notifications.add(aged)
        label.unifiedjob_labels.add(aged)
        node = WorkflowJobNode.objects.create(job=aged)
        node.save()
        summary = JobHostSummary.objects.create(job=aged)
        summary.save()

    return (stale_jobs, fresh_jobs, str(age_days))
def test_really_long_event_fields(field):
    """An oversized top-level event field is truncated before the ORM create:
    1021 original chars plus a literal '...' marker (1024 total)."""
    with mock.patch.object(JobEvent, 'objects') as manager:
        payload = {'job_id': 123, field: 'X' * 4096}
        JobEvent.create_from_data(**payload)
        truncated = {'job_id': 123, field: 'X' * 1021 + '...'}
        manager.create.assert_called_with(**truncated)
def test_host_summary_generation_with_limit():
    # Build an inventory of 10 hosts, then emulate a playbook run limited to
    # a *single* host.  Only that host should gain a JobHostSummary and an
    # updated .last_job; every other host must remain untouched.
    hostnames = [f'Host {i}' for i in range(10)]
    inv = Inventory()
    inv.save()
    Host.objects.bulk_create([Host(created=now(), modified=now(), name=h, inventory_id=inv.id) for h in hostnames])
    j = Job(inventory=inv)
    j.save()

    # The host map tracks name --> ID for the *whole* inventory, regardless
    # of any limit applied to the actual playbook run.
    host_map = {h.name: h.id for h in inv.hosts.all()}

    # A playbook_on_stats mentioning only Host 1 emulates `--limit=Host 1`.
    limited = Host.objects.get(name='Host 1')
    stats = {'ok': {limited.name: len(limited.name)}}
    for bucket in ('changed', 'dark', 'failures', 'ignored', 'processed', 'rescued', 'skipped'):
        stats[bucket] = {}
    JobEvent.create_from_data(
        job_id=j.pk,
        parent_uuid='abc123',
        event='playbook_on_stats',
        event_data=stats,
        host_map=host_map,
    ).save()

    # Only one host was referenced, so only one summary record may exist,
    # and it must be linked to that host.
    assert JobHostSummary.objects.count() == 1
    summary_id = JobHostSummary.objects.first().id
    for h in Host.objects.all():
        if h.name == 'Host 1':
            assert h.last_job_id == j.id
            assert h.last_job_host_summary_id == summary_id
        else:
            # all other hosts in the inventory should remain untouched
            assert h.last_job_id is None
            assert h.last_job_host_summary_id is None
def test_job_events_sublist_truncation(get, organization_factory, job_template_factory, truncate, expected):
    """stdout in the job-events sublist is truncated unless ?no_truncate=1 is
    passed; `expected` says whether the full 1025 chars should come back."""
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt", organization=objs.organization, inventory='test_inv', project='test_proj').job_template
    job = jt.create_unified_job()
    JobEvent.create_from_data(job_id=job.pk, uuid='abc123', event='runner_on_start', stdout='a' * 1025, job_created=job.created).save()

    url = reverse('api:job_job_events_list', kwargs={'pk': job.pk})
    if not truncate:
        url += '?no_truncate=1'
    response = get(url, user=objs.superusers.admin, expect=200)
    got_full_stdout = len(response.data['results'][0]['stdout']) == 1025
    assert got_full_stdout == expected
def test_parent_failed(emit, event):
    """A failing child event flips the parent task event's failed flag."""
    job = Job()
    job.save()
    JobEvent.create_from_data(job_id=job.pk, uuid='abc123', event='playbook_on_task_start')
    assert JobEvent.objects.count() == 1
    assert all(e.failed is False for e in JobEvent.objects.all())

    JobEvent.create_from_data(job_id=job.pk, parent_uuid='abc123', event=event)
    assert JobEvent.objects.count() == 2
    assert all(e.failed is True for e in JobEvent.objects.all())
def test_really_long_event_fields(field):
    """An oversized value inside event_data is truncated to 1024 chars:
    1023 originals plus a single ellipsis character."""
    payload = {'job_id': 123, 'event_data': {field: 'X' * 4096}}
    event = JobEvent.create_from_data(**payload)
    assert event.event_data[field] == 'X' * 1023 + '…'
def test_host_summary_generation():
    """playbook_on_stats produces one JobHostSummary per referenced host,
    with per-host counters and updated Host.last_job* pointers."""
    hostnames = [f'Host {i}' for i in range(100)]
    inv = Inventory()
    inv.save()
    Host.objects.bulk_create([Host(created=now(), modified=now(), name=h, inventory_id=inv.id) for h in hostnames])
    j = Job(inventory=inv)
    j.save()
    host_map = {h.name: h.id for h in inv.hosts.all()}

    # every host reports "ok" with a count equal to its name length
    stats = {'ok': {name: len(name) for name in hostnames}}
    for bucket in ('changed', 'dark', 'failures', 'ignored', 'processed', 'rescued', 'skipped'):
        stats[bucket] = {}
    JobEvent.create_from_data(
        job_id=j.pk,
        parent_uuid='abc123',
        event='playbook_on_stats',
        event_data=stats,
        host_map=host_map,
    ).save()

    assert j.job_host_summaries.count() == len(hostnames)
    assert sorted(s.host_name for s in j.job_host_summaries.all()) == sorted(hostnames)
    for summary in j.job_host_summaries.all():
        assert host_map[summary.host_name] == summary.host_id
        assert summary.ok == len(summary.host_name)
        for counter in ('changed', 'dark', 'failures', 'ignored', 'processed', 'rescued', 'skipped'):
            assert getattr(summary, counter) == 0
    for host in Host.objects.all():
        assert host.last_job_id == j.id
        assert host.last_job_host_summary.host == host
def test_parent_changed(emit):
    """A changed child result flips the parent task event's changed flag."""
    job = Job()
    job.save()
    JobEvent.create_from_data(job_id=job.pk, uuid='abc123', event='playbook_on_task_start')
    assert JobEvent.objects.count() == 1
    assert all(e.changed is False for e in JobEvent.objects.all())

    JobEvent.create_from_data(
        job_id=job.pk,
        parent_uuid='abc123',
        event='runner_on_ok',
        event_data={'res': {'changed': ['localhost']}},
    )
    assert JobEvent.objects.count() == 2
    assert all(e.changed is True for e in JobEvent.objects.all())
def test_parent_changed(emit):
    """Parent `changed` flags are only rolled up once playbook_on_stats
    arrives — verify both the task and runner events end up changed."""
    job = Job()
    job.save()
    JobEvent.create_from_data(job_id=job.pk, uuid='abc123', event='playbook_on_task_start').save()
    assert JobEvent.objects.count() == 1
    assert all(e.changed is False for e in JobEvent.objects.all())

    JobEvent.create_from_data(
        job_id=job.pk,
        parent_uuid='abc123',
        event='runner_on_ok',
        event_data={'res': {'changed': ['localhost']}},
    ).save()
    # the `playbook_on_stats` event is where we update the parent changed linkage
    JobEvent.create_from_data(job_id=job.pk, parent_uuid='abc123', event='playbook_on_stats').save()

    tracked = JobEvent.objects.filter(event__in=['playbook_on_task_start', 'runner_on_ok'])
    assert tracked.count() == 2
    assert all(e.changed is True for e in tracked.all())
def test_parent_failed(emit, event):
    """Parent `failed` flags are only rolled up once playbook_on_stats
    arrives — verify both the task and failing child events end up failed."""
    job = Job()
    job.save()
    JobEvent.create_from_data(job_id=job.pk, uuid='abc123', event='playbook_on_task_start').save()
    assert JobEvent.objects.count() == 1
    assert all(e.failed is False for e in JobEvent.objects.all())

    JobEvent.create_from_data(job_id=job.pk, parent_uuid='abc123', event=event).save()
    # the `playbook_on_stats` event is where we update the parent failed linkage
    JobEvent.create_from_data(job_id=job.pk, parent_uuid='abc123', event='playbook_on_stats').save()

    tracked = JobEvent.objects.filter(event__in=['playbook_on_task_start', event])
    assert tracked.count() == 2
    assert all(e.failed is True for e in tracked.all())
def test_job_job_events_children_summary(get, organization_factory, job_template_factory):
    """The children-summary endpoint reports per-event child counts and maps
    verbose (meta) events to their nearest preceding nested uuid."""
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt", organization=objs.organization, inventory='test_inv', project='test_proj').job_template
    job = jt.create_unified_job()
    url = reverse('api:job_job_events_children_summary', kwargs={'pk': job.pk})
    response = get(url, user=objs.superusers.admin, expect=200)
    assert response.data["event_processing_finished"] == False
    # Event layout:
    # E1
    #  E2
    #   E3
    # E4 (verbose)
    #  E5
    event_specs = [
        ('uuid1', '', "playbook_on_start", 1),
        ('uuid2', 'uuid1', "playbook_on_play_start", 2),
        ('uuid3', 'uuid2', "runner_on_start", 3),
        ('uuid4', '', 'verbose', 4),
        ('uuid5', 'uuid1', "playbook_on_task_start", 5),
    ]
    for event_uuid, parent, event_name, counter in event_specs:
        JobEvent.create_from_data(
            job_id=job.pk,
            uuid=event_uuid,
            parent_uuid=parent,
            event=event_name,
            counter=counter,
            stdout='a' * 1024,
            job_created=job.created,
        ).save()
    job.emitted_events = job.get_event_queryset().count()
    job.status = "successful"
    job.save()

    url = reverse('api:job_job_events_children_summary', kwargs={'pk': job.pk})
    response = get(url, user=objs.superusers.admin, expect=200)
    assert response.data["children_summary"] == {1: {"rowNumber": 0, "numChildren": 4}, 2: {"rowNumber": 1, "numChildren": 2}}
    assert response.data["meta_event_nested_uuid"] == {4: "uuid2"}
    assert response.data["event_processing_finished"] == True
def test_job_job_events_children_summary_is_tree(get, organization_factory, job_template_factory):
    '''
    children_summary should return {is_tree: False} if the event structure is not tree-like
    '''
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt", organization=objs.organization, inventory='test_inv', project='test_proj').job_template
    job = jt.create_unified_job()
    url = reverse('api:job_job_events_children_summary', kwargs={'pk': job.pk})
    response = get(url, user=objs.superusers.admin, expect=200)
    assert response.data["event_processing_finished"] == False
    # Event layout — E6's parent is E2, but it arrives after another branch E5,
    # so the structure is not a well-ordered tree:
    # E1
    #  E2
    #   E3
    # E4 (verbose)
    #  E5
    #   E6
    event_specs = [
        ('uuid1', '', "playbook_on_start", 1),
        ('uuid2', 'uuid1', "playbook_on_play_start", 2),
        ('uuid3', 'uuid2', "playbook_on_task_start", 3),
        ('uuid4', '', 'verbose', 4),
        ('uuid5', 'uuid1', "playbook_on_play_start", 5),
        ('uuid6', 'uuid2', "playbook_on_task_start", 6),
    ]
    for event_uuid, parent, event_name, counter in event_specs:
        JobEvent.create_from_data(
            job_id=job.pk,
            uuid=event_uuid,
            parent_uuid=parent,
            event=event_name,
            counter=counter,
            stdout='a' * 1024,
            job_created=job.created,
        ).save()
    job.emitted_events = job.get_event_queryset().count()
    job.status = "successful"
    job.save()

    url = reverse('api:job_job_events_children_summary', kwargs={'pk': job.pk})
    response = get(url, user=objs.superusers.admin, expect=200)
    assert response.data["children_summary"] == {}
    assert response.data["meta_event_nested_uuid"] == {}
    assert response.data["event_processing_finished"] == True
    assert response.data["is_tree"] == False