Example #1
    def test_hosts_are_counted_once(self):
        mock_event = JobEvent(
            **{
                'event': 'playbook_on_stats',
                'event_data': {
                    'skipped': {
                        'localhost': 2,
                        'fiz': 1,
                    },
                    'ok': {
                        'localhost': 1,
                        'foo': 2,
                    },
                    'changed': {
                        'localhost': 1,
                        'bar': 3,
                    },
                    'dark': {
                        'localhost': 2,
                        'fiz': 2,
                    },
                },
            })

        assert mock_event.get_host_status_counts() == {
            'ok': 1,
            'changed': 1,
            'dark': 2
        }
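The expected result makes sense if each host is tallied exactly once, with later status buckets taking precedence over earlier ones: 'localhost' appears under all four keys but only counts toward 'dark'. A minimal sketch of that counting logic, assuming a key-priority order; the helper name and exact key list are illustrative, not taken from the source:

from collections import defaultdict

def count_hosts_once(event_data):
    # Later keys win: a host listed under several statuses is assigned
    # to the last bucket that mentions it.
    host_status = {}
    for key in ('skipped', 'ok', 'changed', 'failures', 'dark'):
        for host in event_data.get(key, {}):
            host_status[host] = key
    counts = defaultdict(int)
    for status in host_status.values():
        counts[status] += 1
    return dict(counts)

Fed the event_data above, this yields {'ok': 1, 'changed': 1, 'dark': 2}: 'foo' is only ok, 'bar' is only changed, and 'localhost' and 'fiz' both end up dark.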
Example #2
def test_host_summary_generation_with_deleted_hosts():
    hostnames = [f'Host {i}' for i in range(10)]
    inv = Inventory()
    inv.save()
    Host.objects.bulk_create([Host(created=now(), modified=now(), name=h, inventory_id=inv.id) for h in hostnames])
    j = Job(inventory=inv)
    j.save()
    host_map = dict((host.name, host.id) for host in inv.hosts.all())

    # delete half of the hosts during the playbook run
    for h in inv.hosts.all()[:5]:
        h.delete()

    JobEvent.create_from_data(
        job_id=j.pk,
        parent_uuid='abc123',
        event='playbook_on_stats',
        event_data={
            'ok': dict((hostname, len(hostname)) for hostname in hostnames),
            'changed': {},
            'dark': {},
            'failures': {},
            'ignored': {},
            'processed': {},
            'rescued': {},
            'skipped': {},
        },
        host_map=host_map,
    ).save()

    ids = sorted([s.host_id or -1 for s in j.job_host_summaries.order_by('id').all()])
    names = sorted([s.host_name for s in j.job_host_summaries.all()])
    assert ids == [-1, -1, -1, -1, -1, 6, 7, 8, 9, 10]
    assert names == ['Host 0', 'Host 1', 'Host 2', 'Host 3', 'Host 4', 'Host 5', 'Host 6', 'Host 7', 'Host 8', 'Host 9']
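Coalescing a null host_id to -1 in the assertion suggests that stats processing keeps the host_name recorded in host_map but drops the foreign key when the host was deleted mid-run. A hedged sketch of that behavior; create_summaries and its exact shape are hypothetical, and it assumes the same models as the surrounding tests:

def create_summaries(job, event_data, host_map):
    # Hosts deleted during the run keep their name in the summary,
    # but the FK is nulled so it never points at a deleted row.
    existing_ids = set(job.inventory.hosts.values_list('id', flat=True))
    for hostname, ok_count in event_data['ok'].items():
        host_id = host_map.get(hostname)
        if host_id not in existing_ids:
            host_id = None  # deleted mid-run
        JobHostSummary.objects.create(
            job=job, host_id=host_id, host_name=hostname, ok=ok_count)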
Example #3
def test_job_event_websocket_notifications(emit):
    j = Job(id=123)
    j.save()
    JobEvent.create_from_data(job_id=j.pk)
    assert len(emit.call_args_list) == 1
    topic, payload = emit.call_args_list[0][0]
    assert topic == 'job_events-123'
    assert payload['job'] == 123
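The topic and payload assertions imply the emitter derives the channel group from the event table name plus the job id. A minimal sketch, where emit_channel_notification stands in for whatever callable the test patches and the payload fields are assumptions:

def notify_websocket(event):
    # group name: '<event table>-<job id>'; payload carries the job FK
    topic = 'job_events-{}'.format(event.job_id)
    payload = {'job': event.job_id, 'event': event.event}
    emit_channel_notification(topic, payload)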
Example #4
def setup_environment(inventory, project, machine_credential, host,
                      notification_template, label):
    '''
    Create old jobs and new jobs, with various other objects to hit the
    related fields of Jobs. This makes sure on_delete() effects are tested
    properly.
    '''
    old_jobs = []
    new_jobs = []
    days = 10
    days_str = str(days)

    jt = JobTemplate.objects.create(name='testjt',
                                    inventory=inventory,
                                    project=project)
    jt.credentials.add(machine_credential)
    jt_user = User.objects.create(username='******')
    jt.execute_role.members.add(jt_user)

    notification = Notification()
    notification.notification_template = notification_template
    notification.save()

    for i in range(3):
        job1 = jt.create_job()
        job1.created = datetime.now(tz=timezone('UTC'))
        job1.save()
        # create jobs with current time
        JobEvent.create_from_data(job_id=job1.pk,
                                  uuid='abc123',
                                  event='runner_on_start',
                                  stdout='a' * 1025).save()
        new_jobs.append(job1)

        job2 = jt.create_job()
        # create jobs 10 days ago
        job2.created = datetime.now(tz=timezone('UTC')) - timedelta(days=days)
        job2.save()
        job2.dependent_jobs.add(job1)
        JobEvent.create_from_data(job_id=job2.pk,
                                  uuid='abc123',
                                  event='runner_on_start',
                                  stdout='a' * 1025).save()
        old_jobs.append(job2)

    jt.last_job = job2
    jt.current_job = job2
    jt.save()
    host.last_job = job2
    host.save()
    notification.unifiedjob_notifications.add(job2)
    label.unifiedjob_labels.add(job2)
    jn = WorkflowJobNode.objects.create(job=job2)
    jn.save()
    jh = JobHostSummary.objects.create(job=job2)
    jh.save()

    return (old_jobs, new_jobs, days_str)
Example #5
def test_really_long_event_fields(field):
    with mock.patch.object(JobEvent, 'objects') as manager:
        JobEvent.create_from_data(**{
            'job_id': 123,
            field: 'X' * 4096
        })
        manager.create.assert_called_with(**{
            'job_id': 123,
            field: 'X' * 1021 + '...'
        })
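The expected value here is exactly 1024 characters: 1021 'X's plus a three-character ellipsis. A sketch of that kind of field truncation, with the helper name and limit as assumptions:

def truncate_field(value, max_len=1024, suffix='...'):
    # Cap the field at max_len characters, marking the cut with a suffix.
    if len(value) > max_len:
        return value[:max_len - len(suffix)] + suffix
    return value

assert truncate_field('X' * 4096) == 'X' * 1021 + '...'
assert len(truncate_field('X' * 4096)) == 1024

(Example #10 below pins the same 1024-character cap, but with a one-character '…' suffix, i.e. 'X' * 1023 + '…'.)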
Example #6
def test_host_summary_generation_with_limit():
    # Make an inventory with 10 hosts, run a playbook with a --limit
    # pointed at *one* host, and verify that *only* that host has an
    # associated JobHostSummary and that *only* that host has an updated
    # value for .last_job.
    hostnames = [f'Host {i}' for i in range(10)]
    inv = Inventory()
    inv.save()
    Host.objects.bulk_create([
        Host(created=now(), modified=now(), name=h, inventory_id=inv.id)
        for h in hostnames
    ])
    j = Job(inventory=inv)
    j.save()

    # host map is a data structure that tracks a mapping of host name --> ID
    # for the inventory, _regardless_ of whether or not there's a limit
    # applied to the actual playbook run
    host_map = dict((host.name, host.id) for host in inv.hosts.all())

    # by making the playbook_on_stats *only* include Host 1, we're emulating
    # the behavior of a `--limit=Host 1`
    matching_host = Host.objects.get(name='Host 1')
    JobEvent.create_from_data(
        job_id=j.pk,
        parent_uuid='abc123',
        event='playbook_on_stats',
        event_data={
            'ok': {
                matching_host.name: len(matching_host.name)
            },  # effectively, limit=Host 1
            'changed': {},
            'dark': {},
            'failures': {},
            'ignored': {},
            'processed': {},
            'rescued': {},
            'skipped': {},
        },
        host_map=host_map).save()

    # since the playbook_on_stats only references one host,
    # there should *only* be one JobHostSummary record (and it should
    # be related to the appropriate Host)
    assert JobHostSummary.objects.count() == 1
    for h in Host.objects.all():
        if h.name == 'Host 1':
            assert h.last_job_id == j.id
            assert h.last_job_host_summary_id == JobHostSummary.objects.first().id
        else:
            # all other hosts in the inventory should remain untouched
            assert h.last_job_id is None
            assert h.last_job_host_summary_id is None
Example #7
def test_job_events_sublist_truncation(get, organization_factory, job_template_factory, truncate, expected):
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt", organization=objs.organization, inventory='test_inv', project='test_proj').job_template
    job = jt.create_unified_job()
    JobEvent.create_from_data(job_id=job.pk, uuid='abc123', event='runner_on_start', stdout='a' * 1025, job_created=job.created).save()

    url = reverse('api:job_job_events_list', kwargs={'pk': job.pk})
    if not truncate:
        url += '?no_truncate=1'

    response = get(url, user=objs.superusers.admin, expect=200)
    assert (len(response.data['results'][0]['stdout']) == 1025) == expected
Example #8
def test_parent_failed(emit, event):
    j = Job()
    j.save()
    JobEvent.create_from_data(job_id=j.pk,
                              uuid='abc123',
                              event='playbook_on_task_start')
    assert JobEvent.objects.count() == 1
    for e in JobEvent.objects.all():
        assert e.failed is False

    JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event=event)
    assert JobEvent.objects.count() == 2
    for e in JobEvent.objects.all():
        assert e.failed is True
Example #9
    def test_hosts_are_counted_once(self, job, mocker):
        mock_event = JobEvent(
            **{
                'event': 'playbook_on_stats',
                'event_data': {
                    'skipped': {
                        'localhost': 2,
                        'fiz': 1,
                    },
                    'ok': {
                        'localhost': 1,
                        'foo': 2,
                    },
                    'changed': {
                        'localhost': 1,
                        'bar': 3,
                    },
                    'dark': {
                        'localhost': 2,
                        'fiz': 2,
                    }
                }
            })

        mock_qs = namedtuple('mock_qs', ['get'])(
            mocker.MagicMock(return_value=mock_event))
        job.job_events.only = mocker.MagicMock(return_value=mock_qs)

        serializer = JobDetailSerializer()
        host_status_counts = serializer.get_host_status_counts(job)

        assert host_status_counts == {'ok': 1, 'changed': 1, 'dark': 2}
Example #10
def test_really_long_event_fields(field):
    event = JobEvent.create_from_data(**{
        'job_id': 123,
        'event_data': {
            field: 'X' * 4096
        }
    })
    assert event.event_data[field] == 'X' * 1023 + '…'
Example #11
def test_host_summary_generation():
    hostnames = [f'Host {i}' for i in range(100)]
    inv = Inventory()
    inv.save()
    Host.objects.bulk_create([
        Host(created=now(), modified=now(), name=h, inventory_id=inv.id)
        for h in hostnames
    ])
    j = Job(inventory=inv)
    j.save()
    host_map = dict((host.name, host.id) for host in inv.hosts.all())
    JobEvent.create_from_data(job_id=j.pk,
                              parent_uuid='abc123',
                              event='playbook_on_stats',
                              event_data={
                                  'ok':
                                  dict((hostname, len(hostname))
                                       for hostname in hostnames),
                                  'changed': {},
                                  'dark': {},
                                  'failures': {},
                                  'ignored': {},
                                  'processed': {},
                                  'rescued': {},
                                  'skipped': {},
                              },
                              host_map=host_map).save()

    assert j.job_host_summaries.count() == len(hostnames)
    assert sorted([s.host_name
                   for s in j.job_host_summaries.all()]) == sorted(hostnames)

    for s in j.job_host_summaries.all():
        assert host_map[s.host_name] == s.host_id
        assert s.ok == len(s.host_name)
        assert s.changed == 0
        assert s.dark == 0
        assert s.failures == 0
        assert s.ignored == 0
        assert s.processed == 0
        assert s.rescued == 0
        assert s.skipped == 0

    for host in Host.objects.all():
        assert host.last_job_id == j.id
        assert host.last_job_host_summary.host == host
Example #12
def test_parent_changed(emit):
    j = Job()
    j.save()
    JobEvent.create_from_data(job_id=j.pk,
                              uuid='abc123',
                              event='playbook_on_task_start')
    assert JobEvent.objects.count() == 1
    for e in JobEvent.objects.all():
        assert e.changed is False

    JobEvent.create_from_data(job_id=j.pk,
                              parent_uuid='abc123',
                              event='runner_on_ok',
                              event_data={'res': {
                                  'changed': ['localhost']
                              }})
    assert JobEvent.objects.count() == 2
    for e in JobEvent.objects.all():
        assert e.changed is True
Example #13
def test_unicode_with_base64_ansi(sqlite_copy_expert, get, admin):
    job = Job()
    job.save()
    for i in range(3):
        JobEvent(job=job, stdout=u'オ{}\n'.format(i), start_line=i).save()
    url = reverse('api:job_stdout', kwargs={
        'pk': job.pk
    }) + '?format=json&content_encoding=base64&content_format=ansi'

    response = get(url, user=admin, expect=200)
    content = base64.b64decode(json.loads(response.content)['content'])
    assert content.decode('utf-8').splitlines() == ['オ%d' % i for i in range(3)]
Example #14
    def job_events(self, epoch):
        return [
            JobEvent(created=epoch),
            JobEvent(created=epoch + timedelta(seconds=10)),
            JobEvent(created=epoch + timedelta(seconds=20)),
            JobEvent(created=epoch + timedelta(seconds=30)),
            JobEvent(created=epoch + timedelta(seconds=31)),
            JobEvent(created=epoch + timedelta(seconds=31, milliseconds=1)),
            JobEvent(created=epoch + timedelta(seconds=31, microseconds=1, milliseconds=1)),
        ]
Example #15
def test_parent_changed(emit):
    j = Job()
    j.save()
    JobEvent.create_from_data(job_id=j.pk,
                              uuid='abc123',
                              event='playbook_on_task_start').save()
    assert JobEvent.objects.count() == 1
    for e in JobEvent.objects.all():
        assert e.changed is False

    JobEvent.create_from_data(job_id=j.pk,
                              parent_uuid='abc123',
                              event='runner_on_ok',
                              event_data={
                                  'res': {
                                      'changed': ['localhost']
                                  }
                              }).save()
    # the `playbook_on_stats` event is where we update the parent changed linkage
    JobEvent.create_from_data(job_id=j.pk,
                              parent_uuid='abc123',
                              event='playbook_on_stats').save()
    events = JobEvent.objects.filter(
        event__in=['playbook_on_task_start', 'runner_on_ok'])
    assert events.count() == 2
    for e in events.all():
        assert e.changed is True
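As the comment notes, the changed flag on parents is only reconciled once playbook_on_stats arrives. A single-pass sketch of what that propagation step could look like; this is an illustration, and the real handler may walk the full ancestry rather than one level:

def propagate_changed(job_id):
    # Collect parents of events that reported a change, then flip the
    # flag on those parent events in one bulk update.
    changed_parents = set(
        JobEvent.objects.filter(job_id=job_id, changed=True)
        .exclude(parent_uuid='')
        .values_list('parent_uuid', flat=True))
    JobEvent.objects.filter(job_id=job_id,
                            uuid__in=changed_parents).update(changed=True)

The failed flag in Example #18 presumably follows the same pattern.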
Example #16
def test_unicode_with_base64_ansi(sqlite_copy_expert, get, admin):
    created = datetime.utcnow()
    job = Job(created=created)
    job.save()
    for i in range(3):
        JobEvent(job=job,
                 stdout='オ{}\n'.format(i),
                 start_line=i,
                 job_created=created).save()
    url = reverse('api:job_stdout', kwargs={
        'pk': job.pk
    }) + '?format=json&content_encoding=base64'

    response = get(url, user=admin, expect=200)
    content = base64.b64decode(
        json.loads(smart_str(response.content))['content'])
    assert smart_str(content).splitlines() == ['オ%d' % i for i in range(3)]
Example #17
def test_log_from_job_event_object():
    job = Job(id=4)
    event = JobEvent(job_id=job.id)
    formatter = LogstashFormatter()

    data_for_log = formatter.reformat_data_for_log(
        dict(python_objects=dict(job_event=event)), kind='job_events')

    # Check entire body of data for any exceptions from getattr on event object
    for fd in data_for_log:
        if not isinstance(data_for_log[fd], str):
            continue
        assert 'Exception' not in data_for_log[fd], 'Exception delivered in data: {}'.format(data_for_log[fd])

    # Verify existence of certain high-importance fields
    for fd in ['changed', 'uuid', 'start_line', 'end_line', 'id', 'counter', 'host_name', 'stdout']:
        assert fd in data_for_log

    assert data_for_log['job'] == 4
Example #18
def test_parent_failed(emit, event):
    j = Job()
    j.save()
    JobEvent.create_from_data(job_id=j.pk, uuid='abc123', event='playbook_on_task_start').save()
    assert JobEvent.objects.count() == 1
    for e in JobEvent.objects.all():
        assert e.failed is False

    JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event=event).save()

    # the `playbook_on_stats` event is where we update the parent failed linkage
    JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event='playbook_on_stats').save()
    events = JobEvent.objects.filter(event__in=['playbook_on_task_start', event])
    assert events.count() == 2
    for e in events.all():
        assert e.failed is True
Example #19
def test_job_job_events_children_summary_is_tree(get, organization_factory,
                                                 job_template_factory):
    '''
    children_summary should return {is_tree: False} if the event structure is not tree-like
    '''
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt",
                              organization=objs.organization,
                              inventory='test_inv',
                              project='test_proj').job_template
    job = jt.create_unified_job()
    url = reverse('api:job_job_events_children_summary', kwargs={'pk': job.pk})
    response = get(url, user=objs.superusers.admin, expect=200)
    assert response.data["event_processing_finished"] == False
    '''
    E1
      E2
        E3
        E4 (verbose)
      E5
        E6 <-- parent is E2, but comes after another "branch" E5
    '''
    JobEvent.create_from_data(job_id=job.pk,
                              uuid='uuid1',
                              parent_uuid='',
                              event="playbook_on_start",
                              counter=1,
                              stdout='a' * 1024,
                              job_created=job.created).save()
    JobEvent.create_from_data(job_id=job.pk,
                              uuid='uuid2',
                              parent_uuid='uuid1',
                              event="playbook_on_play_start",
                              counter=2,
                              stdout='a' * 1024,
                              job_created=job.created).save()
    JobEvent.create_from_data(job_id=job.pk,
                              uuid='uuid3',
                              parent_uuid='uuid2',
                              event="playbook_on_task_start",
                              counter=3,
                              stdout='a' * 1024,
                              job_created=job.created).save()
    JobEvent.create_from_data(job_id=job.pk,
                              uuid='uuid4',
                              parent_uuid='',
                              event='verbose',
                              counter=4,
                              stdout='a' * 1024,
                              job_created=job.created).save()
    JobEvent.create_from_data(job_id=job.pk,
                              uuid='uuid5',
                              parent_uuid='uuid1',
                              event="playbook_on_play_start",
                              counter=5,
                              stdout='a' * 1024,
                              job_created=job.created).save()
    JobEvent.create_from_data(job_id=job.pk,
                              uuid='uuid6',
                              parent_uuid='uuid2',
                              event="playbook_on_task_start",
                              counter=6,
                              stdout='a' * 1024,
                              job_created=job.created).save()
    job.emitted_events = job.get_event_queryset().count()
    job.status = "successful"
    job.save()
    url = reverse('api:job_job_events_children_summary', kwargs={'pk': job.pk})
    response = get(url, user=objs.superusers.admin, expect=200)
    assert response.data["children_summary"] == {}
    assert response.data["meta_event_nested_uuid"] == {}
    assert response.data["event_processing_finished"] == True
    assert response.data["is_tree"] == False
Example #20
def make_the_data():
    with disable_activity_stream():
        with batch_role_ancestor_rebuilding(), disable_computed_fields():
            admin, created = User.objects.get_or_create(username='******',
                                                        is_superuser=True)
            if created:
                admin.is_superuser = True
                admin.save()
                admin.set_password('test')
                admin.save()

            org_admin, created = User.objects.get_or_create(
                username='******')
            if created:
                org_admin.set_password('test')
                org_admin.save()

            org_member, created = User.objects.get_or_create(
                username='******')
            if created:
                org_member.set_password('test')
                org_member.save()

            prj_admin, created = User.objects.get_or_create(
                username='******')
            if created:
                prj_admin.set_password('test')
                prj_admin.save()

            jt_admin, created = User.objects.get_or_create(username='******')
            if created:
                jt_admin.set_password('test')
                jt_admin.save()

            inv_admin, created = User.objects.get_or_create(
                username='******')
            if created:
                inv_admin.set_password('test')
                inv_admin.save()

            print('# Creating %d organizations' % n_organizations)
            for i in range(n_organizations):
                sys.stdout.write('\r%d     ' % (i + 1))
                sys.stdout.flush()
                org, _ = Organization.objects.get_or_create(
                    name='%s Organization %d' % (prefix, i))
                organizations.append(org)
                if i == 0:
                    org.admin_role.members.add(org_admin)
                    org.member_role.members.add(org_admin)
                    org.member_role.members.add(org_member)
                    org.member_role.members.add(prj_admin)
                    org.member_role.members.add(jt_admin)
                    org.member_role.members.add(inv_admin)

            print('')

            print('# Creating %d users' % n_users)
            org_idx = 0
            for n in spread(n_users, n_organizations):
                for i in range(n):
                    ids['user'] += 1
                    user_id = ids['user']
                    sys.stdout.write('\r   Assigning %d to %s: %d     ' %
                                     (n, organizations[org_idx].name, i + 1))
                    sys.stdout.flush()
                    user, _ = User.objects.get_or_create(username='******' %
                                                         (prefix, user_id))
                    organizations[org_idx].member_role.members.add(user)
                    users.append(user)
                org_idx += 1
                print('')

            creator_gen = yield_choice(users)
            for i in range(6):
                next(creator_gen)
            modifier_gen = yield_choice(users)

            print('# Creating %d teams' % n_teams)
            org_idx = 0
            for n in spread(n_teams, n_organizations):
                org = organizations[org_idx]
                for i in range(n):
                    ids['team'] += 1
                    team_id = ids['team']
                    sys.stdout.write('\r   Assigning %d to %s: %d     ' %
                                     (n, org.name, i + 1))
                    sys.stdout.flush()
                    team, _ = Team.objects.get_or_create(
                        name='%s Team %d Org %d' % (prefix, team_id, org_idx),
                        organization=org,
                        defaults=dict(created_by=next(creator_gen),
                                      modified_by=next(modifier_gen)))
                    teams.append(team)
                org_idx += 1
                print('')

            print('# Adding users to teams')
            for org in organizations:
                org_teams = [t for t in org.teams.all()]
                org_users = [u for u in org.member_role.members.all()]
                print('  Spreading %d users across %d teams for %s' %
                      (len(org_users), len(org_teams), org.name))
                # Our normal spread for most users
                cur_user_idx = 0
                cur_team_idx = 0
                for n in spread(len(org_users), len(org_teams)):
                    team = org_teams[cur_team_idx]
                    for i in range(n):
                        if cur_user_idx < len(org_users):
                            user = org_users[cur_user_idx]
                            team.member_role.members.add(user)
                        cur_user_idx += 1
                    cur_team_idx += 1

                # First user gets added to all teams
                for team in org_teams:
                    team.member_role.members.add(org_users[0])

            print('# Creating %d credentials for users' %
                  (n_credentials - n_credentials // 2))
            user_idx = 0
            for n in spread(n_credentials - n_credentials // 2, n_users):
                user = users[user_idx]
                for i in range(n):
                    ids['credential'] += 1
                    sys.stdout.write('\r   %d     ' % (ids['credential']))
                    sys.stdout.flush()
                    credential_id = ids['credential']
                    credential, _ = Credential.objects.get_or_create(
                        name='%s Credential %d User %d' %
                        (prefix, credential_id, user_idx),
                        defaults=dict(created_by=next(creator_gen),
                                      modified_by=next(modifier_gen)),
                        credential_type=CredentialType.from_v1_kind('ssh'))
                    credential.admin_role.members.add(user)
                    credentials.append(credential)
                user_idx += 1
            print('')

            credential_gen = yield_choice(credentials)

            print('# Creating %d credentials for teams' % (n_credentials // 2))
            team_idx = 0
            starting_credential_id = ids['credential']
            for n in spread(n_credentials // 2, n_teams):
                team = teams[team_idx]
                for i in range(n):
                    ids['credential'] += 1
                    sys.stdout.write(
                        '\r   %d     ' %
                        (ids['credential'] - starting_credential_id))
                    sys.stdout.flush()
                    credential_id = ids['credential']
                    credential, _ = Credential.objects.get_or_create(
                        name='%s Credential %d team %d' %
                        (prefix, credential_id, team_idx),
                        defaults=dict(created_by=next(creator_gen),
                                      modified_by=next(modifier_gen)),
                        credential_type=CredentialType.from_v1_kind('ssh'))
                    credential.admin_role.parents.add(team.member_role)
                    credentials.append(credential)
                team_idx += 1
            print('')

            print('# Creating %d projects' % n_projects)
            org_idx = 0
            for n in spread(n_projects, n_organizations):
                org = organizations[org_idx]
                for i in range(n):
                    ids['project'] += 1
                    project_id = ids['project']
                    sys.stdout.write('\r   Assigning %d to %s: %d     ' %
                                     (n, org.name, i + 1))
                    sys.stdout.flush()
                    project, _ = Project.objects.get_or_create(
                        name='%s Project %d Org %d' %
                        (prefix, project_id, org_idx),
                        organization=org,
                        defaults=dict(
                            created_by=next(creator_gen),
                            modified_by=next(modifier_gen),
                            scm_url=
                            'https://github.com/jlaska/ansible-playbooks.git',
                            scm_type='git',
                            playbook_files=[
                                "check.yml", "debug-50.yml", "debug.yml",
                                "debug2.yml", "debug_extra_vars.yml",
                                "dynamic_inventory.yml", "environ_test.yml",
                                "fail_unless.yml", "pass_unless.yml",
                                "pause.yml", "ping-20.yml", "ping.yml",
                                "setfact_50.yml", "vault.yml"
                            ]))
                    projects.append(project)
                    if org_idx == 0 and i == 0:
                        project.admin_role.members.add(prj_admin)

                org_idx += 1
                print('')

            print('# Creating %d inventories' % n_inventories)
            org_idx = 0
            for n in spread(n_inventories,
                            min(n_inventories // 4 + 1, n_organizations)):
                org = organizations[org_idx]
                for i in range(n):
                    ids['inventory'] += 1
                    inventory_id = ids['inventory']
                    sys.stdout.write('\r   Assigning %d to %s: %d     ' %
                                     (n, org.name, i + 1))
                    sys.stdout.flush()
                    inventory, _ = Inventory.objects.get_or_create(
                        name='%s Inventory %d Org %d' %
                        (prefix, inventory_id, org_idx),
                        organization=org,
                        defaults=dict(created_by=next(creator_gen),
                                      modified_by=next(modifier_gen)),
                        variables='{"ansible_connection": "local"}')
                    inventories.append(inventory)
                    if org_idx == 0 and i == 0:
                        inventory.admin_role.members.add(inv_admin)

                org_idx += 1
                print('')

            print('# Creating %d inventory_groups' % n_inventory_groups)
            inv_idx = 0
            for n in spread(n_inventory_groups, n_inventories):
                inventory = inventories[inv_idx]
                parent_list = [None] * 3
                for i in range(n):
                    ids['group'] += 1
                    group_id = ids['group']
                    sys.stdout.write('\r   Assigning %d to %s: %d     ' %
                                     (n, inventory.name, i + 1))
                    sys.stdout.flush()
                    group, _ = Group.objects.get_or_create(
                        name='%s Group %d Inventory %d' %
                        (prefix, group_id, inv_idx),
                        inventory=inventory,
                        defaults=dict(created_by=next(creator_gen),
                                      modified_by=next(modifier_gen)))
                    # Have each group have up to 3 parent groups
                    for parent_n in range(3):
                        if i // 4 + parent_n < len(
                                parent_list) and parent_list[i // 4 +
                                                             parent_n]:
                            group.parents.add(parent_list[i // 4 + parent_n])
                    if parent_list[i // 4] is None:
                        parent_list[i // 4] = group
                    else:
                        parent_list.append(group)
                    inventory_groups.append(group)

                inv_idx += 1
                print('')

            print('# Creating %d inventory_hosts' % n_inventory_hosts)
            group_idx = 0
            for n in spread(n_inventory_hosts, n_inventory_groups):
                group = inventory_groups[group_idx]
                for i in range(n):
                    ids['host'] += 1
                    host_id = ids['host']
                    sys.stdout.write('\r   Assigning %d to %s: %d     ' %
                                     (n, group.name, i + 1))
                    sys.stdout.flush()
                    host, _ = Host.objects.get_or_create(
                        name='%s.host-%06d.group-%05d.dummy' %
                        (prefix, host_id, group_idx),
                        inventory=group.inventory,
                        defaults=dict(created_by=next(creator_gen),
                                      modified_by=next(modifier_gen)))
                    # Add the host to up to 3 groups
                    host.groups.add(group)
                    for m in range(2):
                        if group_idx + m < len(
                                inventory_groups
                        ) and group.inventory.id == inventory_groups[
                                group_idx + m].inventory.id:
                            host.groups.add(inventory_groups[group_idx + m])

                    inventory_hosts.append(host)

                group_idx += 1
                print('')

            print('# Creating %d job_templates' % n_job_templates)
            project_idx = 0
            inv_idx = 0
            for n in spread(n_job_templates, n_projects):
                project = projects[project_idx]
                for i in range(n):
                    ids['job_template'] += 1
                    job_template_id = ids['job_template']
                    sys.stdout.write('\r   Assigning %d to %s: %d     ' %
                                     (n, project.name, i + 1))
                    sys.stdout.flush()

                    inventory = None
                    org_inv_count = project.organization.inventories.count()
                    if org_inv_count > 0:
                        inventory = project.organization.inventories.all()[
                            inv_idx % org_inv_count]
                    extra_kwargs = {}

                    job_template, _ = JobTemplate.objects.get_or_create(
                        name='%s Job Template %d Project %d' %
                        (prefix, job_template_id, project_idx),
                        defaults=dict(inventory=inventory,
                                      project=project,
                                      created_by=next(creator_gen),
                                      modified_by=next(modifier_gen),
                                      playbook="debug.yml",
                                      **extra_kwargs))
                    job_template.credentials.add(next(credential_gen))
                    if ids['job_template'] % 7 == 0:
                        job_template.credentials.add(next(credential_gen))
                    if ids['job_template'] % 5 == 0:  # formerly cloud credential
                        job_template.credentials.add(next(credential_gen))
                    job_template._is_new = _
                    job_templates.append(job_template)
                    inv_idx += 1
                    if project_idx == 0 and i == 0:
                        job_template.admin_role.members.add(jt_admin)
                project_idx += 1
                if n > 0:
                    print('')

            print('# Creating %d Workflow Job Templates' % n_wfjts)
            org_idx = 0
            for n in spread(n_wfjts, n_organizations):
                org = organizations[org_idx]
                for i in range(n):
                    ids['wfjts'] += 1
                    wfjt_id = ids['wfjts']
                    sys.stdout.write('\r   Assigning %d to %s: %d     ' %
                                     (n, org.name, i + 1))
                    sys.stdout.flush()
                    wfjt, _ = WorkflowJobTemplate.objects.get_or_create(
                        name='%s WFJT %d Org %d' % (prefix, wfjt_id, org_idx),
                        description=bulk_data_description,
                        organization=org,
                        defaults=dict(created_by=next(creator_gen),
                                      modified_by=next(modifier_gen)))
                    wfjt._is_new = _
                    wfjts.append(wfjt)
                org_idx += 1
                if n:
                    print('')

            print('# Creating %d Workflow Job Template nodes' % n_nodes)
            wfjt_idx = 0
            for n in spread(n_nodes, n_wfjts):
                wfjt = wfjts[wfjt_idx]
                if not wfjt._is_new:
                    continue
                jt_gen = yield_choice(job_templates)
                inv_gen = yield_choice(inventories)
                cred_gen = yield_choice(credentials)
                parent_idx = 0
                wfjt_nodes = []
                for i in range(n):
                    ids['nodes'] += 1
                    sys.stdout.write('\r   Assigning %d to %s: %d     ' %
                                     (n, wfjt.name, i + 1))
                    sys.stdout.flush()
                    kwargs = dict(workflow_job_template=wfjt,
                                  unified_job_template=next(jt_gen),
                                  modified=now())
                    if i % 2 == 0:
                        # only apply inventories for every other node
                        kwargs['inventory'] = next(inv_gen)
                    node, _ = WorkflowJobTemplateNode.objects.get_or_create(
                        **kwargs)
                    if i % 3 == 0:
                        # only apply prompted credential every 3rd node
                        node.credentials.add(next(cred_gen))
                    # nodes.append(node)
                    wfjt_nodes.append(node)
                    if i <= 3:
                        continue
                    parent_node = wfjt_nodes[parent_idx]
                    if parent_node.workflow_job_template != node.workflow_job_template:
                        raise Exception(
                            "Programming error, associating nodes in different workflows"
                        )
                    elif parent_node == node:
                        raise Exception("error, self association")
                    if parent_idx % 2 == 0:
                        parent_node.always_nodes.add(node)
                    else:
                        if (i + 1) % 3 == 0:
                            parent_node.failure_nodes.add(node)
                        else:
                            parent_node.success_nodes.add(node)
                    parent_idx = (parent_idx + 7) % len(wfjt_nodes)
                wfjt_idx += 1
                if n:
                    print('')

            print('# Creating %d Labels' % n_labels)
            org_idx = 0
            for n in spread(n_labels, n_organizations):
                org = organizations[org_idx]
                for i in range(n):
                    ids['labels'] += 1
                    label_id = ids['labels']
                    sys.stdout.write('\r   Assigning %d to %s: %d     ' %
                                     (n, org.name, i + 1))
                    sys.stdout.flush()
                    label, _ = Label.objects.get_or_create(
                        name='%sL_%do%d' % (prefix, label_id, org_idx),
                        organization=org,
                        defaults=dict(created_by=next(creator_gen),
                                      modified_by=next(modifier_gen)))
                    labels.append(label)
                org_idx += 1
                if n:
                    print('')
            label_gen = yield_choice(labels)

            print('# Adding labels to job templates')
            jt_idx = 0
            for n in spread(n_labels * 7, n_job_templates):
                if n == 0:
                    continue
                jt = job_templates[jt_idx]
                if not jt._is_new:
                    continue
                print('  Giving %d labels to %s JT' % (n, jt.name))
                for i in range(n):
                    jt.labels.add(next(label_gen))
                jt_idx += 1

            print('# Adding labels to workflow job templates')
            wfjt_idx = 0
            for n in spread(n_labels * 3, n_wfjts):
                wfjt = wfjts[wfjt_idx]
                if not wfjt._is_new:
                    continue
                print('  Giving %d labels to %s WFJT' % (n, wfjt.name))
                for i in range(n):
                    wfjt.labels.add(next(label_gen))
                wfjt_idx += 1

            # Disable logging here, because it will mess up output format
            logger = logging.getLogger('awx.main')
            logger.propagate = False

            print('# Creating %d jobs' % n_jobs)
            group_idx = 0
            job_template_idx = 0
            job_i = 0
            for n in spread(n_jobs, n_job_templates):
                job_template = job_templates[job_template_idx]
                for i in range(n):
                    sys.stdout.write('\r   Assigning %d to %s: %d     ' %
                                     (n, job_template.name, i + 1))
                    sys.stdout.flush()
                    if len(jobs) % 4 == 0:
                        job_stat = 'failed'
                    elif len(jobs) % 11 == 0:
                        job_stat = 'canceled'
                    else:
                        job_stat = 'successful'
                    job, _ = Job.objects.get_or_create(
                        job_template=job_template,
                        status=job_stat,
                        name="%s-%d" % (job_template.name, job_i),
                        project=job_template.project,
                        inventory=job_template.inventory,
                    )
                    for ec in job_template.credentials.all():
                        job.credentials.add(ec)
                    job._is_new = _
                    jobs.append(job)
                    job_i += 1
                    if not job._is_new:
                        group_idx += 1
                        continue
                    if i + 1 == n:
                        job_template.last_job = job
                        if job_template.pk % 5 == 0:
                            job_template.current_job = job
                        job_template.save()

                    if job._is_new:
                        with transaction.atomic():
                            if job_template.inventory:
                                inv_groups = [
                                    g for g in
                                    job_template.inventory.groups.all()
                                ]
                                if len(inv_groups):
                                    JobHostSummary.objects.bulk_create([
                                        JobHostSummary(job=job,
                                                       host=h,
                                                       host_name=h.name,
                                                       processed=1,
                                                       created=now(),
                                                       modified=now())
                                        for h in inv_groups[
                                            group_idx %
                                            len(inv_groups)].hosts.all()[:100]
                                    ])
                    group_idx += 1
                job_template_idx += 1
                if n:
                    print('')

            print('# Creating %d job events' % n_job_events)
            job_idx = 0
            for n in spread(n_job_events, n_jobs):
                job = jobs[job_idx]
                # Check if job already has events, for idempotence
                if not job._is_new:
                    continue
                # Bulk create in chunks with maximum chunk size
                MAX_BULK_CREATE = 100
                for j in range((n // MAX_BULK_CREATE) + 1):
                    n_subgroup = MAX_BULK_CREATE
                    if j == n // MAX_BULK_CREATE:
                        # on final pass, create the remainder
                        n_subgroup = n % MAX_BULK_CREATE
                    sys.stdout.write(
                        '\r   Creating %d job events for job %d, subgroup: %d'
                        % (n, job.id, j + 1))
                    sys.stdout.flush()
                    JobEvent.objects.bulk_create([
                        JobEvent(created=now(),
                                 modified=now(),
                                 job=job,
                                 event='runner_on_ok')
                        for i in range(n_subgroup)
                    ])
                job_idx += 1
                if n:
                    print('')
Example #21
def test_job_job_events_children_summary(get, organization_factory,
                                         job_template_factory):
    objs = organization_factory("org", superusers=['admin'])
    jt = job_template_factory("jt",
                              organization=objs.organization,
                              inventory='test_inv',
                              project='test_proj').job_template
    job = jt.create_unified_job()
    url = reverse('api:job_job_events_children_summary', kwargs={'pk': job.pk})
    response = get(url, user=objs.superusers.admin, expect=200)
    assert response.data["event_processing_finished"] == False
    '''
    E1
      E2
        E3
        E4 (verbose)
      E5
    '''
    JobEvent.create_from_data(job_id=job.pk,
                              uuid='uuid1',
                              parent_uuid='',
                              event="playbook_on_start",
                              counter=1,
                              stdout='a' * 1024,
                              job_created=job.created).save()
    JobEvent.create_from_data(job_id=job.pk,
                              uuid='uuid2',
                              parent_uuid='uuid1',
                              event="playbook_on_play_start",
                              counter=2,
                              stdout='a' * 1024,
                              job_created=job.created).save()
    JobEvent.create_from_data(job_id=job.pk,
                              uuid='uuid3',
                              parent_uuid='uuid2',
                              event="runner_on_start",
                              counter=3,
                              stdout='a' * 1024,
                              job_created=job.created).save()
    JobEvent.create_from_data(job_id=job.pk,
                              uuid='uuid4',
                              parent_uuid='',
                              event='verbose',
                              counter=4,
                              stdout='a' * 1024,
                              job_created=job.created).save()
    JobEvent.create_from_data(job_id=job.pk,
                              uuid='uuid5',
                              parent_uuid='uuid1',
                              event="playbook_on_task_start",
                              counter=5,
                              stdout='a' * 1024,
                              job_created=job.created).save()
    job.emitted_events = job.get_event_queryset().count()
    job.status = "successful"
    job.save()
    url = reverse('api:job_job_events_children_summary', kwargs={'pk': job.pk})
    response = get(url, user=objs.superusers.admin, expect=200)
    assert response.data["children_summary"] == {
        1: {
            "rowNumber": 0,
            "numChildren": 4
        },
        2: {
            "rowNumber": 1,
            "numChildren": 2
        }
    }
    assert response.data["meta_event_nested_uuid"] == {4: "uuid2"}
    assert response.data["event_processing_finished"] == True
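The expected children_summary maps an event counter to its zero-based row number and total descendant count: counter 1 has four descendants (uuid2, uuid3, uuid5, and the verbose event folded under uuid2 per meta_event_nested_uuid), counter 2 has two. A sketch of one way to compute that from (counter, uuid, parent_uuid) tuples; this is an illustration, not the endpoint's actual implementation, and it assumes counters are contiguous so rowNumber is counter - 1:

def children_summary(events):
    # events: (counter, uuid, effective_parent_uuid) in counter order,
    # with verbose events already assigned their nested parent uuid
    parent_of = {uuid: parent for counter, uuid, parent in events}
    counter_of = {uuid: counter for counter, uuid, parent in events}
    counts = {}
    for counter, uuid, parent in events:
        # credit every ancestor of this event with one descendant
        while parent:
            counts[parent] = counts.get(parent, 0) + 1
            parent = parent_of.get(parent, '')
    return {counter_of[uuid]: {'rowNumber': counter_of[uuid] - 1,
                               'numChildren': n}
            for uuid, n in counts.items()}

events = [
    (1, 'uuid1', ''),
    (2, 'uuid2', 'uuid1'),
    (3, 'uuid3', 'uuid2'),
    (4, 'uuid4', 'uuid2'),  # the verbose event, folded under uuid2
    (5, 'uuid5', 'uuid1'),
]
assert children_summary(events) == {
    1: {'rowNumber': 0, 'numChildren': 4},
    2: {'rowNumber': 1, 'numChildren': 2},
}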