Example #1
def test_host_summary_generation_with_limit():
    # Make an inventory with 10 hosts, run a playbook with a --limit
    # pointed at *one* host, and verify that *only* that host has an
    # associated JobHostSummary and that *only* that host has an updated
    # value for .last_job.
    hostnames = [f'Host {i}' for i in range(10)]
    inv = Inventory()
    inv.save()
    Host.objects.bulk_create([
        Host(created=now(), modified=now(), name=h, inventory_id=inv.id)
        for h in hostnames
    ])
    j = Job(inventory=inv)
    j.save()

    # host_map tracks a mapping of host name --> ID for every host in the
    # inventory, _regardless_ of whether a limit is applied to the actual
    # playbook run
    host_map = dict((host.name, host.id) for host in inv.hosts.all())

    # by making the playbook_on_stats *only* include Host 1, we're emulating
    # the behavior of a `--limit=Host 1`
    matching_host = Host.objects.get(name='Host 1')
    JobEvent.create_from_data(
        job_id=j.pk,
        parent_uuid='abc123',
        event='playbook_on_stats',
        event_data={
            'ok': {
                matching_host.name: len(matching_host.name)
            },  # effectively, limit=Host 1
            'changed': {},
            'dark': {},
            'failures': {},
            'ignored': {},
            'processed': {},
            'rescued': {},
            'skipped': {},
        },
        host_map=host_map).save()

    # since the playbook_on_stats only references one host,
    # there should *only* be one JobHostSummary record (and it should
    # be related to the appropriate Host)
    assert JobHostSummary.objects.count() == 1
    for h in Host.objects.all():
        if h.name == 'Host 1':
            assert h.last_job_id == j.id
            assert h.last_job_host_summary_id == JobHostSummary.objects.first().id
        else:
            # all other hosts in the inventory should remain untouched
            assert h.last_job_id is None
            assert h.last_job_host_summary_id is None
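
A minimal, framework-free sketch of the host_map shape the comment above describes: a plain dict from host name to primary key covering every host in the inventory, built the same way whether or not a --limit narrows the run. The names and IDs here are illustrative.

def build_host_map(hosts):
    # hosts: iterable of (name, pk) pairs, e.g. drawn from inv.hosts.all()
    return {name: pk for name, pk in hosts}

host_map = build_host_map([('Host 0', 1), ('Host 1', 2)])
assert host_map == {'Host 0': 1, 'Host 1': 2}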
Example #2
def test_survey_passwords_not_in_extra_vars():
    """Tests that survey passwords not included in extra_vars are
    not included when displaying job information"""
    job = Job(name="test-survey-not-in",
              extra_vars=json.dumps({'submitter_email': '*****@*****.**'}),
              survey_passwords={
                  'secret_key': '$encrypted$',
                  'SSN': '$encrypted$'
              })
    assert json.loads(job.display_extra_vars()) == {
        'submitter_email': '*****@*****.**',
    }
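
A hypothetical sketch (not the actual AWX implementation) of the masking rule this test and Example #6 exercise: keys named in survey_passwords are swapped for their '$encrypted$' placeholder, but only when they actually occur in extra_vars; absent keys are never added.

import json

def display_extra_vars(extra_vars, survey_passwords):
    data = json.loads(extra_vars)
    for key, placeholder in survey_passwords.items():
        if key in data:
            data[key] = placeholder  # mask, never reveal
    return json.dumps(data)

shown = display_extra_vars(json.dumps({'email': 'user@example.com'}),
                           {'secret_key': '$encrypted$'})
assert json.loads(shown) == {'email': 'user@example.com'}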
Example #3
def test_unicode_with_base64_ansi(sqlite_copy_expert, get, admin):
    job = Job()
    job.save()
    for i in range(3):
        JobEvent(job=job, stdout=u'オ{}\n'.format(i), start_line=i).save()
    url = reverse('api:job_stdout', kwargs={
        'pk': job.pk
    }) + '?format=json&content_encoding=base64&content_format=ansi'

    response = get(url, user=admin, expect=200)
    content = base64.b64decode(json.loads(response.content)['content'])
    # b64decode returns bytes; decode before comparing against str values
    assert content.decode('utf-8').splitlines() == ['オ%d' % i for i in range(3)]
Example #4
    def test_project_update_metavars(self):
        data = Job(name='fake-job',
                   pk=40,
                   id=40,
                   launch_type='manual',
                   project=Project(name='jobs-sync', scm_revision='12345444'),
                   job_template=JobTemplate(name='jobs-jt', id=92,
                                            pk=92)).awx_meta_vars()
        assert data['awx_project_revision'] == '12345444'
        assert 'tower_job_template_id' in data
        assert data['tower_job_template_id'] == 92
        assert data['tower_job_template_name'] == 'jobs-jt'
Example #5
def spawn_bulk_jobs_simple(num):
    jobs = []
    for _ in range(num):
        j = Job()
        j.job_template = jt
        j.status = "canceled"
        jobs.append(j)
    with transaction.atomic():
        for i, j in enumerate(jobs):
            if i % 100 == 0:
                print(i)
            j.save()
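
The atomic block above avoids one commit per save; a hedged sketch of the same pattern with explicit chunking (chunk size illustrative), which keeps any single transaction from growing unbounded:

from django.db import transaction

def save_in_chunks(objs, chunk=100):
    for start in range(0, len(objs), chunk):
        with transaction.atomic():
            for obj in objs[start:start + chunk]:
                obj.save()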
Example #6
def job_with_survey():
    return Job(
        name="test-job-with-passwords",
        extra_vars=json.dumps({
            'submitter_email': '*****@*****.**',
            'secret_key': '6kQngg3h8lgiSTvIEb21',
            'SSN': '123-45-6789'
        }),
        survey_passwords={
            'secret_key': '$encrypted$',
            'SSN': '$encrypted$'
        },
    )
Example #7
class TestCleanupInconsistentCeleryTasks():
    @mock.patch.object(cache, 'get', return_value=None)
    @mock.patch.object(TaskManager, 'get_active_tasks', return_value=([], {}))
    @mock.patch.object(TaskManager, 'get_running_tasks', return_value=({'host1': [Job(id=2), Job(id=3),]}, []))
    @mock.patch.object(InstanceGroup.objects, 'prefetch_related', return_value=[])
    @mock.patch.object(Instance.objects, 'filter', return_value=mock.MagicMock(first=lambda: None))
    @mock.patch('awx.main.scheduler.task_manager.logger')
    def test_instance_does_not_exist(self, logger_mock, *args):
        logger_mock.error = mock.MagicMock(side_effect=RuntimeError("mocked"))
        tm = TaskManager()
        with pytest.raises(RuntimeError) as excinfo:
            tm.cleanup_inconsistent_celery_tasks()

        assert "mocked" in str(excinfo.value)
        logger_mock.error.assert_called_once_with("Execution node Instance host1 not found in database. "
                                                  "The node is currently executing jobs ['job 2 (new)', "
                                                  "'job 3 (new)']")

    @mock.patch.object(cache, 'get', return_value=None)
    @mock.patch.object(TaskManager, 'get_active_tasks', return_value=([], {'host1': []}))
    @mock.patch.object(InstanceGroup.objects, 'prefetch_related', return_value=[])
    @mock.patch.object(TaskManager, 'get_running_tasks')
    @mock.patch('awx.main.scheduler.task_manager.logger')
    def test_save_failed(self, logger_mock, get_running_tasks, *args):
        logger_mock.error = mock.MagicMock()
        job = Job(id=2, modified=tz_now(), status='running', celery_task_id='blah', execution_node='host1')
        job.websocket_emit_status = mock.MagicMock()
        get_running_tasks.return_value = ({'host1': [job]}, [])
        tm = TaskManager()

        with mock.patch.object(job, 'save', side_effect=DatabaseError):
            tm.cleanup_inconsistent_celery_tasks()
            job.save.assert_called_once()
            logger_mock.error.assert_called_once_with("Task job 2 (failed) DB error in marking failed. Job possibly deleted.")

    @mock.patch.object(InstanceGroup.objects, 'prefetch_related', return_value=[])
    @mock.patch('awx.main.scheduler.task_manager.Inspect')
    def test_multiple_active_instances_sanity_check(self, inspect_mock, *args):
        class MockInspector:
            pass

        mock_inspector = MockInspector()
        mock_inspector.active = lambda: {
            'celery@host1': [],
            'celery@host2': []
        }
        inspect_mock.return_value = mock_inspector
        tm = TaskManager()
        active_task_queues, queues = tm.get_active_tasks()
        assert 'host1' in queues
        assert 'host2' in queues
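
A hedged sketch of the hostname normalization this test implies: get_active_tasks apparently strips Celery's 'celery@' worker prefix when building the queues mapping. The helper name is illustrative, not AWX's.

def normalize_queues(active):
    # active: e.g. {'celery@host1': [], 'celery@host2': []}
    return {worker.split('@', 1)[-1]: tasks for worker, tasks in active.items()}

assert set(normalize_queues({'celery@host1': [], 'celery@host2': []})) == {'host1', 'host2'}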
Example #8
    def test_job_relaunch_copy_vars(self, machine_credential, inventory, deploy_jobtemplate, post, mocker, net_credential):
        job_with_links = Job(name='existing-job', inventory=inventory)
        job_with_links.job_template = deploy_jobtemplate
        job_with_links.limit = "my_server"
        job_with_links.save()
        job_with_links.credentials.add(machine_credential)
        job_with_links.credentials.add(net_credential)
        second_job = job_with_links.copy_unified_job()

        # Check that job data matches the original variables
        assert [c.pk for c in second_job.credentials.all()] == [machine_credential.pk, net_credential.pk]
        assert second_job.inventory == job_with_links.inventory
        assert second_job.limit == 'my_server'
        assert net_credential in second_job.credentials.all()
Example #9
    def test_project_update_metavars(self):
        data = Job(
            name='fake-job',
            pk=40,
            id=40,
            launch_type='manual',
            project=Project(name='jobs-sync', scm_revision='12345444'),
            job_template=JobTemplate(name='jobs-jt', id=92, pk=92),
        ).awx_meta_vars()
        for name in JOB_VARIABLE_PREFIXES:
            assert data['{}_project_revision'.format(name)] == '12345444'
            assert '{}_job_template_id'.format(name) in data
            assert data['{}_job_template_id'.format(name)] == 92
            assert data['{}_job_template_name'.format(name)] == 'jobs-jt'
Example #10
def test_parent_failed(emit, event):
    j = Job()
    j.save()
    JobEvent.create_from_data(job_id=j.pk,
                              uuid='abc123',
                              event='playbook_on_task_start')
    assert JobEvent.objects.count() == 1
    for e in JobEvent.objects.all():
        assert e.failed is False

    JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event=event)
    assert JobEvent.objects.count() == 2
    for e in JobEvent.objects.all():
        assert e.failed is True
Example #11
def test_parent_changed(emit):
    j = Job()
    j.save()
    JobEvent.create_from_data(job_id=j.pk, uuid='abc123', event='playbook_on_task_start').save()
    assert JobEvent.objects.count() == 1
    for e in JobEvent.objects.all():
        assert e.changed is False

    JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event='runner_on_ok', event_data={'res': {'changed': ['localhost']}}).save()
    # the `playbook_on_stats` event is where we update the parent changed linkage
    JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event='playbook_on_stats').save()
    events = JobEvent.objects.filter(event__in=['playbook_on_task_start', 'runner_on_ok'])
    assert events.count() == 2
    for e in events.all():
        assert e.changed is True
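
The comment above points at a parent-linkage pass; here is a hypothetical sketch of it (query and field names are illustrative, not AWX internals), assuming the JobEvent model from the surrounding examples: when playbook_on_stats arrives, any event whose children reported changed=True is itself flagged changed.

def propagate_changed(job_id):
    # collect the parents of every changed event, then flag those parents
    changed_parents = (JobEvent.objects
                       .filter(job_id=job_id, changed=True)
                       .exclude(parent_uuid='')
                       .values_list('parent_uuid', flat=True))
    JobEvent.objects.filter(job_id=job_id,
                            uuid__in=list(changed_parents)).update(changed=True)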
Example #12
def make_batch(N, **extra):
    jobs = [
        Job(status='canceled',
            created=now(),
            modified=now(),
            elapsed=0.,
            **extra) for _ in range(N)
    ]
    # bulk_create only fills the UnifiedJob parent table of the multi-table
    # inheritance pair; the Job child rows are inserted by hand below
    # (compare the "polymorphic id field" comment in Example #30)
    ujs = UnifiedJob.objects.bulk_create(jobs)
    query = InsertQuery(Job)
    query.insert_values(fields, ujs)
    with connection.cursor() as cursor:
        query, params = query.sql_with_params()[0]
        cursor.execute(query, params)
    return ujs[-1]
Example #13
def test_parent_failed(emit, event):
    j = Job()
    j.save()
    JobEvent.create_from_data(job_id=j.pk, uuid='abc123', event='playbook_on_task_start').save()
    assert JobEvent.objects.count() == 1
    for e in JobEvent.objects.all():
        assert e.failed is False

    JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event=event).save()

    # the `playbook_on_stats` event is where we update the parent failed linkage
    JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event='playbook_on_stats').save()
    events = JobEvent.objects.filter(event__in=['playbook_on_task_start', event])
    assert events.count() == 2
    for e in events.all():
        assert e.failed is True
Example #14
    def test_job_metavars(self):
        maker = User(username='******', pk=47, id=47)
        assert Job(name='fake-job',
                   pk=42,
                   id=42,
                   launch_type='manual',
                   created_by=maker).awx_meta_vars() == {
                       'tower_job_id': 42,
                       'awx_job_id': 42,
                       'tower_job_launch_type': 'manual',
                       'awx_job_launch_type': 'manual',
                       'awx_user_name': 'joe',
                       'tower_user_name': 'joe',
                       'awx_user_id': 47,
                       'tower_user_id': 47
                   }
Example #15
def test_unicode_with_base64_ansi(sqlite_copy_expert, get, admin):
    created = datetime.utcnow()
    job = Job(created=created)
    job.save()
    for i in range(3):
        JobEvent(job=job,
                 stdout='オ{}\n'.format(i),
                 start_line=i,
                 job_created=created).save()
    url = reverse('api:job_stdout', kwargs={
        'pk': job.pk
    }) + '?format=json&content_encoding=base64'

    response = get(url, user=admin, expect=200)
    content = base64.b64decode(
        json.loads(smart_str(response.content))['content'])
    assert smart_str(content).splitlines() == ['オ%d' % i for i in range(3)]
Example #16
def test_host_summary_generation():
    hostnames = [f'Host {i}' for i in range(100)]
    inv = Inventory()
    inv.save()
    Host.objects.bulk_create([
        Host(created=now(), modified=now(), name=h, inventory_id=inv.id)
        for h in hostnames
    ])
    j = Job(inventory=inv)
    j.save()
    host_map = dict((host.name, host.id) for host in inv.hosts.all())
    JobEvent.create_from_data(job_id=j.pk,
                              parent_uuid='abc123',
                              event='playbook_on_stats',
                              event_data={
                                  'ok':
                                  dict((hostname, len(hostname))
                                       for hostname in hostnames),
                                  'changed': {},
                                  'dark': {},
                                  'failures': {},
                                  'ignored': {},
                                  'processed': {},
                                  'rescued': {},
                                  'skipped': {},
                              },
                              host_map=host_map).save()

    assert j.job_host_summaries.count() == len(hostnames)
    assert sorted([s.host_name
                   for s in j.job_host_summaries.all()]) == sorted(hostnames)

    for s in j.job_host_summaries.all():
        assert host_map[s.host_name] == s.host_id
        assert s.ok == len(s.host_name)
        assert s.changed == 0
        assert s.dark == 0
        assert s.failures == 0
        assert s.ignored == 0
        assert s.processed == 0
        assert s.rescued == 0
        assert s.skipped == 0

    for host in Host.objects.all():
        assert host.last_job_id == j.id
        assert host.last_job_host_summary.host == host
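
A hypothetical sketch of the summary generation these assertions describe (names are illustrative, not AWX internals): one JobHostSummary per host appearing anywhere in the stats event, each counter copied from its category, and host_id resolved through host_map.

STATS_CATEGORIES = ('ok', 'changed', 'dark', 'failures', 'ignored',
                    'processed', 'rescued', 'skipped')

def summarize_stats(job, event_data, host_map):
    hostnames = set()
    for category in STATS_CATEGORIES:
        hostnames.update(event_data.get(category, {}))
    for name in hostnames:
        counters = {c: event_data.get(c, {}).get(name, 0)
                    for c in STATS_CATEGORIES}
        job.job_host_summaries.create(host_id=host_map.get(name),
                                      host_name=name, **counters)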
Example #17
    def test_save_failed(self, logger_mock, get_running_tasks, *args):
        logger_mock.error = mock.MagicMock()
        job = Job(id=2,
                  modified=tz_now(),
                  status='running',
                  celery_task_id='blah',
                  execution_node='host1')
        job.websocket_emit_status = mock.MagicMock()
        get_running_tasks.return_value = ({'host1': [job]}, [])
        tm = TaskManager()

        with mock.patch.object(job, 'save', side_effect=DatabaseError):
            tm.cleanup_inconsistent_celery_tasks()
            job.save.assert_called_once()
            logger_mock.error.assert_called_once_with(
                "Task job 2 (failed) DB error in marking failed. Job possibly deleted."
            )
Example #18
def test_parent_changed(emit):
    j = Job()
    j.save()
    JobEvent.create_from_data(job_id=j.pk,
                              uuid='abc123',
                              event='playbook_on_task_start')
    assert JobEvent.objects.count() == 1
    for e in JobEvent.objects.all():
        assert e.changed is False

    JobEvent.create_from_data(job_id=j.pk,
                              parent_uuid='abc123',
                              event='runner_on_ok',
                              event_data={'res': {
                                  'changed': ['localhost']
                              }})
    assert JobEvent.objects.count() == 2
    for e in JobEvent.objects.all():
        assert e.changed is True
Example #19
def test_log_from_job_event_object():
    job = Job(id=4)
    event = JobEvent(job_id=job.id)
    formatter = LogstashFormatter()

    data_for_log = formatter.reformat_data_for_log(
        dict(python_objects=dict(job_event=event)), kind='job_events')

    # Check entire body of data for any exceptions from getattr on event object
    for fd in data_for_log:
        if not isinstance(data_for_log[fd], str):
            continue
        assert 'Exception' not in data_for_log[fd], 'Exception delivered in data: {}'.format(data_for_log[fd])

    # Verify existence of certain high-importance fields
    for fd in ['changed', 'uuid', 'start_line', 'end_line', 'id', 'counter', 'host_name', 'stdout']:
        assert fd in data_for_log

    assert data_for_log['job'] == 4
Example #20
    def test_job_relaunch_copy_vars(self, machine_credential, inventory,
                                    deploy_jobtemplate, post, mocker,
                                    net_credential):
        job_with_links = Job(name='existing-job', inventory=inventory)
        job_with_links.job_template = deploy_jobtemplate
        job_with_links.limit = "my_server"
        job_with_links.save()
        job_with_links.credentials.add(machine_credential)
        job_with_links.credentials.add(net_credential)
        with mocker.patch(
                'awx.main.models.unified_jobs.UnifiedJobTemplate._get_unified_job_field_names',
                return_value=['inventory', 'credential', 'limit']):
            second_job = job_with_links.copy_unified_job()

        # Check that job data matches the original variables
        assert second_job.credential == job_with_links.credential
        assert second_job.inventory == job_with_links.inventory
        assert second_job.limit == 'my_server'
        assert net_credential in second_job.credentials.all()
Example #21
def test_vmware_create_fail_required_fields(post, organization, admin):
    params = {
        'credential_type': 1,
        'name': 'Best credential ever',
        'inputs': {}
    }
    vmware = CredentialType.defaults['vmware']()
    vmware.save()
    params['organization'] = organization.id
    response = post(reverse('api:credential_list'), params, admin)
    assert response.status_code == 201
    assert Credential.objects.count() == 1

    # username, password, and host must be specified by launch time
    j = Job()
    j.save()
    j.credentials.add(Credential.objects.first())
    assert j.pre_start() == (False, None)
    assert 'required fields (host, password, username)' in j.job_explanation
Example #22
def test_openstack_create_fail_required_fields(post, organization, admin):
    openstack = CredentialType.defaults['openstack']()
    openstack.save()
    params = {
        'credential_type': 1,
        'inputs': {},
        'kind': 'openstack',
        'name': 'Best credential ever',
        'organization': organization.id,
    }
    response = post(reverse('api:credential_list'), params, admin)
    assert response.status_code == 201

    # username, password, host, and project must be specified by launch time
    j = Job()
    j.save()
    j.credentials.add(Credential.objects.first())
    assert j.pre_start() == (False, None)
    assert 'required fields (host, password, project, username)' in j.job_explanation
Example #23
def test_vault_password_required(post, organization, admin):
    vault = CredentialType.defaults['vault']()
    vault.save()
    response = post(
        reverse('api:credential_list'), {
            'credential_type': vault.pk,
            'organization': organization.id,
            'name': 'Best credential ever',
            'inputs': {}
        }, admin)
    assert response.status_code == 201
    assert Credential.objects.count() == 1

    # vault_password must be specified by launch time
    j = Job()
    j.save()
    j.credentials.add(Credential.objects.first())
    assert j.pre_start() == (False, None)
    assert 'required fields (vault_password)' in j.job_explanation
Example #24
    def test_job_metavars(self):
        maker = User(username='******', pk=47, id=47)
        inv = Inventory(name='example-inv', id=45)
        result_hash = {}
        for name in JOB_VARIABLE_PREFIXES:
            result_hash['{}_job_id'.format(name)] = 42
            result_hash['{}_job_launch_type'.format(name)] = 'manual'
            result_hash['{}_user_name'.format(name)] = 'joe'
            result_hash['{}_user_email'.format(name)] = ''
            result_hash['{}_user_first_name'.format(name)] = ''
            result_hash['{}_user_last_name'.format(name)] = ''
            result_hash['{}_user_id'.format(name)] = 47
            result_hash['{}_inventory_id'.format(name)] = 45
            result_hash['{}_inventory_name'.format(name)] = 'example-inv'
        assert Job(name='fake-job',
                   pk=42,
                   id=42,
                   launch_type='manual',
                   created_by=maker,
                   inventory=inv).awx_meta_vars() == result_hash
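
Judging from Example #14, JOB_VARIABLE_PREFIXES in these tests covers both the 'awx' and the legacy 'tower' prefix; a hedged sketch of the idea, with every variable emitted once per prefix:

JOB_VARIABLE_PREFIXES = ('awx', 'tower')

def meta_vars(job_id, launch_type):
    data = {}
    for prefix in JOB_VARIABLE_PREFIXES:
        data['{}_job_id'.format(prefix)] = job_id
        data['{}_job_launch_type'.format(prefix)] = launch_type
    return data

assert meta_vars(42, 'manual')['tower_job_id'] == 42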
Example #25
def spawn_bulk_jobs(num):
    jobs = []
    for i in range(num):
        j = Job()
        j.job_template = jt
        j.project = project
        j.playbook = jt.playbook
        j.inventory = inv
        j.name = "bulk_{0}".format(i)
        j.status = "canceled"
        j.extra_vars = '{"sleeptime": 60}'
        j.allow_simultaneous = False
        jobs.append(j)
    with transaction.atomic():
        for i, j in enumerate(jobs):
            if i % 100 == 0:
                print(i)
                time.sleep(.5)
            j.save()
            j.credentials.add(cred)
Example #26
def mk_job(job_type='run',
           status='new',
           job_template=None,
           inventory=None,
           credential=None,
           project=None,
           extra_vars=None,
           persisted=True):
    # use None instead of a shared mutable default for extra_vars
    job = Job(job_type=job_type,
              status=status,
              extra_vars=json.dumps(extra_vars or {}))

    job.job_template = job_template
    job.inventory = inventory
    if persisted:
        job.save()
        job.credentials.add(credential)
    job.project = project

    return job
Example #27
def test_host_summary_generation_with_deleted_hosts():
    hostnames = [f'Host {i}' for i in range(10)]
    inv = Inventory()
    inv.save()
    Host.objects.bulk_create([
        Host(created=now(), modified=now(), name=h, inventory_id=inv.id)
        for h in hostnames
    ])
    j = Job(inventory=inv)
    j.save()
    host_map = dict((host.name, host.id) for host in inv.hosts.all())

    # delete half of the hosts during the playbook run
    for h in inv.hosts.all()[:5]:
        h.delete()

    JobEvent.create_from_data(job_id=j.pk,
                              parent_uuid='abc123',
                              event='playbook_on_stats',
                              event_data={
                                  'ok':
                                  dict((hostname, len(hostname))
                                       for hostname in hostnames),
                                  'changed': {},
                                  'dark': {},
                                  'failures': {},
                                  'ignored': {},
                                  'processed': {},
                                  'rescued': {},
                                  'skipped': {},
                              },
                              host_map=host_map).save()

    ids = sorted(
        [s.host_id or -1 for s in j.job_host_summaries.order_by('id').all()])
    names = sorted([s.host_name for s in j.job_host_summaries.all()])
    assert ids == [-1, -1, -1, -1, -1, 6, 7, 8, 9, 10]
    assert names == [
        'Host 0', 'Host 1', 'Host 2', 'Host 3', 'Host 4', 'Host 5', 'Host 6',
        'Host 7', 'Host 8', 'Host 9'
    ]
Example #28
    def test_do_not_reap_excluded_uuids(self, excluded_uuids, fail):
        i = Instance(hostname='awx')
        i.save()
        j = Job(
            status='running',
            execution_node='awx',
            controller_node='',
            start_args='SENSITIVE',
            celery_task_id='abc123',
        )
        j.save()

        # if the UUID is excluded, don't reap it
        reaper.reap(i, excluded_uuids=excluded_uuids)
        job = Job.objects.first()
        if fail:
            assert job.status == 'failed'
            assert 'marked as failed' in job.job_explanation
            assert job.start_args == ''
        else:
            assert job.status == 'running'
Example #29
    def test_should_reap(self, status, fail, execution_node, controller_node, modified):
        i = Instance(hostname='awx')
        i.save()
        j = Job(
            status=status,
            execution_node=execution_node,
            controller_node=controller_node,
            start_args='SENSITIVE',
        )
        j.save()
        if modified:
            # we have to edit the modification time _without_ calling save()
            # (because .save() overwrites it to _now_)
            Job.objects.filter(id=j.id).update(modified=modified)
        reaper.reap(i)
        job = Job.objects.first()
        if fail:
            assert job.status == 'failed'
            assert 'marked as failed' in job.job_explanation
            assert job.start_args == ''
        else:
            assert job.status == status
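
A hypothetical sketch of the reaper behavior Examples #28 and #29 assert (signature and fields are illustrative, not the actual AWX implementation): running jobs on the given node are marked failed, get an explanation, and have start_args scrubbed, unless their celery task UUID is excluded.

def reap(instance, excluded_uuids=()):
    doomed = (Job.objects
              .filter(status='running', execution_node=instance.hostname)
              .exclude(celery_task_id__in=excluded_uuids))
    for job in doomed:
        job.status = 'failed'
        job.job_explanation = 'Job marked as failed; its node went away.'
        job.start_args = ''
        job.save()
        job.websocket_emit_status('failed')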
Example #30
    def make_batch(N, jt_pos=0):
        jt = None
        while not jt:
            try:
                jt = JobTemplate.objects.all()[jt_pos % jt_count]
            except IndexError as e:
                # seems to happen every now and then due to some race condition
                print('Warning: IndexError on {} JT, error: {}'.format(
                    jt_pos % jt_count, e))
            jt_pos += 1
        jt_defaults = dict((f.attname, getattr(jt, f.attname))
                           for f in JobTemplate._meta.get_fields()
                           if f.concrete and f.attname in job_field_names
                           and getattr(jt, f.attname))
        jt_defaults['job_template_id'] = jt.pk
        jt_defaults['unified_job_template_id'] = jt.pk  # populated by save method

        jobs = [
            Job(
                status=STATUS_OPTIONS[i % len(STATUS_OPTIONS)],
                started=now() - time_delta,
                created=now() - time_delta,
                modified=now() - time_delta,
                finished=now() - time_delta,
                elapsed=0.0,
                **jt_defaults,
            ) for i in range(N)
        ]
        ujs = UnifiedJob.objects.bulk_create(jobs)
        for uj in ujs:
            uj.unifiedjob_ptr_id = uj.id  # hack around the polymorphic id field not being picked up
        query = InsertQuery(Job)
        query.insert_values(fields, ujs)
        with connection.cursor() as cursor:
            query, params = query.sql_with_params()[0]
            cursor.execute(query, params)
        return ujs[-1], jt_pos, [uj.pk for uj in ujs]