Example #1
0
def mk_job(job_type='run', status='new', job_template=None, inventory=None, credential=None, project=None, extra_vars=None, persisted=True):
    """Factory for a Job wired to the given related objects.

    When ``persisted`` is True the job is saved and the credential (if any)
    is attached via the m2m; otherwise an unsaved in-memory Job is returned.

    NOTE(review): ``project`` is assigned *after* save, so it is not persisted
    here unless the caller saves again — preserved from the original behavior.
    """
    # Use None as the default instead of a shared mutable dict ({} as a
    # default argument is shared across all calls and is a classic pitfall).
    if extra_vars is None:
        extra_vars = {}
    job = Job(job_type=job_type, status=status, extra_vars=json.dumps(extra_vars))

    job.job_template = job_template
    job.inventory = inventory
    if persisted:
        job.save()
        job.credentials.add(credential)
    job.project = project

    return job
Example #2
0
 def test_project_update_metavars(self):
     """An unsaved Job's metavars should expose the project revision and
     the job template's id/name under both awx_* and tower_* prefixes."""
     fake_project = Project(name='jobs-sync', scm_revision='12345444')
     fake_template = JobTemplate(name='jobs-jt', id=92, pk=92)
     job = Job(
         name='fake-job',
         pk=40,
         id=40,
         launch_type='manual',
         project=fake_project,
         job_template=fake_template,
     )
     data = job.awx_meta_vars()
     assert data['awx_project_revision'] == '12345444'
     assert 'tower_job_template_id' in data
     assert data['tower_job_template_id'] == 92
     assert data['tower_job_template_name'] == 'jobs-jt'
Example #3
0
def test_vault_password_required(post, organization, admin):
    """A vault credential may be created with empty inputs, but any job that
    uses it must refuse to start until vault_password is supplied."""
    # Instantiate and persist the managed 'vault' credential type.
    vault = CredentialType.defaults['vault']()
    vault.save()
    response = post(
        reverse('api:credential_list'),
        {
            'credential_type': vault.pk,
            'organization': organization.id,
            'name': 'Best credential ever',
            'inputs': {}
        },
        admin
    )
    assert response.status_code == 201
    assert Credential.objects.count() == 1

    # vault_password must be specified by launch time
    j = Job()
    j.save()
    j.credentials.add(Credential.objects.first())
    # pre_start should refuse to launch and name the missing field.
    assert j.pre_start() == (False, None)
    assert 'required fields (vault_password)' in j.job_explanation
Example #4
0
 def test_should_reap(self, status, fail, execution_node, controller_node, modified):
     """Parametrized: reaper.reap() should mark the job failed (and scrub
     start_args) or leave it untouched, depending on the status/node/
     modification-time combination supplied by the parametrize decorator."""
     i = Instance(hostname='awx')
     i.save()
     j = Job(
         status=status,
         execution_node=execution_node,
         controller_node=controller_node,
         start_args='SENSITIVE',  # sentinel to prove reaping scrubs it
     )
     j.save()
     if modified:
         # we have to edit the modification time _without_ calling save()
         # (because .save() overwrites it to _now_)
         Job.objects.filter(id=j.id).update(modified=modified)
     reaper.reap(i)
     job = Job.objects.first()
     if fail:
         assert job.status == 'failed'
         assert 'marked as failed' in job.job_explanation
         # potentially sensitive launch args must be cleared on reap
         assert job.start_args == ''
     else:
         assert job.status == status
Example #5
0
def test_parent_failed(emit, event):
    """A failing child event should flip its parent's `failed` flag, but the
    linkage is only applied when the playbook_on_stats event arrives."""
    j = Job()
    j.save()
    # Parent event: not failed on its own.
    JobEvent.create_from_data(job_id=j.pk,
                              uuid='abc123',
                              event='playbook_on_task_start').save()
    assert JobEvent.objects.count() == 1
    for e in JobEvent.objects.all():
        assert e.failed is False

    # Child event (parametrized failure event type) linked to the parent.
    JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123',
                              event=event).save()

    # the `playbook_on_stats` event is where we update the parent failed linkage
    JobEvent.create_from_data(job_id=j.pk,
                              parent_uuid='abc123',
                              event='playbook_on_stats').save()
    events = JobEvent.objects.filter(
        event__in=['playbook_on_task_start', event])
    assert events.count() == 2
    for e in events.all():
        assert e.failed is True
Example #6
0
def job_with_survey():
    """Return an unsaved Job whose extra_vars hold survey-supplied secrets,
    with the password-type answers marked as encrypted."""
    survey_answers = {
        'submitter_email': '*****@*****.**',
        'secret_key': '6kQngg3h8lgiSTvIEb21',
        'SSN': '123-45-6789',
    }
    masked = {
        'secret_key': '$encrypted$',
        'SSN': '$encrypted$',
    }
    return Job(
        name="test-job-with-passwords",
        extra_vars=json.dumps(survey_answers),
        survey_passwords=masked,
    )
class TestCleanupInconsistentCeleryTasks():
    """Tests for TaskManager.cleanup_inconsistent_celery_tasks() and
    TaskManager.get_active_tasks(), with celery/DB access fully mocked."""

    @mock.patch.object(cache, 'get', return_value=None)
    @mock.patch.object(TaskManager, 'get_active_tasks', return_value=([], {}))
    @mock.patch.object(TaskManager, 'get_running_tasks', return_value=({'host1': [Job(id=2), Job(id=3),]}, []))
    @mock.patch.object(InstanceGroup.objects, 'prefetch_related', return_value=[])
    @mock.patch.object(Instance.objects, 'filter', return_value=mock.MagicMock(first=lambda: None))
    @mock.patch('awx.main.scheduler.task_manager.logger')
    def test_instance_does_not_exist(self, logger_mock, *args):
        """When the execution node has no Instance row, an error is logged.

        The RuntimeError side effect both halts cleanup and proves that
        logger.error was actually reached.
        """
        logger_mock.error = mock.MagicMock(side_effect=RuntimeError("mocked"))
        tm = TaskManager()
        with pytest.raises(RuntimeError) as excinfo:
            tm.cleanup_inconsistent_celery_tasks()

        assert "mocked" in str(excinfo.value)
        logger_mock.error.assert_called_once_with("Execution node Instance host1 not found in database. "
                                                  "The node is currently executing jobs ['job 2 (new)', "
                                                  "'job 3 (new)']")

    @mock.patch.object(cache, 'get', return_value=None)
    @mock.patch.object(TaskManager, 'get_active_tasks', return_value=([], {'host1': []}))
    @mock.patch.object(InstanceGroup.objects, 'prefetch_related', return_value=[])
    @mock.patch.object(TaskManager, 'get_running_tasks')
    @mock.patch('awx.main.scheduler.task_manager.logger')
    def test_save_failed(self, logger_mock, get_running_tasks, *args):
        """A DatabaseError raised while marking a job failed is swallowed and
        logged instead of propagating out of cleanup."""
        logger_mock.error = mock.MagicMock()
        job = Job(id=2, modified=tz_now(), status='running', celery_task_id='blah', execution_node='host1')
        # stub out the websocket side effect so no channel layer is needed
        job.websocket_emit_status = mock.MagicMock()
        get_running_tasks.return_value = ({'host1': [job]}, [])
        tm = TaskManager()

        with mock.patch.object(job, 'save', side_effect=DatabaseError):
            tm.cleanup_inconsistent_celery_tasks()
            job.save.assert_called_once()
            logger_mock.error.assert_called_once_with("Task job 2 (failed) DB error in marking failed. Job possibly deleted.")

    @mock.patch.object(InstanceGroup.objects, 'prefetch_related', return_value=[])
    @mock.patch('awx.main.scheduler.task_manager.Inspect')
    def test_multiple_active_instances_sanity_check(self, inspect_mock, *args):
        """Queue names reported by celery as 'celery@<host>' should come back
        from get_active_tasks() keyed by the bare hostname."""
        class MockInspector:
            pass

        mock_inspector = MockInspector()
        mock_inspector.active = lambda: {
            'celery@host1': [],
            'celery@host2': []
        }
        inspect_mock.return_value = mock_inspector
        tm = TaskManager()
        active_task_queues, queues = tm.get_active_tasks()
        assert 'host1' in queues
        assert 'host2' in queues
Example #8
0
 def test_project_update_metavars(self):
     """Project revision and job-template id/name metavars must be present
     under every supported variable prefix (JOB_VARIABLE_PREFIXES)."""
     data = Job(
         name='fake-job',
         pk=40,
         id=40,
         launch_type='manual',
         project=Project(name='jobs-sync', scm_revision='12345444'),
         job_template=JobTemplate(name='jobs-jt', id=92, pk=92),
     ).awx_meta_vars()
     for name in JOB_VARIABLE_PREFIXES:
         assert data['{}_project_revision'.format(name)] == '12345444'
         assert '{}_job_template_id'.format(name) in data
         assert data['{}_job_template_id'.format(name)] == 92
         assert data['{}_job_template_name'.format(name)] == 'jobs-jt'
Example #9
0
 def make_batch(N, **extra):
     """Bulk-create N canceled jobs (UnifiedJob parent rows plus a manual
     INSERT of the Job child rows) and return the last one created."""
     batch = [
         Job(status='canceled',
             created=now(),
             modified=now(),
             elapsed=0.,
             **extra)
         for _ in range(N)
     ]
     created_jobs = UnifiedJob.objects.bulk_create(batch)
     # Hand-build the INSERT for the Job table rows.
     insert = InsertQuery(Job)
     insert.insert_values(fields, created_jobs)
     with connection.cursor() as cursor:
         sql, params = insert.sql_with_params()[0]
         cursor.execute(sql, params)
     return created_jobs[-1]
Example #10
0
 def test_job_metavars(self):
     """awx_meta_vars() on a minimal Job should emit the job/user fields
     under both the awx_ and tower_ prefixes, and nothing else."""
     creator = User(username='******', pk=47, id=47)
     expected = {}
     for prefix in ('awx', 'tower'):
         expected['{}_job_id'.format(prefix)] = 42
         expected['{}_job_launch_type'.format(prefix)] = 'manual'
         expected['{}_user_name'.format(prefix)] = 'joe'
         expected['{}_user_id'.format(prefix)] = 47
     job = Job(name='fake-job',
               pk=42,
               id=42,
               launch_type='manual',
               created_by=creator)
     assert job.awx_meta_vars() == expected
Example #11
0
    def test_job_relaunch_copy_vars(self, machine_credential, inventory, deploy_jobtemplate, post, mocker, net_credential):
        """copy_unified_job() should carry over the inventory, the limit,
        and every attached credential, preserving credential order."""
        job_with_links = Job(name='existing-job', inventory=inventory)
        job_with_links.job_template = deploy_jobtemplate
        job_with_links.limit = "my_server"
        job_with_links.save()
        job_with_links.credentials.add(machine_credential)
        job_with_links.credentials.add(net_credential)
        second_job = job_with_links.copy_unified_job()

        # Check that job data matches the original variables
        assert [c.pk for c in second_job.credentials.all()] == [machine_credential.pk, net_credential.pk]
        assert second_job.inventory == job_with_links.inventory
        assert second_job.limit == 'my_server'
        assert net_credential in second_job.credentials.all()
Example #12
0
def spawn_bulk_jobs_simple(num):
    """Create `num` canceled jobs tied to the module-level `jt` and save
    them all inside a single transaction, printing progress every 100."""
    pending = [Job() for _ in range(num)]
    for new_job in pending:
        new_job.job_template = jt
        new_job.status = "canceled"
    with transaction.atomic():
        for idx, new_job in enumerate(pending):
            if idx % 100 == 0:
                print(idx)
            new_job.save()
Example #13
0
def test_log_from_job_event_object():
    """Reformatting a bare JobEvent for logstash must not embed exception
    text in any string field, and must include the high-importance fields."""
    job = Job(id=4)
    event = JobEvent(job_id=job.id)
    formatter = LogstashFormatter()

    data_for_log = formatter.reformat_data_for_log(
        dict(python_objects=dict(job_event=event)), kind='job_events')

    # Check entire body of data for any exceptions from getattr on event object
    for fd in data_for_log:
        # `basestring` exists only on Python 2 and raises NameError on
        # Python 3; `str` is the correct string check here.
        if not isinstance(data_for_log[fd], str):
            continue
        assert 'Exception' not in data_for_log[fd], 'Exception delivered in data: {}'.format(data_for_log[fd])

    # Verify existence of certain high-importance fields
    for fd in ['changed', 'uuid', 'start_line', 'end_line', 'id', 'counter', 'host_name', 'stdout']:
        assert fd in data_for_log

    assert data_for_log['job'] == 4
Example #14
0
 def test_job_metavars(self):
     """Every prefix in JOB_VARIABLE_PREFIXES should yield the complete set
     of job/user/inventory metavars for an unsaved Job."""
     # NOTE(review): the username literal appears redacted ('******') while
     # the expected user_name below is 'joe' — verify fixture data.
     maker = User(username='******', pk=47, id=47)
     inv = Inventory(name='example-inv', id=45)
     result_hash = {}
     for name in JOB_VARIABLE_PREFIXES:
         result_hash['{}_job_id'.format(name)] = 42
         result_hash['{}_job_launch_type'.format(name)] = 'manual'
         result_hash['{}_user_name'.format(name)] = 'joe'
         result_hash['{}_user_email'.format(name)] = ''
         result_hash['{}_user_first_name'.format(name)] = ''
         result_hash['{}_user_last_name'.format(name)] = ''
         result_hash['{}_user_id'.format(name)] = 47
         result_hash['{}_inventory_id'.format(name)] = 45
         result_hash['{}_inventory_name'.format(name)] = 'example-inv'
     assert Job(name='fake-job',
                pk=42,
                id=42,
                launch_type='manual',
                created_by=maker,
                inventory=inv).awx_meta_vars() == result_hash
Example #15
0
    def test_job_relaunch_copy_vars(self, machine_credential, inventory,
                                    deploy_jobtemplate, post, mocker,
                                    net_credential):
        """Relaunch via copy_unified_job() should preserve credential,
        inventory and limit while the copyable-field list is patched."""
        job_with_links = Job(name='existing-job', inventory=inventory)
        job_with_links.job_template = deploy_jobtemplate
        job_with_links.limit = "my_server"
        job_with_links.save()
        job_with_links.credentials.add(machine_credential)
        job_with_links.credentials.add(net_credential)
        # NOTE(review): pytest-mock's `mocker.patch` returns an already
        # started mock, not a context manager — confirm this `with` block
        # actually applies/reverts the patch as intended.
        with mocker.patch(
                'awx.main.models.unified_jobs.UnifiedJobTemplate._get_unified_job_field_names',
                return_value=['inventory', 'credential', 'limit']):
            second_job = job_with_links.copy_unified_job()

        # Check that job data matches the original variables
        assert second_job.credential == job_with_links.credential
        assert second_job.inventory == job_with_links.inventory
        assert second_job.limit == 'my_server'
        assert net_credential in second_job.credentials.all()
Example #16
0
    def make_batch(N, jt_pos=0):
        """Bulk-insert N jobs modeled on an existing JobTemplate.

        Cycles through saved JobTemplates starting at ``jt_pos`` and returns
        (last created job, next jt_pos, list of created pks).
        """
        jt = None
        while not jt:
            try:
                jt = JobTemplate.objects.all()[jt_pos % jt_count]
            except IndexError as e:
                # seems to happen every now and then due to some race condition
                print('Warning: IndexError on {} JT, error: {}'.format(
                    jt_pos % jt_count, e))
            jt_pos += 1
        # Copy the JT's concrete, truthy field values that also exist on Job.
        jt_defaults = dict((f.attname, getattr(jt, f.attname))
                           for f in JobTemplate._meta.get_fields()
                           if f.concrete and f.attname in job_field_names
                           and getattr(jt, f.attname))
        jt_defaults['job_template_id'] = jt.pk
        jt_defaults[
            'unified_job_template_id'] = jt.pk  # populated by save method

        jobs = [
            Job(
                status=STATUS_OPTIONS[i % len(STATUS_OPTIONS)],
                started=now() - time_delta,
                created=now() - time_delta,
                modified=now() - time_delta,
                finished=now() - time_delta,
                elapsed=0.0,
                **jt_defaults,
            ) for i in range(N)
        ]
        # bulk_create writes the UnifiedJob parent rows only; the Job child
        # rows are inserted below with a hand-built INSERT statement.
        ujs = UnifiedJob.objects.bulk_create(jobs)
        for uj in ujs:
            uj.unifiedjob_ptr_id = uj.id  # hack around the polymorphic id field not being picked up
        query = InsertQuery(Job)
        query.insert_values(fields, ujs)
        with connection.cursor() as cursor:
            query, params = query.sql_with_params()[0]
            cursor.execute(query, params)
        return ujs[-1], jt_pos, [uj.pk for uj in ujs]
Example #17
0
 def test_jt_friend_cancel(self, deploy_jobtemplate, admin_user, jt_user):
     """A user who did not create the job cannot cancel it."""
     other_users_job = Job(job_template=deploy_jobtemplate, created_by=admin_user)
     assert not JobAccess(jt_user).can_cancel(other_users_job)
Example #18
0
 def test_jt_self_cancel(self, deploy_jobtemplate, jt_user):
     """The user who launched a job is allowed to cancel it."""
     own_job = Job(job_template=deploy_jobtemplate, created_by=jt_user)
     assert JobAccess(jt_user).can_cancel(own_job)
Example #19
0
def job(mocker, hosts, inventory):
    """Fixture: unsaved Job (id=2) whose inventory-host lookup is mocked."""
    fixture_job = Job(inventory=inventory, id=2)
    fixture_job._get_inventory_hosts = mocker.Mock(return_value=hosts)
    return fixture_job
 def job(self):
     """Fixture: a minimal named, unsaved Job with pk=1."""
     minimal_job = Job(name="blah", pk=1)
     return minimal_job
Example #21
0
    params['organization'] = organization.id
    response = put(reverse('api:credential_detail', kwargs={'pk': cred.pk}),
                   params, admin)
    assert response.status_code == 200

    cred = Credential.objects.all()[:1].get()
    assert cred.inputs['username'] == 'joe'
    assert 'password' not in cred.inputs


@pytest.mark.django_db
@pytest.mark.parametrize(
    'relation, related_obj',
    [
        ['ad_hoc_commands', AdHocCommand()],
        ['unifiedjobs', Job()],
        ['unifiedjobtemplates', JobTemplate()],
        ['unifiedjobtemplates',
         InventorySource(source='ec2')],
        ['projects', Project()],
        ['workflowjobnodes', WorkflowJobNode()],
    ],
)
def test_credential_type_mutability(patch, organization, admin,
                                    credentialtype_ssh, credentialtype_aws,
                                    relation, related_obj):
    cred = Credential(credential_type=credentialtype_ssh,
                      name='Best credential ever',
                      organization=organization,
                      inputs={
                          'username': u'jim',
Example #22
0
def job(container_group):
    """Fixture: unsaved Job bound to a container instance group with stub
    project/inventory/job-template relations."""
    stub_template = JobTemplate(id=1, name='foo')
    return Job(
        pk=1,
        id=1,
        project=Project(),
        instance_group=container_group,
        inventory=Inventory(),
        job_template=stub_template,
    )
Example #23
0
def job2(mocker, hosts2, inventory, mock_cache):
    """Fixture: unsaved Job (id=3) with mocked inventory hosts and a mocked
    memcache connection."""
    fixture_job = Job(inventory=inventory, id=3)
    fixture_job._get_inventory_hosts = mocker.Mock(return_value=hosts2)
    fixture_job._get_memcache_connection = mocker.Mock(return_value=mock_cache)
    return fixture_job
Example #24
0
 def test_jt_org_admin_cancel(self, deploy_jobtemplate, org_admin, jt_user):
     """An organization admin may cancel a job created by another user."""
     someones_job = Job(job_template=deploy_jobtemplate, created_by=jt_user)
     assert JobAccess(org_admin).can_cancel(someones_job)
Example #25
0
def T(impact):
    """Return a Mock specced from a Job instance with the given task_impact."""
    fake_job = mock.Mock(Job())
    fake_job.task_impact = impact
    return fake_job
def jobs(mocker):
    """Fixture: 25 unsaved jobs named job-0 through job-24 with matching ids."""
    return [Job(id=idx, name='job-%d' % idx) for idx in range(0, 25)]
Example #27
0
def spawn_bulk_jobs(num):
    """Create `num` canceled jobs from the module-level jt/project/inv and
    save them (with credential attached) inside one transaction, pausing
    briefly every 100 saves to print progress."""
    pending = []
    for idx in range(num):
        new_job = Job()
        new_job.job_template = jt
        new_job.project = project
        new_job.playbook = jt.playbook
        new_job.inventory = inv
        new_job.name = "bulk_{0}".format(idx)
        new_job.status = "canceled"
        new_job.extra_vars = '{"sleeptime": 60}'
        new_job.allow_simultaneous = False
        pending.append(new_job)
    with transaction.atomic():
        for pos, new_job in enumerate(pending):
            if pos % 100 == 0:
                print(pos)
                time.sleep(.5)
            new_job.save()
            new_job.credentials.add(cred)