Example #1
class LaunchTimeConfigBase(BaseModel):
    """
    Needed as separate class from LaunchTimeConfig because some models
    use `extra_data` and some use `extra_vars`. We cannot change the API,
    so we have to fake it in the model definitions
     - model defines extra_vars - use this class
     - model needs to use extra_data - use LaunchTimeConfig
    Use this for models which are SurveyMixins and UnifiedJobs or Templates
    """
    class Meta:
        abstract = True

    # Prompting-related fields that have to be handled as special cases
    inventory = models.ForeignKey(
        'Inventory',
        related_name='%(class)ss',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
        help_text=_('Inventory applied as a prompt, assuming job template prompts for inventory'),
    )
    # All standard fields are stored in this dictionary field
    # This is a solution to the nullable CharField problem, specific to prompting
    char_prompts = JSONBlob(default=dict, blank=True)

    def prompts_dict(self, display=False):
        data = {}
        # Some types may have different prompts, but always a subset of JT prompts
        for prompt_name in JobTemplate.get_ask_mapping().keys():
            try:
                field = self._meta.get_field(prompt_name)
            except FieldDoesNotExist:
                field = None
            if isinstance(field, models.ManyToManyField):
                if not self.pk:
                    continue  # unsaved object can't have related many-to-many
                prompt_val = set(getattr(self, prompt_name).all())
                if len(prompt_val) > 0:
                    data[prompt_name] = prompt_val
            elif prompt_name == 'extra_vars':
                if self.extra_vars:
                    if display:
                        data[prompt_name] = self.display_extra_vars()
                    else:
                        data[prompt_name] = self.extra_vars
                    # Depending on model, field type may save and return as string
                    if isinstance(data[prompt_name], str):
                        data[prompt_name] = parse_yaml_or_json(data[prompt_name])
                if self.survey_passwords and not display:
                    data['survey_passwords'] = self.survey_passwords
            else:
                prompt_val = getattr(self, prompt_name)
                if prompt_val is not None:
                    data[prompt_name] = prompt_val
        return data
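A minimal, standalone sketch of the extra_vars normalization step above: whatever form the value was stored in (dict, JSON string, or YAML string), prompts_dict() hands back a dict. This parse_yaml_or_json is a simplified stand-in for the awx.main.utils helper of the same name, not its real implementation, and it assumes PyYAML is installed.

import json

import yaml  # assumption: PyYAML is available


def parse_yaml_or_json(value):
    # Simplified stand-in: accept dicts as-is, try JSON first, fall back to YAML.
    if isinstance(value, dict):
        return value
    try:
        return json.loads(value)
    except ValueError:
        return yaml.safe_load(value)


assert parse_yaml_or_json('{"a": 1}') == {'a': 1}
assert parse_yaml_or_json('a: 1') == {'a': 1}
assert parse_yaml_or_json({'a': 1}) == {'a': 1}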
Example #2
class Setting(CreatedModifiedModel):

    key = models.CharField(max_length=255)
    value = JSONBlob(null=True)
    user = prevent_search(
        models.ForeignKey('auth.User',
                          related_name='settings',
                          default=None,
                          null=True,
                          editable=False,
                          on_delete=models.CASCADE))

    def __str__(self):
        try:
            json_value = json.dumps(self.value)
        except (TypeError, ValueError):
            # In the rare case the DB value cannot be serialized to JSON.
            json_value = u'<Invalid JSON>'
        if self.user:
            return u'{} ({}) = {}'.format(self.key, self.user, json_value)
        else:
            return u'{} = {}'.format(self.key, json_value)

    def save(self, *args, **kwargs):
        encrypted = settings_registry.is_setting_encrypted(self.key)
        new_instance = not bool(self.pk)
        # If update_fields has been specified, add our field names to it;
        # if it hasn't been specified, then we're just doing a normal save.
        update_fields = kwargs.get('update_fields', [])
        # When first saving to the database, don't store any encrypted field
        # value, but instead hold it until after the instance is created.
        # Otherwise, store encrypted value to the database.
        if encrypted:
            if new_instance:
                self._saved_value = self.value
                self.value = ''
            else:
                self.value = encrypt_field(self, 'value')
                if 'value' not in update_fields:
                    update_fields.append('value')
        super(Setting, self).save(*args, **kwargs)
        # After saving a new instance for the first time, set the encrypted
        # field and save again.
        if encrypted and new_instance:
            from awx.main.signals import disable_activity_stream

            with disable_activity_stream():
                self.value = self._saved_value
                self.save(update_fields=['value'])

    @classmethod
    def get_cache_key(cls, key):
        return key

    @classmethod
    def get_cache_id_key(cls, key):
        return '{}_ID'.format(key)
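To make the ordering in Setting.save() concrete, here is a rough standalone trace of the two-phase save for an encrypted setting on a brand-new instance. FakeSetting and the '$encrypted$' prefix are invented for illustration; the real encrypt_field needs a saved instance (the primary key participates in the key derivation), which is exactly why the plaintext is stashed on the first save and written encrypted in a second one.

class FakeSetting:
    def __init__(self, key, value):
        self.pk, self.key, self.value = None, key, value

    def save(self, update_fields=None):
        new_instance = self.pk is None
        if new_instance:
            self._saved_value, self.value = self.value, ''  # stash plaintext, store ''
            self.pk = 42                                    # simulate the INSERT
            self.value = self._saved_value
            self.save(update_fields=['value'])              # second save encrypts
        else:
            self.value = '$encrypted$' + str(self.value)    # mock encrypt_field


s = FakeSetting('SOME_SECRET', 'hunter2')
s.save()
assert s.value == '$encrypted$hunter2'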
Example #3
class LaunchTimeConfig(LaunchTimeConfigBase):
    """
    Common model for all objects that save details of a saved launch config:
    WFJT / WJ nodes, schedules, and job launch configs (not all implemented yet)
    """

    class Meta:
        abstract = True

    # Special case prompting fields, even more special than the other ones
    extra_data = JSONBlob(default=dict, blank=True)
    survey_passwords = prevent_search(
        JSONBlob(
            default=dict,
            editable=False,
            blank=True,
        )
    )
    # Credentials needed for non-unified job / unified JT models
    credentials = models.ManyToManyField('Credential', related_name='%(class)ss')

    @property
    def extra_vars(self):
        return self.extra_data

    @extra_vars.setter
    def extra_vars(self, extra_vars):
        self.extra_data = extra_vars

    def display_extra_vars(self):
        """
        Hides fields marked as passwords in survey.
        """
        if hasattr(self, 'survey_passwords') and self.survey_passwords:
            extra_vars = parse_yaml_or_json(self.extra_vars).copy()
            for key, value in self.survey_passwords.items():
                if key in extra_vars:
                    extra_vars[key] = value
            return extra_vars
        else:
            return self.extra_vars

    def display_extra_data(self):
        return self.display_extra_vars()
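The masking in display_extra_vars() reduces to overlaying survey_passwords on top of the parsed extra_vars; a standalone sketch of that overlay (the variable values are invented, and '$encrypted$' stands in for the placeholder stored in survey_passwords):

extra_vars = {'username': 'admin', 'secret_token': 'hunter2'}
survey_passwords = {'secret_token': '$encrypted$', 'unused_password': '$encrypted$'}

masked = dict(extra_vars)
for key, value in survey_passwords.items():
    if key in masked:  # keys absent from extra_vars are simply skipped
        masked[key] = value

assert masked == {'username': 'admin', 'secret_token': '$encrypted$'}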
Example #4
class Notification(CreatedModifiedModel):
    """
    A notification event emitted when a NotificationTemplate is run
    """

    NOTIFICATION_STATE_CHOICES = [
        ('pending', _('Pending')),
        ('successful', _('Successful')),
        ('failed', _('Failed')),
    ]

    class Meta:
        app_label = 'main'
        ordering = ('pk', )

    notification_template = models.ForeignKey('NotificationTemplate',
                                              related_name='notifications',
                                              on_delete=models.CASCADE,
                                              editable=False)
    status = models.CharField(
        max_length=20,
        choices=NOTIFICATION_STATE_CHOICES,
        default='pending',
        editable=False,
    )
    error = models.TextField(
        blank=True,
        default='',
        editable=False,
    )
    notifications_sent = models.IntegerField(
        default=0,
        editable=False,
    )
    notification_type = models.CharField(
        max_length=32,
        choices=NotificationTemplate.NOTIFICATION_TYPE_CHOICES,
    )
    recipients = models.TextField(
        blank=True,
        default='',
        editable=False,
    )
    subject = models.TextField(
        blank=True,
        default='',
        editable=False,
    )
    body = JSONBlob(default=dict, blank=True)

    def get_absolute_url(self, request=None):
        return reverse('api:notification_detail',
                       kwargs={'pk': self.pk},
                       request=request)
Example #5
class SurveyJobMixin(models.Model):
    class Meta:
        abstract = True

    survey_passwords = prevent_search(
        JSONBlob(
            default=dict,
            editable=False,
            blank=True,
        ))

    def display_extra_vars(self):
        """
        Hides fields marked as passwords in survey.
        """
        if self.survey_passwords:
            extra_vars = json.loads(self.extra_vars)
            for key, value in self.survey_passwords.items():
                if key in extra_vars:
                    extra_vars[key] = value
            return json.dumps(extra_vars)
        else:
            return self.extra_vars

    def decrypted_extra_vars(self):
        """
        Decrypts fields marked as passwords in survey.
        """
        if self.survey_passwords:
            extra_vars = json.loads(self.extra_vars)
            for key in self.survey_passwords:
                value = extra_vars.get(key)
                if value and isinstance(value, str) and value.startswith('$encrypted$'):
                    extra_vars[key] = decrypt_value(get_encryption_key('value', pk=None), value)
            return json.dumps(extra_vars)
        else:
            return self.extra_vars
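Note the guard in decrypted_extra_vars(): only non-empty string values that actually carry the '$encrypted$' sentinel are passed to decrypt_value. A standalone sketch of that filter, with decrypt_value mocked (the real awx helper performs actual decryption with the derived key):

def decrypt_value(key, value):
    # Mock stand-in for awx's decrypt_value; pretend stripping the sentinel decrypts.
    return value[len('$encrypted$'):]


extra_vars = {'token': '$encrypted$abc', 'plain': 'x', 'empty': ''}
for k in list(extra_vars):
    v = extra_vars.get(k)
    if v and isinstance(v, str) and v.startswith('$encrypted$'):
        extra_vars[k] = decrypt_value('value', v)

assert extra_vars == {'token': 'abc', 'plain': 'x', 'empty': ''}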
Example #6
class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin,
              CustomVirtualEnvMixin, RelatedJobsMixin):
    """
    A project represents a playbook git repo that can access a set of inventories
    """

    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]
    FIELDS_TO_PRESERVE_AT_COPY = ['labels', 'instance_groups', 'credentials']
    FIELDS_TO_DISCARD_AT_COPY = ['local_path']
    FIELDS_TRIGGER_UPDATE = frozenset(
        ['scm_url', 'scm_branch', 'scm_type', 'scm_refspec'])

    class Meta:
        app_label = 'main'
        ordering = ('id', )

    default_environment = models.ForeignKey(
        'ExecutionEnvironment',
        null=True,
        blank=True,
        default=None,
        on_delete=polymorphic.SET_NULL,
        related_name='+',
        help_text=_(
            'The default execution environment for jobs run using this project.'
        ),
    )
    scm_update_on_launch = models.BooleanField(
        default=False,
        help_text=_(
            'Update the project when a job is launched that uses the project.'
        ),
    )
    scm_update_cache_timeout = models.PositiveIntegerField(
        default=0,
        blank=True,
        help_text=_(
            'The number of seconds after the last project update ran that a new '
            'project update will be launched as a job dependency.'),
    )
    allow_override = models.BooleanField(
        default=False,
        help_text=_(
            'Allow changing the SCM branch or revision in a job template '
            'that uses this project.'),
    )

    scm_revision = models.CharField(
        max_length=1024,
        blank=True,
        default='',
        editable=False,
        verbose_name=_('SCM Revision'),
        help_text=_('The last revision fetched by a project update'),
    )

    playbook_files = JSONBlob(
        default=list,
        blank=True,
        editable=False,
        verbose_name=_('Playbook Files'),
        help_text=_('List of playbooks found in the project'),
    )

    inventory_files = JSONBlob(
        default=list,
        blank=True,
        editable=False,
        verbose_name=_('Inventory Files'),
        help_text=_('Suggested list of content that could be Ansible inventory in the project'),
    )

    admin_role = ImplicitRoleField(parent_role=[
        'organization.project_admin_role',
        'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
    ])

    use_role = ImplicitRoleField(parent_role='admin_role')

    update_role = ImplicitRoleField(parent_role='admin_role')

    read_role = ImplicitRoleField(parent_role=[
        'organization.auditor_role',
        'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
        'use_role',
        'update_role',
    ])

    @classmethod
    def _get_unified_job_class(cls):
        return ProjectUpdate

    @classmethod
    def _get_unified_job_field_names(cls):
        return set(f.name for f in ProjectOptions._meta.fields) | set(
            ['name', 'description', 'organization'])

    def clean_organization(self):
        if self.pk:
            old_org_id = getattr(self, '_prior_values_store', {}).get('organization_id', None)
            if self.organization_id != old_org_id and self.jobtemplates.exists():
                raise ValidationError({'organization': _('Organization cannot be changed when in use by job templates.')})
        return self.organization

    def save(self, *args, **kwargs):
        new_instance = not bool(self.pk)
        pre_save_vals = getattr(self, '_prior_values_store', {})
        # If update_fields has been specified, add our field names to it;
        # if it hasn't been specified, then we're just doing a normal save.
        update_fields = kwargs.get('update_fields', [])
        skip_update = bool(kwargs.pop('skip_update', False))
        # Create auto-generated local path if project uses SCM.
        if self.pk and self.scm_type and not self.local_path.startswith('_'):
            slug_name = slugify(str(self.name)).replace(u'-', u'_')
            self.local_path = u'_%d__%s' % (int(self.pk), slug_name)
            if 'local_path' not in update_fields:
                update_fields.append('local_path')
        # Do the actual save.
        super(Project, self).save(*args, **kwargs)
        if new_instance:
            update_fields = []
            # Generate local_path for SCM after initial save (so we have a PK).
            if self.scm_type and not self.local_path.startswith('_'):
                update_fields.append('local_path')
            if update_fields:
                from awx.main.signals import disable_activity_stream

                with disable_activity_stream():
                    self.save(update_fields=update_fields)
        # If we just created a new project with SCM, start the initial update.
        # Also update if certain SCM-related fields have changed.
        relevant_change = any(
            pre_save_vals.get(fd_name, None) != self._prior_values_store.get(fd_name, None)
            for fd_name in self.FIELDS_TRIGGER_UPDATE
        )
        if (relevant_change or new_instance) and (not skip_update) and self.scm_type:
            self.update()

    def _get_current_status(self):
        if self.scm_type:
            if self.current_job and self.current_job.status:
                return self.current_job.status
            elif not self.last_job:
                return 'never updated'
            # Inherit the child job status (failed or successful)
            else:
                return self.last_job.status
        elif not self.get_project_path():
            return 'missing'
        else:
            return 'ok'

    def _get_last_job_run(self):
        if self.scm_type and self.last_job:
            return self.last_job.finished
        else:
            project_path = self.get_project_path()
            if project_path:
                try:
                    mtime = os.path.getmtime(smart_str(project_path))
                    dt = datetime.datetime.fromtimestamp(mtime)
                    return make_aware(dt, get_default_timezone())
                except OSError:
                    pass

    def _can_update(self):
        return bool(self.scm_type)

    def create_project_update(self, **kwargs):
        return self.create_unified_job(**kwargs)

    @property
    def cache_timeout_blocked(self):
        if not self.last_job_run:
            return False
        if (self.last_job_run + datetime.timedelta(
                seconds=self.scm_update_cache_timeout)) > now():
            return True
        return False

    @property
    def needs_update_on_launch(self):
        if self.scm_type and self.scm_update_on_launch:
            if not self.last_job_run:
                return True
            if (self.last_job_run + datetime.timedelta(
                    seconds=self.scm_update_cache_timeout)) <= now():
                return True
        return False

    @property
    def cache_id(self):
        return str(self.last_job_id)

    @property
    def notification_templates(self):
        base_notification_templates = NotificationTemplate.objects
        error_notification_templates = list(base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_errors=self))
        started_notification_templates = list(base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_started=self))
        success_notification_templates = list(base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_success=self))
        # Get Organization NotificationTemplates
        if self.organization is not None:
            error_notification_templates = set(error_notification_templates + list(base_notification_templates.filter(organization_notification_templates_for_errors=self.organization)))
            started_notification_templates = set(started_notification_templates + list(base_notification_templates.filter(organization_notification_templates_for_started=self.organization)))
            success_notification_templates = set(success_notification_templates + list(base_notification_templates.filter(organization_notification_templates_for_success=self.organization)))
        return dict(
            error=list(error_notification_templates),
            started=list(started_notification_templates),
            success=list(success_notification_templates),
        )

    def get_absolute_url(self, request=None):
        return reverse('api:project_detail',
                       kwargs={'pk': self.pk},
                       request=request)

    '''
    RelatedJobsMixin
    '''

    def _get_related_jobs(self):
        return UnifiedJob.objects.non_polymorphic().filter(
            models.Q(job__project=self)
            | models.Q(projectupdate__project=self))

    def delete(self, *args, **kwargs):
        paths_to_delete = (self.get_project_path(check_if_exists=False),
                           self.get_cache_path())
        r = super(Project, self).delete(*args, **kwargs)
        for path_to_delete in paths_to_delete:
            if self.scm_type and path_to_delete:  # non-manual, concrete path
                from awx.main.tasks.system import delete_project_files

                delete_project_files.delay(path_to_delete)
        return r
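The cache window used by needs_update_on_launch and cache_timeout_blocked is plain datetime arithmetic; a standalone version with invented timestamps (this sketch mirrors needs_update_on_launch for a project that has scm_update_on_launch set):

import datetime


def needs_update(last_job_run, cache_timeout_seconds, now):
    # Never ran -> always update; otherwise update once the cache window has elapsed.
    if last_job_run is None:
        return True
    return last_job_run + datetime.timedelta(seconds=cache_timeout_seconds) <= now


now = datetime.datetime(2024, 1, 1, 12, 0, 0)
ran = now - datetime.timedelta(seconds=90)
assert needs_update(ran, 60, now) is True    # cache expired, update again
assert needs_update(ran, 300, now) is False  # still inside the cache window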
Example #7
File: ha.py Project: mahak/awx
class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
    """A model representing a Queue/Group of AWX Instances."""

    name = models.CharField(max_length=250, unique=True)
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
    instances = models.ManyToManyField(
        'Instance',
        related_name='rampart_groups',
        editable=False,
        help_text=_('Instances that are members of this InstanceGroup'),
    )
    is_container_group = models.BooleanField(default=False)
    credential = models.ForeignKey(
        'Credential',
        related_name='%(class)ss',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )
    pod_spec_override = prevent_search(
        models.TextField(
            blank=True,
            default='',
        ))
    policy_instance_percentage = models.IntegerField(
        default=0,
        help_text=_(
            "Percentage of Instances to automatically assign to this group"))
    policy_instance_minimum = models.IntegerField(
        default=0,
        help_text=_("Static minimum number of Instances to automatically assign to this group"))
    policy_instance_list = JSONBlob(
        default=list,
        blank=True,
        help_text=_("List of exact-match Instances that will always be automatically assigned to this group"))

    POLICY_FIELDS = frozenset(
        ('policy_instance_list', 'policy_instance_minimum',
         'policy_instance_percentage'))

    def get_absolute_url(self, request=None):
        return reverse('api:instance_group_detail',
                       kwargs={'pk': self.pk},
                       request=request)

    @property
    def capacity(self):
        return sum(inst.capacity for inst in self.instances.all())

    @property
    def jobs_running(self):
        return UnifiedJob.objects.filter(status__in=('running', 'waiting'),
                                         instance_group=self).count()

    @property
    def jobs_total(self):
        return UnifiedJob.objects.filter(instance_group=self).count()

    '''
    RelatedJobsMixin
    '''

    def _get_related_jobs(self):
        return UnifiedJob.objects.filter(instance_group=self)

    class Meta:
        app_label = 'main'

    def set_default_policy_fields(self):
        self.policy_instance_list = []
        self.policy_instance_minimum = 0
        self.policy_instance_percentage = 0
Example #8
File: ha.py Project: lj020326/awx
class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
    """A model representing a Queue/Group of AWX Instances."""

    objects = InstanceGroupManager()

    name = models.CharField(max_length=250, unique=True)
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
    instances = models.ManyToManyField(
        'Instance',
        related_name='rampart_groups',
        editable=False,
        help_text=_('Instances that are members of this InstanceGroup'),
    )
    is_container_group = models.BooleanField(default=False)
    credential = models.ForeignKey(
        'Credential',
        related_name='%(class)ss',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )
    pod_spec_override = prevent_search(
        models.TextField(
            blank=True,
            default='',
        ))
    policy_instance_percentage = models.IntegerField(
        default=0,
        help_text=_(
            "Percentage of Instances to automatically assign to this group"))
    policy_instance_minimum = models.IntegerField(
        default=0,
        help_text=_("Static minimum number of Instances to automatically assign to this group"))
    policy_instance_list = JSONBlob(
        default=list,
        blank=True,
        help_text=_("List of exact-match Instances that will always be automatically assigned to this group"))

    POLICY_FIELDS = frozenset(
        ('policy_instance_list', 'policy_instance_minimum',
         'policy_instance_percentage'))

    def get_absolute_url(self, request=None):
        return reverse('api:instance_group_detail',
                       kwargs={'pk': self.pk},
                       request=request)

    @property
    def capacity(self):
        return sum(inst.capacity for inst in self.instances.all())

    @property
    def jobs_running(self):
        return UnifiedJob.objects.filter(status__in=('running', 'waiting'),
                                         instance_group=self).count()

    @property
    def jobs_total(self):
        return UnifiedJob.objects.filter(instance_group=self).count()

    '''
    RelatedJobsMixin
    '''

    def _get_related_jobs(self):
        return UnifiedJob.objects.filter(instance_group=self)

    class Meta:
        app_label = 'main'

    @staticmethod
    def fit_task_to_most_remaining_capacity_instance(task, instances, impact=None, capacity_type=None, add_hybrid_control_cost=False):
        impact = impact if impact else task.task_impact
        capacity_type = capacity_type if capacity_type else task.capacity_type
        instance_most_capacity = None
        most_remaining_capacity = -1
        for i in instances:
            if i.node_type not in (capacity_type, 'hybrid'):
                continue
            would_be_remaining = i.remaining_capacity - impact
            # hybrid nodes _always_ control their own tasks
            if add_hybrid_control_cost and i.node_type == 'hybrid':
                would_be_remaining -= settings.AWX_CONTROL_NODE_TASK_IMPACT
            if would_be_remaining >= 0 and (
                    instance_most_capacity is None
                    or would_be_remaining > most_remaining_capacity):
                instance_most_capacity = i
                most_remaining_capacity = would_be_remaining
        return instance_most_capacity

    @staticmethod
    def find_largest_idle_instance(instances, capacity_type='execution'):
        largest_instance = None
        for i in instances:
            if i.node_type not in (capacity_type, 'hybrid'):
                continue
            if i.jobs_running == 0:
                if largest_instance is None:
                    largest_instance = i
                elif i.capacity > largest_instance.capacity:
                    largest_instance = i
        return largest_instance

    def set_default_policy_fields(self):
        self.policy_instance_list = []
        self.policy_instance_minimum = 0
        self.policy_instance_percentage = 0
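The selection logic in fit_task_to_most_remaining_capacity_instance is independent of Django; a sketch of the same selection with namedtuple stand-ins for Instance (the hybrid control-cost adjustment is omitted, and all capacity numbers are invented):

from collections import namedtuple

Inst = namedtuple('Inst', ['name', 'node_type', 'remaining_capacity'])


def fit(instances, impact, capacity_type='execution'):
    # Pick the matching-type (or hybrid) instance with the most capacity left
    # after placing the task; None if nothing can absorb it.
    best, best_remaining = None, -1
    for i in instances:
        if i.node_type not in (capacity_type, 'hybrid'):
            continue
        would_be_remaining = i.remaining_capacity - impact
        if would_be_remaining >= 0 and would_be_remaining > best_remaining:
            best, best_remaining = i, would_be_remaining
    return best


instances = [Inst('a', 'execution', 10), Inst('b', 'hybrid', 25), Inst('c', 'control', 99)]
assert fit(instances, impact=8).name == 'b'  # most headroom after placement
assert fit(instances, impact=30) is None     # no instance can absorb the task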
Example #9
class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin,
          TaskManagerJobMixin, CustomVirtualEnvMixin, WebhookMixin):
    """
    A job applies a project (with playbook) to an inventory source with a given
    credential.  It represents a single invocation of ansible-playbook with the
    given parameters.
    """
    class Meta:
        app_label = 'main'
        ordering = ('id', )

    job_template = models.ForeignKey(
        'JobTemplate',
        related_name='jobs',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )
    hosts = models.ManyToManyField(
        'Host',
        related_name='jobs',
        editable=False,
        through='JobHostSummary',
    )
    artifacts = JSONBlob(
        default=dict,
        blank=True,
        editable=False,
    )
    scm_revision = models.CharField(
        max_length=1024,
        blank=True,
        default='',
        editable=False,
        verbose_name=_('SCM Revision'),
        help_text=_(
            'The SCM Revision from the Project used for this job, if available'
        ),
    )
    project_update = models.ForeignKey(
        'ProjectUpdate',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
        help_text=_('The SCM Refresh task used to make sure the playbooks were available for the job run'),
    )
    job_slice_number = models.PositiveIntegerField(
        blank=True,
        default=0,
        help_text=_(
            "If part of a sliced job, the ID of the inventory slice operated on. "
            "If not part of sliced job, parameter is not used."),
    )
    job_slice_count = models.PositiveIntegerField(
        blank=True,
        default=1,
        help_text=_(
            "If ran as part of sliced jobs, the total number of slices. "
            "If 1, job is not part of a sliced job."),
    )

    def _get_parent_field_name(self):
        return 'job_template'

    @classmethod
    def _get_task_class(cls):
        from awx.main.tasks.jobs import RunJob

        return RunJob

    def _global_timeout_setting(self):
        return 'DEFAULT_JOB_TIMEOUT'

    @classmethod
    def _get_unified_job_template_class(cls):
        return JobTemplate

    def get_absolute_url(self, request=None):
        return reverse('api:job_detail',
                       kwargs={'pk': self.pk},
                       request=request)

    def get_ui_url(self):
        return urljoin(settings.TOWER_URL_BASE,
                       "/#/jobs/playbook/{}".format(self.pk))

    @property
    def event_class(self):
        if self.has_unpartitioned_events:
            return UnpartitionedJobEvent
        return JobEvent

    def copy_unified_job(self, **new_prompts):
        # Needed for job slice relaunch consistency; do not re-spawn a workflow job,
        # target the same slice as the original job
        new_prompts['_prevent_slicing'] = True
        new_prompts.setdefault('_eager_fields', {})
        new_prompts['_eager_fields']['job_slice_number'] = self.job_slice_number
        new_prompts['_eager_fields']['job_slice_count'] = self.job_slice_count
        return super(Job, self).copy_unified_job(**new_prompts)

    def get_passwords_needed_to_start(self):
        return self.passwords_needed_to_start

    def _get_hosts(self, **kwargs):
        Host = JobHostSummary._meta.get_field('host').related_model
        kwargs['job_host_summaries__job__pk'] = self.pk
        return Host.objects.filter(**kwargs)

    def retry_qs(self, status):
        """
        Returns Host queryset that will be used to produce the `limit`
        field in a retry on a subset of hosts
        """
        kwargs = {}
        if status == 'all':
            pass
        elif status == 'failed':
            # Special case for parity with Ansible .retry files
            kwargs['job_host_summaries__failed'] = True
        elif status in ['ok', 'changed', 'unreachable']:
            if status == 'unreachable':
                status_field = 'dark'
            else:
                status_field = status
            kwargs['job_host_summaries__{}__gt'.format(status_field)] = 0
        else:
            raise ParseError(
                _('{status_value} is not a valid status option.').format(
                    status_value=status))
        return self._get_hosts(**kwargs)

    def _get_task_impact(self):
        if self.launch_type == 'callback':
            count_hosts = 2
        else:
            if self.inventory is not None:
                count_hosts = self.inventory.total_hosts
                if self.job_slice_count > 1:
                    # Integer division intentional
                    count_hosts = (count_hosts + self.job_slice_count - self.job_slice_number) // self.job_slice_count
            else:
                # If for some reason we can't count the hosts, assume the impact is the fork count
                count_hosts = 5 if self.forks == 0 else self.forks
        return min(count_hosts, 5 if self.forks == 0 else self.forks) + 1

    @property
    def successful_hosts(self):
        return self._get_hosts(job_host_summaries__ok__gt=0)

    @property
    def failed_hosts(self):
        return self._get_hosts(job_host_summaries__failures__gt=0)

    @property
    def changed_hosts(self):
        return self._get_hosts(job_host_summaries__changed__gt=0)

    @property
    def dark_hosts(self):
        return self._get_hosts(job_host_summaries__dark__gt=0)

    @property
    def unreachable_hosts(self):
        return self.dark_hosts

    @property
    def skipped_hosts(self):
        return self._get_hosts(job_host_summaries__skipped__gt=0)

    @property
    def processed_hosts(self):
        return self._get_hosts(job_host_summaries__processed__gt=0)

    @property
    def ignored_hosts(self):
        return self._get_hosts(job_host_summaries__ignored__gt=0)

    @property
    def rescued_hosts(self):
        return self._get_hosts(job_host_summaries__rescued__gt=0)

    def notification_data(self, block=5):
        data = super(Job, self).notification_data()
        all_hosts = {}
        # NOTE: Probably related to job event slowness, remove at some point -matburt
        if block and self.status != 'running':
            summaries = self.job_host_summaries.all()
            while block > 0 and not len(summaries):
                time.sleep(1)
                block -= 1
                summaries = self.job_host_summaries.all()  # re-query; a cached queryset would never refresh
        else:
            summaries = self.job_host_summaries.all()
        for h in summaries:
            all_hosts[h.host_name] = dict(
                failed=h.failed,
                changed=h.changed,
                dark=h.dark,
                failures=h.failures,
                ok=h.ok,
                processed=h.processed,
                skipped=h.skipped,
                rescued=h.rescued,
                ignored=h.ignored,
            )
        data.update(
            dict(
                inventory=self.inventory.name if self.inventory else None,
                project=self.project.name if self.project else None,
                playbook=self.playbook,
                credential=getattr(self.machine_credential, 'name', None),
                limit=self.limit,
                extra_vars=self.display_extra_vars(),
                hosts=all_hosts,
            ))
        return data

    def _resources_sufficient_for_launch(self):
        return not (self.inventory_id is None or self.project_id is None)

    def display_artifacts(self):
        """
        Hides artifacts if they are marked as no_log type artifacts.
        """
        artifacts = self.artifacts
        if artifacts.get('_ansible_no_log', False):
            return "$hidden due to Ansible no_log flag$"
        return artifacts

    def get_effective_artifacts(self, **kwargs):
        """Return unified job artifacts (from set_stats) to pass downstream in workflows"""
        if isinstance(self.artifacts, dict):
            return self.artifacts
        return {}

    @property
    def is_container_group_task(self):
        return bool(self.instance_group
                    and self.instance_group.is_container_group)

    @property
    def preferred_instance_groups(self):
        if self.organization is not None:
            organization_groups = [
                x for x in self.organization.instance_groups.all()
            ]
        else:
            organization_groups = []
        if self.inventory is not None:
            inventory_groups = [
                x for x in self.inventory.instance_groups.all()
            ]
        else:
            inventory_groups = []
        if self.job_template is not None:
            template_groups = [
                x for x in self.job_template.instance_groups.all()
            ]
        else:
            template_groups = []
        selected_groups = template_groups + inventory_groups + organization_groups
        if not selected_groups:
            return self.global_instance_groups
        return selected_groups

    def awx_meta_vars(self):
        r = super(Job, self).awx_meta_vars()
        if self.project:
            for name in JOB_VARIABLE_PREFIXES:
                r['{}_project_revision'.format(
                    name)] = self.project.scm_revision
                r['{}_project_scm_branch'.format(
                    name)] = self.project.scm_branch
        if self.scm_branch:
            for name in JOB_VARIABLE_PREFIXES:
                r['{}_job_scm_branch'.format(name)] = self.scm_branch
        if self.job_template:
            for name in JOB_VARIABLE_PREFIXES:
                r['{}_job_template_id'.format(name)] = self.job_template.pk
                r['{}_job_template_name'.format(name)] = self.job_template.name
        return r

    '''
    JobNotificationMixin
    '''

    def get_notification_templates(self):
        if not self.job_template:
            return NotificationTemplate.objects.none()
        return self.job_template.notification_templates

    def get_notification_friendly_name(self):
        return "Job"

    def _get_inventory_hosts(self,
                             only=[
                                 'name', 'ansible_facts',
                                 'ansible_facts_modified', 'modified',
                                 'inventory_id'
                             ]):
        if not self.inventory:
            return []
        return self.inventory.hosts.only(*only)

    def start_job_fact_cache(self,
                             destination,
                             modification_times,
                             timeout=None):
        self.log_lifecycle("start_job_fact_cache")
        os.makedirs(destination, mode=0o700)
        hosts = self._get_inventory_hosts()
        if timeout is None:
            timeout = settings.ANSIBLE_FACT_CACHE_TIMEOUT
        if timeout > 0:
            # exclude hosts with fact data older than `settings.ANSIBLE_FACT_CACHE_TIMEOUT` seconds
            timeout = now() - datetime.timedelta(seconds=timeout)
            hosts = hosts.filter(ansible_facts_modified__gte=timeout)
        for host in hosts:
            filepath = os.sep.join(map(str, [destination, host.name]))
            if not os.path.realpath(filepath).startswith(destination):
                system_tracking_logger.error(
                    'facts for host {} could not be cached'.format(
                        smart_str(host.name)))
                continue
            try:
                with codecs.open(filepath, 'w', encoding='utf-8') as f:
                    os.chmod(f.name, 0o600)
                    json.dump(host.ansible_facts, f)
            except IOError:
                system_tracking_logger.error(
                    'facts for host {} could not be cached'.format(
                        smart_str(host.name)))
                continue
            # make note of the time we wrote the file so we can check if it changed later
            modification_times[filepath] = os.path.getmtime(filepath)

    def finish_job_fact_cache(self, destination, modification_times):
        self.log_lifecycle("finish_job_fact_cache")
        for host in self._get_inventory_hosts():
            filepath = os.sep.join(map(str, [destination, host.name]))
            if not os.path.realpath(filepath).startswith(destination):
                system_tracking_logger.error(
                    'facts for host {} could not be cached'.format(
                        smart_str(host.name)))
                continue
            if os.path.exists(filepath):
                # If the file changed since we wrote it pre-playbook run...
                modified = os.path.getmtime(filepath)
                if modified > modification_times.get(filepath, 0):
                    with codecs.open(filepath, 'r', encoding='utf-8') as f:
                        try:
                            ansible_facts = json.load(f)
                        except ValueError:
                            continue
                        host.ansible_facts = ansible_facts
                        host.ansible_facts_modified = now()
                        host.save()
                        system_tracking_logger.info(
                            'New fact for inventory {} host {}'.format(
                                smart_str(host.inventory.name),
                                smart_str(host.name)),
                            extra=dict(
                                inventory_id=host.inventory.id,
                                host_name=host.name,
                                ansible_facts=host.ansible_facts,
                                ansible_facts_modified=host.ansible_facts_modified.isoformat(),
                                job_id=self.id,
                            ),
                        )
            else:
                # if the file goes missing, ansible removed it (likely via clear_facts)
                host.ansible_facts = {}
                host.ansible_facts_modified = now()
                system_tracking_logger.info(
                    'Facts cleared for inventory {} host {}'.format(
                        smart_str(host.inventory.name), smart_str(host.name)))
                host.save()
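A worked pass through the _get_task_impact arithmetic above, with invented numbers: a 30-host inventory sliced 4 ways, slice number 2, forks left at 0:

inventory_hosts, slice_count, slice_number, forks = 30, 4, 2, 0

count_hosts = (inventory_hosts + slice_count - slice_number) // slice_count  # 32 // 4 = 8
impact = min(count_hosts, 5 if forks == 0 else forks) + 1                    # min(8, 5) + 1 = 6
assert (count_hosts, impact) == (8, 6)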
Example #10
class WorkflowJobNode(WorkflowNodeBase):
    job = models.OneToOneField(
        'UnifiedJob',
        related_name='unified_job_node',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )
    workflow_job = models.ForeignKey(
        'WorkflowJob',
        related_name='workflow_job_nodes',
        blank=True,
        null=True,
        default=None,
        on_delete=models.CASCADE,
    )
    ancestor_artifacts = JSONBlob(
        default=dict,
        blank=True,
        editable=False,
    )
    do_not_run = models.BooleanField(
        default=False,
        help_text=_("Indicates that a job will not be created when True. Workflow runtime "
                    "semantics will mark this True if the node is in a path that will "
                    "decidedly not be run. A value of False means the node may not run."),
    )
    identifier = models.CharField(
        max_length=512,
        blank=True,  # blank denotes pre-migration job nodes
        help_text=_('An identifier corresponding to the workflow job template node that this node was created from.'),
    )

    class Meta:
        app_label = 'main'
        indexes = [
            models.Index(fields=["identifier", "workflow_job"]),
            models.Index(fields=['identifier']),
        ]

    @property
    def event_processing_finished(self):
        return True

    def get_absolute_url(self, request=None):
        return reverse('api:workflow_job_node_detail',
                       kwargs={'pk': self.pk},
                       request=request)

    def prompts_dict(self, *args, **kwargs):
        r = super(WorkflowJobNode, self).prompts_dict(*args, **kwargs)
        # Explanation: WFJT extra_vars still break the pattern, so they are not
        # put through prompts processing, but inventory and the other fields are
        # only accepted if the JT prompts for them, so they go through this mechanism
        if self.workflow_job:
            if self.workflow_job.inventory_id:
                # workflow job inventory takes precedence
                r['inventory'] = self.workflow_job.inventory
            if self.workflow_job.char_prompts:
                r.update(self.workflow_job.char_prompts)
        return r

    def get_job_kwargs(self):
        """
        In advance of creating a new unified job as part of a workflow,
        this method builds the attributes to use.
        It alters the node by saving its updated version of
        ancestor_artifacts, making it available to subsequent nodes.
        """
        # reject/accept prompted fields
        data = {}
        ujt_obj = self.unified_job_template
        if ujt_obj is not None:
            # MERGE note: move this to prompts_dict method on node when merging
            # with the workflow inventory branch
            prompts_data = self.prompts_dict()
            if isinstance(ujt_obj, WorkflowJobTemplate):
                if self.workflow_job.extra_vars:
                    prompts_data.setdefault('extra_vars', {})
                    prompts_data['extra_vars'].update(
                        self.workflow_job.extra_vars_dict)
            accepted_fields, ignored_fields, errors = ujt_obj._accept_or_ignore_job_kwargs(**prompts_data)
            if errors:
                logger.info(
                    _('Bad launch configuration starting template {template_pk} as part of '
                      'workflow {workflow_pk}. Errors:\n{error_text}').format(
                        template_pk=ujt_obj.pk, workflow_pk=self.pk, error_text=errors))
            data.update(accepted_fields)  # missing fields are handled in the scheduler
            try:
                # config saved on the workflow job itself
                wj_config = self.workflow_job.launch_config
            except ObjectDoesNotExist:
                wj_config = None
            if wj_config:
                accepted_fields, ignored_fields, errors = ujt_obj._accept_or_ignore_job_kwargs(**wj_config.prompts_dict())
                accepted_fields.pop('extra_vars', None)  # merge handled with other extra_vars later
                data.update(accepted_fields)
        # build ancestor artifacts, save them to node model for later
        aa_dict = {}
        is_root_node = True
        for parent_node in self.get_parent_nodes():
            is_root_node = False
            aa_dict.update(parent_node.ancestor_artifacts)
            if parent_node.job:
                aa_dict.update(parent_node.job.get_effective_artifacts(parents_set=set([self.workflow_job_id])))
        if aa_dict and not is_root_node:
            self.ancestor_artifacts = aa_dict
            self.save(update_fields=['ancestor_artifacts'])
        # process password list
        password_dict = {}
        if '_ansible_no_log' in aa_dict:
            for key in aa_dict:
                if key != '_ansible_no_log':
                    password_dict[key] = REPLACE_STR
        if self.workflow_job.survey_passwords:
            password_dict.update(self.workflow_job.survey_passwords)
        if self.survey_passwords:
            password_dict.update(self.survey_passwords)
        if password_dict:
            data['survey_passwords'] = password_dict
        # process extra_vars
        extra_vars = data.get('extra_vars', {})
        if ujt_obj and isinstance(ujt_obj, (JobTemplate, WorkflowJobTemplate)):
            if aa_dict:
                functional_aa_dict = copy(aa_dict)
                functional_aa_dict.pop('_ansible_no_log', None)
                extra_vars.update(functional_aa_dict)
        if ujt_obj and isinstance(ujt_obj, JobTemplate):
            # Workflow Job extra_vars higher precedence than ancestor artifacts
            if self.workflow_job and self.workflow_job.extra_vars:
                extra_vars.update(self.workflow_job.extra_vars_dict)
        if extra_vars:
            data['extra_vars'] = extra_vars
        # ensure that unified jobs created by WorkflowJobs are marked
        data['_eager_fields'] = {'launch_type': 'workflow'}
        if self.workflow_job and self.workflow_job.created_by:
            data['_eager_fields']['created_by'] = self.workflow_job.created_by
        # Extra processing in the case that this is a slice job
        if 'job_slice' in self.ancestor_artifacts and is_root_node:
            data['_eager_fields']['allow_simultaneous'] = True
            data['_eager_fields']['job_slice_number'] = self.ancestor_artifacts['job_slice']
            data['_eager_fields']['job_slice_count'] = self.workflow_job.workflow_job_nodes.count()
            data['_prevent_slicing'] = True
        return data
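The extra_vars precedence established in get_job_kwargs is: accepted prompt fields first, then ancestor artifacts, then the workflow job's own extra_vars on top. A standalone sketch of that merge (the variable values are invented; prompt handling and the REPLACE_STR password masking are omitted):

aa_dict = {'region': 'us-east', 'color': 'blue', '_ansible_no_log': True}
workflow_extra_vars = {'color': 'green'}

functional_aa_dict = dict(aa_dict)
functional_aa_dict.pop('_ansible_no_log', None)  # the no_log marker never leaks into vars

extra_vars = {}
extra_vars.update(functional_aa_dict)
extra_vars.update(workflow_extra_vars)  # workflow job wins on conflicts

assert extra_vars == {'region': 'us-east', 'color': 'green'}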
Example #11
class BaseCommandEvent(CreatedModifiedModel):
    """
    An event/message logged from a command for each host.
    """

    VALID_KEYS = ['event_data', 'created', 'counter', 'uuid', 'stdout', 'start_line', 'end_line', 'verbosity']
    WRAPUP_EVENT = 'EOF'

    class Meta:
        abstract = True

    event_data = JSONBlob(default=dict, blank=True)
    uuid = models.CharField(
        max_length=1024,
        default='',
        editable=False,
    )
    counter = models.PositiveIntegerField(
        default=0,
        editable=False,
    )
    stdout = models.TextField(
        default='',
        editable=False,
    )
    verbosity = models.PositiveIntegerField(
        default=0,
        editable=False,
    )
    start_line = models.PositiveIntegerField(
        default=0,
        editable=False,
    )
    end_line = models.PositiveIntegerField(
        default=0,
        editable=False,
    )
    created = models.DateTimeField(
        null=True,
        default=None,
        editable=False,
    )
    modified = models.DateTimeField(
        default=None,
        editable=False,
        db_index=True,
    )

    def __str__(self):
        return u'%s @ %s' % (self.get_event_display(), self.created.isoformat())

    @classmethod
    def create_from_data(cls, **kwargs):
        #
        # ⚠️  D-D-D-DANGER ZONE ⚠️
        # This function is called by the callback receiver *once* for *every
        # event* emitted by Ansible as a playbook runs.  That means that
        # changes to this function are _very_ susceptible to introducing
        # performance regressions (which the user will experience as "my
        # playbook stdout takes too long to show up"), *especially* code which
        # might invoke additional database queries per event.
        #
        # Proceed with caution!
        #
        # Convert the datetime for the event's creation
        # appropriately, and include a time zone for it.
        #
        # In the event of any issue, throw it out, and Django will just save
        # the current time.
        try:
            if not isinstance(kwargs['created'], datetime.datetime):
                kwargs['created'] = parse_datetime(kwargs['created'])
            if not kwargs['created'].tzinfo:
                kwargs['created'] = kwargs['created'].replace(tzinfo=utc)
        except (KeyError, ValueError):
            kwargs.pop('created', None)

        sanitize_event_keys(kwargs, cls.VALID_KEYS)
        kwargs.pop('workflow_job_id', None)
        event = cls(**kwargs)
        event._update_from_event_data()
        return event

    def get_event_display(self):
        """
        Needed for __str__
        """
        return self.event

    def get_event_display2(self):
        return self.get_event_display()

    def get_host_status_counts(self):
        return create_host_status_counts(getattr(self, 'event_data', {}))

    def _update_from_event_data(self):
        pass
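The 'created' handling in create_from_data, reduced to standalone form. datetime.fromisoformat is used here as a stdlib stand-in for Django's parse_datetime, so the parsing behavior is only approximate; the fallback matches the comment above: on any problem, drop the key and let Django stamp the current time.

import datetime
from datetime import timezone


def normalize_created(kwargs):
    try:
        if not isinstance(kwargs['created'], datetime.datetime):
            kwargs['created'] = datetime.datetime.fromisoformat(kwargs['created'])
        if not kwargs['created'].tzinfo:
            kwargs['created'] = kwargs['created'].replace(tzinfo=timezone.utc)
    except (KeyError, ValueError):
        kwargs.pop('created', None)  # fall back to letting Django save "now"
    return kwargs


assert normalize_created({'created': 'not-a-date'}) == {}
out = normalize_created({'created': '2024-01-01T00:00:00'})
assert out['created'].tzinfo is timezone.utc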
Example #12
class BasePlaybookEvent(CreatedModifiedModel):
    """
    An event/message logged from a playbook callback for each host.
    """

    VALID_KEYS = [
        'event',
        'event_data',
        'playbook',
        'play',
        'role',
        'task',
        'created',
        'counter',
        'uuid',
        'stdout',
        'parent_uuid',
        'start_line',
        'end_line',
        'host_id',
        'host_name',
        'verbosity',
    ]
    WRAPUP_EVENT = 'playbook_on_stats'

    class Meta:
        abstract = True

    # Playbook events will be structured to form the following hierarchy:
    # - playbook_on_start (once for each playbook file)
    #   - playbook_on_vars_prompt (for each play, but before play starts, we
    #     currently don't handle responding to these prompts)
    #   - playbook_on_play_start (once for each play)
    #     - playbook_on_import_for_host (not logged, not used for v2)
    #     - playbook_on_not_import_for_host (not logged, not used for v2)
    #     - playbook_on_no_hosts_matched
    #     - playbook_on_no_hosts_remaining
    #     - playbook_on_include (only v2 - only used for handlers?)
    #     - playbook_on_setup (not used for v2)
    #       - runner_on*
    #     - playbook_on_task_start (once for each task within a play)
    #       - runner_on_failed
    #       - runner_on_start
    #       - runner_on_ok
    #       - runner_on_error (not used for v2)
    #       - runner_on_skipped
    #       - runner_on_unreachable
    #       - runner_on_no_hosts (not used for v2)
    #       - runner_on_async_poll (not used for v2)
    #       - runner_on_async_ok (not used for v2)
    #       - runner_on_async_failed (not used for v2)
    #       - runner_on_file_diff (v2 event is v2_on_file_diff)
    #       - runner_item_on_ok (v2 only)
    #       - runner_item_on_failed (v2 only)
    #       - runner_item_on_skipped (v2 only)
    #       - runner_retry (v2 only)
    #     - playbook_on_notify (once for each notification from the play, not used for v2)
    #   - playbook_on_stats

    EVENT_TYPES = [
        # (level, event, verbose name, failed)
        (3, 'runner_on_failed', _('Host Failed'), True),
        (3, 'runner_on_start', _('Host Started'), False),
        (3, 'runner_on_ok', _('Host OK'), False),
        (3, 'runner_on_error', _('Host Failure'), True),
        (3, 'runner_on_skipped', _('Host Skipped'), False),
        (3, 'runner_on_unreachable', _('Host Unreachable'), True),
        (3, 'runner_on_no_hosts', _('No Hosts Remaining'), False),
        (3, 'runner_on_async_poll', _('Host Polling'), False),
        (3, 'runner_on_async_ok', _('Host Async OK'), False),
        (3, 'runner_on_async_failed', _('Host Async Failure'), True),
        (3, 'runner_item_on_ok', _('Item OK'), False),
        (3, 'runner_item_on_failed', _('Item Failed'), True),
        (3, 'runner_item_on_skipped', _('Item Skipped'), False),
        (3, 'runner_retry', _('Host Retry'), False),
        # Tower does not yet support --diff mode.
        (3, 'runner_on_file_diff', _('File Difference'), False),
        (0, 'playbook_on_start', _('Playbook Started'), False),
        (2, 'playbook_on_notify', _('Running Handlers'), False),
        (2, 'playbook_on_include', _('Including File'), False),
        (2, 'playbook_on_no_hosts_matched', _('No Hosts Matched'), False),
        (2, 'playbook_on_no_hosts_remaining', _('No Hosts Remaining'), False),
        (2, 'playbook_on_task_start', _('Task Started'), False),
        # Tower does not yet support vars_prompt (and will probably hang :)
        (1, 'playbook_on_vars_prompt', _('Variables Prompted'), False),
        (2, 'playbook_on_setup', _('Gathering Facts'), False),
        (2, 'playbook_on_import_for_host', _('internal: on Import for Host'), False),
        (2, 'playbook_on_not_import_for_host', _('internal: on Not Import for Host'), False),
        (1, 'playbook_on_play_start', _('Play Started'), False),
        (1, 'playbook_on_stats', _('Playbook Complete'), False),
        # Additional event types for captured stdout not directly related to
        # playbook or runner events.
        (0, 'debug', _('Debug'), False),
        (0, 'verbose', _('Verbose'), False),
        (0, 'deprecated', _('Deprecated'), False),
        (0, 'warning', _('Warning'), False),
        (0, 'system_warning', _('System Warning'), False),
        (0, 'error', _('Error'), True),
    ]
    FAILED_EVENTS = [x[1] for x in EVENT_TYPES if x[3]]
    EVENT_CHOICES = [(x[1], x[2]) for x in EVENT_TYPES]
    LEVEL_FOR_EVENT = dict([(x[1], x[0]) for x in EVENT_TYPES])

    event = models.CharField(
        max_length=100,
        choices=EVENT_CHOICES,
    )
    event_data = JSONBlob(default=dict, blank=True)
    failed = models.BooleanField(
        default=False,
        editable=False,
    )
    changed = models.BooleanField(
        default=False,
        editable=False,
    )
    uuid = models.CharField(
        max_length=1024,
        default='',
        editable=False,
    )
    playbook = models.CharField(
        max_length=1024,
        default='',
        editable=False,
    )
    play = models.CharField(
        max_length=1024,
        default='',
        editable=False,
    )
    role = models.CharField(
        max_length=1024,
        default='',
        editable=False,
    )
    task = models.CharField(
        max_length=1024,
        default='',
        editable=False,
    )
    counter = models.PositiveIntegerField(
        default=0,
        editable=False,
    )
    stdout = models.TextField(
        default='',
        editable=False,
    )
    verbosity = models.PositiveIntegerField(
        default=0,
        editable=False,
    )
    start_line = models.PositiveIntegerField(
        default=0,
        editable=False,
    )
    end_line = models.PositiveIntegerField(
        default=0,
        editable=False,
    )
    created = models.DateTimeField(
        null=True,
        default=None,
        editable=False,
    )
    modified = models.DateTimeField(
        default=None,
        editable=False,
        db_index=True,
    )

    @property
    def event_level(self):
        return self.LEVEL_FOR_EVENT.get(self.event, 0)

    def get_host_status_counts(self):
        return create_host_status_counts(getattr(self, 'event_data', {}))

    def get_event_display2(self):
        msg = self.get_event_display()
        if self.event == 'playbook_on_play_start':
            if self.play:
                msg = "%s (%s)" % (msg, self.play)
        elif self.event == 'playbook_on_task_start':
            if self.task:
                if self.event_data.get('is_conditional', False):
                    msg = 'Handler Notified'
                if self.role:
                    msg = '%s (%s | %s)' % (msg, self.role, self.task)
                else:
                    msg = "%s (%s)" % (msg, self.task)

        # Change display for runner events triggered by async polling.  Some of
        # these events may not be shown in most cases, due to filtering them out
        # of the job event queryset returned to the user.
        res = self.event_data.get('res', {})
        # Fix for existing records created before the save-time workaround
        # that changes async_ok to async_failed was added.
        if self.event == 'runner_on_async_ok':
            try:
                if res.get('failed', False) or res.get('rc', 0) != 0:
                    msg = 'Host Async Failed'
            except (AttributeError, TypeError):
                pass
        # Runner events with ansible_job_id are part of async starting/polling.
        if self.event in ('runner_on_ok', 'runner_on_failed'):
            try:
                module_name = res['invocation']['module_name']
                job_id = res['ansible_job_id']
            except (TypeError, KeyError, AttributeError):
                module_name = None
                job_id = None
            if module_name and job_id:
                if module_name == 'async_status':
                    msg = 'Host Async Checking'
                else:
                    msg = 'Host Async Started'
        # Handle both 1.2 on_failed and 1.3+ on_async_failed events when an
        # async task times out.
        if self.event in ('runner_on_failed', 'runner_on_async_failed'):
            try:
                if res['msg'] == 'timed out':
                    msg = 'Host Async Timeout'
            except (TypeError, KeyError, AttributeError):
                pass
        return msg

    def _update_from_event_data(self):
        # Update event model fields from event data.
        event_data = self.event_data
        res = event_data.get('res', None)
        if self.event in self.FAILED_EVENTS and not event_data.get('ignore_errors', False):
            self.failed = True
        if isinstance(res, dict):
            if res.get('changed', False):
                self.changed = True
        if self.event == 'playbook_on_stats':
            try:
                failures_dict = event_data.get('failures', {})
                dark_dict = event_data.get('dark', {})
                self.failed = bool(sum(failures_dict.values()) + sum(dark_dict.values()))
                changed_dict = event_data.get('changed', {})
                self.changed = bool(sum(changed_dict.values()))
            except (AttributeError, TypeError):
                pass

            if isinstance(self, JobEvent):
                try:
                    job = self.job
                except ObjectDoesNotExist:
                    job = None
                if job:
                    hostnames = self._hostnames()
                    self._update_host_summary_from_stats(set(hostnames))
                    if job.inventory:
                        try:
                            job.inventory.update_computed_fields()
                        except DatabaseError:
                            logger.exception('Computed fields database error saving event {}'.format(self.pk))

                    # find parent links and propagate changed=True and failed=True
                    changed = (
                        job.get_event_queryset()
                        .filter(changed=True)
                        .exclude(parent_uuid=None)
                        .only('parent_uuid')
                        .values_list('parent_uuid', flat=True)
                        .distinct()
                    )  # noqa
                    failed = (
                        job.get_event_queryset()
                        .filter(failed=True)
                        .exclude(parent_uuid=None)
                        .only('parent_uuid')
                        .values_list('parent_uuid', flat=True)
                        .distinct()
                    )  # noqa

                    job.get_event_queryset().filter(uuid__in=changed).update(changed=True)
                    job.get_event_queryset().filter(uuid__in=failed).update(failed=True)

        for field in ('playbook', 'play', 'task', 'role'):
            value = force_str(event_data.get(field, '')).strip()
            if value != getattr(self, field):
                setattr(self, field, value)
        if settings.LOG_AGGREGATOR_ENABLED:
            analytics_logger.info('Event data saved.', extra=dict(python_objects=dict(job_event=self)))

    @classmethod
    def create_from_data(cls, **kwargs):
        #
        # ⚠️  D-D-D-DANGER ZONE ⚠️
        # This function is called by the callback receiver *once* for *every
        # event* emitted by Ansible as a playbook runs.  That means that
        # changes to this function are _very_ susceptible to introducing
        # performance regressions (which the user will experience as "my
        # playbook stdout takes too long to show up"), *especially* code which
        # might invoke additional database queries per event.
        #
        # Proceed with caution!
        #
        pk = None
        for key in ('job_id', 'project_update_id'):
            if key in kwargs:
                pk = key
        if pk is None:
            # payload must contain either a job_id or a project_update_id
            return

        # Convert the datetime for the job event's creation appropriately,
        # and include a time zone for it.
        #
        # In the event of any issue, throw it out, and Django will just save
        # the current time.
        try:
            if not isinstance(kwargs['created'], datetime.datetime):
                kwargs['created'] = parse_datetime(kwargs['created'])
            if not kwargs['created'].tzinfo:
                kwargs['created'] = kwargs['created'].replace(tzinfo=utc)
        except (KeyError, ValueError):
            kwargs.pop('created', None)

        # same as above, for job_created
        # TODO: this block is identical to the one above; if the approach works,
        # convert both to a for loop
        try:
            if not isinstance(kwargs['job_created'], datetime.datetime):
                kwargs['job_created'] = parse_datetime(kwargs['job_created'])
            if not kwargs['job_created'].tzinfo:
                kwargs['job_created'] = kwargs['job_created'].replace(tzinfo=utc)
        except (KeyError, ValueError):
            kwargs.pop('job_created', None)

        host_map = kwargs.pop('host_map', {})

        sanitize_event_keys(kwargs, cls.VALID_KEYS)
        workflow_job_id = kwargs.pop('workflow_job_id', None)
        event = cls(**kwargs)
        if workflow_job_id:
            setattr(event, 'workflow_job_id', workflow_job_id)
        # shouldn't job_created _always_ be present?
        # if it's not, how could we save the event to the db?
        job_created = kwargs.pop('job_created', None)
        if job_created:
            setattr(event, 'job_created', job_created)
        setattr(event, 'host_map', host_map)
        event._update_from_event_data()
        return event

    @property
    def job_verbosity(self):
        return 0
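
A minimal usage sketch of the factory above (not from the source; the payload keys and values are hypothetical, and it assumes the concrete JobEvent model inherits create_from_data, as the isinstance check above suggests, and that 'runner_on_failed' is registered in EVENT_TYPES, whose full list is truncated in this excerpt):

# Hypothetical payload; only job_id (or project_update_id) is required for
# create_from_data() to return anything, and these keys are assumed to
# survive sanitize_event_keys(cls.VALID_KEYS).
payload = {
    'job_id': 42,
    'event': 'runner_on_failed',
    'counter': 7,
    'created': '2021-01-01T00:00:00Z',            # parsed and made tz-aware
    'event_data': {'res': {'msg': 'timed out'}},
}
event = JobEvent.create_from_data(**payload)      # returns an unsaved instance
event.failed                 # True, assuming runner_on_failed carries the
                             # failed flag in EVENT_TYPES
event.get_event_display2()   # 'Host Async Timeout' (res['msg'] == 'timed out')
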
Example #13
class ActivityStream(models.Model):
    """
    Model used to describe activity stream (audit) events
    """
    class Meta:
        app_label = 'main'
        ordering = ('pk', )

    OPERATION_CHOICES = [
        ('create', _('Entity Created')),
        ('update', _("Entity Updated")),
        ('delete', _("Entity Deleted")),
        ('associate', _("Entity Associated with another Entity")),
        ('disassociate', _("Entity was Disassociated with another Entity")),
    ]

    actor = models.ForeignKey('auth.User',
                              null=True,
                              on_delete=models.SET_NULL,
                              related_name='activity_stream')
    operation = models.CharField(max_length=13, choices=OPERATION_CHOICES)
    timestamp = models.DateTimeField(auto_now_add=True)
    changes = accepts_json(models.TextField(blank=True))
    deleted_actor = JSONBlob(null=True)
    action_node = models.CharField(
        blank=True,
        default='',
        editable=False,
        max_length=512,
        help_text=_("The cluster node the activity took place on."),
    )

    object_relationship_type = models.TextField(blank=True)
    object1 = models.TextField()
    object2 = models.TextField()

    user = models.ManyToManyField("auth.User", blank=True)
    organization = models.ManyToManyField("Organization", blank=True)
    inventory = models.ManyToManyField("Inventory", blank=True)
    host = models.ManyToManyField("Host", blank=True)
    group = models.ManyToManyField("Group", blank=True)
    inventory_source = models.ManyToManyField("InventorySource", blank=True)
    inventory_update = models.ManyToManyField("InventoryUpdate", blank=True)
    credential = models.ManyToManyField("Credential", blank=True)
    credential_type = models.ManyToManyField("CredentialType", blank=True)
    team = models.ManyToManyField("Team", blank=True)
    project = models.ManyToManyField("Project", blank=True)
    project_update = models.ManyToManyField("ProjectUpdate", blank=True)
    execution_environment = models.ManyToManyField("ExecutionEnvironment",
                                                   blank=True)
    job_template = models.ManyToManyField("JobTemplate", blank=True)
    job = models.ManyToManyField("Job", blank=True)
    workflow_job_template_node = models.ManyToManyField(
        "WorkflowJobTemplateNode", blank=True)
    workflow_job_node = models.ManyToManyField("WorkflowJobNode", blank=True)
    workflow_job_template = models.ManyToManyField("WorkflowJobTemplate",
                                                   blank=True)
    workflow_job = models.ManyToManyField("WorkflowJob", blank=True)
    workflow_approval_template = models.ManyToManyField(
        "WorkflowApprovalTemplate", blank=True)
    workflow_approval = models.ManyToManyField("WorkflowApproval", blank=True)
    unified_job_template = models.ManyToManyField(
        "UnifiedJobTemplate",
        blank=True,
        related_name='activity_stream_as_unified_job_template+')
    unified_job = models.ManyToManyField(
        "UnifiedJob",
        blank=True,
        related_name='activity_stream_as_unified_job+')
    ad_hoc_command = models.ManyToManyField("AdHocCommand", blank=True)
    schedule = models.ManyToManyField("Schedule", blank=True)
    notification_template = models.ManyToManyField("NotificationTemplate",
                                                   blank=True)
    notification = models.ManyToManyField("Notification", blank=True)
    label = models.ManyToManyField("Label", blank=True)
    role = models.ManyToManyField("Role", blank=True)
    instance = models.ManyToManyField("Instance", blank=True)
    instance_group = models.ManyToManyField("InstanceGroup", blank=True)
    o_auth2_application = models.ManyToManyField("OAuth2Application",
                                                 blank=True)
    o_auth2_access_token = models.ManyToManyField("OAuth2AccessToken",
                                                  blank=True)

    setting = JSONBlob(default=dict, blank=True)

    def __str__(self):
        operation = self.operation if 'operation' in self.__dict__ else '_delayed_'
        if 'timestamp' in self.__dict__:
            if self.timestamp:
                timestamp = self.timestamp.isoformat()
            else:
                timestamp = self.timestamp
        else:
            timestamp = '_delayed_'
        return u'%s-%s-pk=%s' % (operation, timestamp, self.pk)

    def get_absolute_url(self, request=None):
        return reverse('api:activity_stream_detail',
                       kwargs={'pk': self.pk},
                       request=request)

    def save(self, *args, **kwargs):
        # Store denormalized actor metadata so that we retain it for accounting
        # purposes when the User row is deleted.
        if self.actor:
            self.deleted_actor = {
                'id': self.actor_id,
                'username': smart_str(self.actor.username),
                'first_name': smart_str(self.actor.first_name),
                'last_name': smart_str(self.actor.last_name),
            }
            if 'update_fields' in kwargs and 'deleted_actor' not in kwargs['update_fields']:
                kwargs['update_fields'].append('deleted_actor')

        hostname_char_limit = self._meta.get_field('action_node').max_length
        self.action_node = settings.CLUSTER_HOST_ID[:hostname_char_limit]

        super(ActivityStream, self).save(*args, **kwargs)
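
A short sketch (hypothetical objects, not from the source) of the denormalization performed in save() above:

entry = ActivityStream(operation='create', object1='job_template')
entry.actor = some_user            # hypothetical auth.User instance
entry.save()
entry.deleted_actor['username']    # == some_user.username
some_user.delete()                 # the actor FK goes NULL (SET_NULL) ...
entry.refresh_from_db()
entry.deleted_actor                # ... but the copied metadata remains
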
Example #14
class NotificationTemplate(CommonModelNameNotUnique):

    NOTIFICATION_TYPES = [
        ('email', _('Email'), CustomEmailBackend),
        ('slack', _('Slack'), SlackBackend),
        ('twilio', _('Twilio'), TwilioBackend),
        ('pagerduty', _('Pagerduty'), PagerDutyBackend),
        ('grafana', _('Grafana'), GrafanaBackend),
        ('webhook', _('Webhook'), WebhookBackend),
        ('mattermost', _('Mattermost'), MattermostBackend),
        ('rocketchat', _('Rocket.Chat'), RocketChatBackend),
        ('irc', _('IRC'), IrcBackend),
    ]
    NOTIFICATION_TYPE_CHOICES = sorted([(x[0], x[1])
                                        for x in NOTIFICATION_TYPES])
    CLASS_FOR_NOTIFICATION_TYPE = dict([(x[0], x[2])
                                        for x in NOTIFICATION_TYPES])

    class Meta:
        app_label = 'main'
        unique_together = ('organization', 'name')
        ordering = ("name", )

    organization = models.ForeignKey(
        'Organization',
        blank=False,
        null=True,
        on_delete=models.CASCADE,
        related_name='notification_templates',
    )

    notification_type = models.CharField(
        max_length=32,
        choices=NOTIFICATION_TYPE_CHOICES,
    )

    notification_configuration = prevent_search(JSONBlob(default=dict))

    def default_messages():
        return {
            'started': None,
            'success': None,
            'error': None,
            'workflow_approval': None
        }

    messages = JSONBlob(
        null=True,
        blank=True,
        default=default_messages,
        help_text=_('Optional custom messages for notification template.'))

    def has_message(self, condition):
        potential_template = self.messages.get(condition, {})
        if potential_template == {}:
            return False
        if potential_template.get('message', {}) == {}:
            return False
        return True

    def get_message(self, condition):
        return self.messages.get(condition, {})

    def get_absolute_url(self, request=None):
        return reverse('api:notification_template_detail',
                       kwargs={'pk': self.pk},
                       request=request)

    @property
    def notification_class(self):
        return self.CLASS_FOR_NOTIFICATION_TYPE[self.notification_type]

    def save(self, *args, **kwargs):
        new_instance = not bool(self.pk)
        update_fields = kwargs.get('update_fields', [])

        # preserve existing notification messages if not overwritten by new messages
        if not new_instance:
            old_nt = NotificationTemplate.objects.get(pk=self.id)
            old_messages = old_nt.messages
            new_messages = self.messages

            def merge_messages(local_old_messages, local_new_messages, local_event):
                if local_new_messages.get(local_event, {}) and local_old_messages.get(local_event, {}):
                    local_old_event_msgs = local_old_messages[local_event]
                    local_new_event_msgs = local_new_messages[local_event]
                    for msg_type in ['message', 'body']:
                        if msg_type not in local_new_event_msgs and local_old_event_msgs.get(msg_type, None):
                            local_new_event_msgs[msg_type] = local_old_event_msgs[msg_type]

            if old_messages is not None and new_messages is not None:
                for event in ('started', 'success', 'error', 'workflow_approval'):
                    if not new_messages.get(event, {}) and old_messages.get(event, {}):
                        new_messages[event] = old_messages[event]
                        continue

                    if event == 'workflow_approval' and old_messages.get('workflow_approval', None):
                        new_messages.setdefault('workflow_approval', {})
                        for subevent in ('running', 'approved', 'timed_out', 'denied'):
                            old_wfa_messages = old_messages['workflow_approval']
                            new_wfa_messages = new_messages['workflow_approval']
                            if not new_wfa_messages.get(subevent, {}) and old_wfa_messages.get(subevent, {}):
                                new_wfa_messages[subevent] = old_wfa_messages[subevent]
                                continue
                            if old_wfa_messages:
                                merge_messages(old_wfa_messages, new_wfa_messages, subevent)
                    else:
                        merge_messages(old_messages, new_messages, event)
                    new_messages.setdefault(event, None)

        for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
                            self.notification_class.init_parameters):
            if self.notification_configuration[field].startswith("$encrypted$"):
                continue
            if new_instance:
                value = self.notification_configuration[field]
                setattr(self, '_saved_{}_{}'.format("config", field), value)
                self.notification_configuration[field] = ''
            else:
                encrypted = encrypt_field(self, 'notification_configuration', subfield=field)
                self.notification_configuration[field] = encrypted
                if 'notification_configuration' not in update_fields:
                    update_fields.append('notification_configuration')
        super(NotificationTemplate, self).save(*args, **kwargs)
        if new_instance:
            update_fields = []
            for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
                                self.notification_class.init_parameters):
                saved_value = getattr(self, '_saved_{}_{}'.format("config", field), '')
                self.notification_configuration[field] = saved_value
                if 'notification_configuration' not in update_fields:
                    update_fields.append('notification_configuration')
            self.save(update_fields=update_fields)
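
    # Note on the two-phase save above (added comment): for a brand-new row
    # the password values are stashed on the instance and blanked, the row is
    # saved once to obtain a primary key, and then the stashed values are
    # restored and save() runs again -- this time taking the encrypt_field()
    # branch, which presumably needs the pk to derive the encryption key.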

    @property
    def recipients(self):
        return self.notification_configuration[self.notification_class.recipient_parameter]

    def generate_notification(self, msg, body):
        notification = Notification(notification_template=self,
                                    notification_type=self.notification_type,
                                    recipients=smart_str(self.recipients),
                                    subject=msg,
                                    body=body)
        notification.save()
        return notification

    def send(self, subject, body):
        for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
                            self.notification_class.init_parameters):
            if field in self.notification_configuration:
                self.notification_configuration[field] = decrypt_field(
                    self, 'notification_configuration', subfield=field)
        recipients = self.notification_configuration.pop(self.notification_class.recipient_parameter)
        if not isinstance(recipients, list):
            recipients = [recipients]
        sender = self.notification_configuration.pop(self.notification_class.sender_parameter, None)
        notification_configuration = deepcopy(self.notification_configuration)
        for field, params in self.notification_class.init_parameters.items():
            if field not in notification_configuration:
                if 'default' in params:
                    notification_configuration[field] = params['default']
        backend_obj = self.notification_class(**notification_configuration)
        notification_obj = EmailMessage(subject, backend_obj.format_body(body), sender, recipients)
        with set_environ(**settings.AWX_TASK_ENV):
            return backend_obj.send_messages([notification_obj])

    def display_notification_configuration(self):
        field_val = self.notification_configuration.copy()
        for field in self.notification_class.init_parameters:
            if field in field_val and force_str(field_val[field]).startswith('$encrypted$'):
                field_val[field] = '$encrypted$'
        return field_val
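
A minimal sketch of the message helpers above (values are hypothetical):

nt = NotificationTemplate(notification_type='slack')
nt.messages = {'success': {'message': 'All good'}}
nt.has_message('success')    # True  -- a non-empty 'message' is present
nt.has_message('error')      # False -- messages.get('error', {}) is empty
nt.get_message('error')      # {}
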
Example #15
class SurveyJobTemplateMixin(models.Model):
    class Meta:
        abstract = True

    survey_enabled = models.BooleanField(default=False)
    survey_spec = prevent_search(JSONBlob(default=dict, blank=True))
    ask_variables_on_launch = AskForField(blank=True,
                                          default=False,
                                          allows_field='extra_vars')

    def survey_password_variables(self):
        vars = []
        if self.survey_enabled and 'spec' in self.survey_spec:
            # Get variables that are type password
            for survey_element in self.survey_spec['spec']:
                if survey_element['type'] == 'password':
                    vars.append(survey_element['variable'])
        return vars

    @property
    def variables_needed_to_start(self):
        vars = []
        if self.survey_enabled and 'spec' in self.survey_spec:
            for survey_element in self.survey_spec['spec']:
                if survey_element['required']:
                    vars.append(survey_element['variable'])
        return vars

    def _update_unified_job_kwargs(self, create_kwargs, kwargs):
        """
        Combine extra_vars with variable precedence order:
          JT extra_vars -> JT survey defaults -> runtime extra_vars

        :param create_kwargs: key-worded arguments to be updated and later used for creating unified job.
        :type create_kwargs: dict
        :param kwargs: request parameters used to override unified job template fields with runtime values.
        :type kwargs: dict
        :return: modified create_kwargs.
        :rtype: dict
        """
        # Job Template extra_vars
        extra_vars = self.extra_vars_dict

        survey_defaults = {}

        # transform to dict
        if 'extra_vars' in kwargs:
            runtime_extra_vars = kwargs['extra_vars']
            runtime_extra_vars = parse_yaml_or_json(runtime_extra_vars)
        else:
            runtime_extra_vars = {}

        # Overwrite job template extra vars with survey default vars
        if self.survey_enabled and 'spec' in self.survey_spec:
            for survey_element in self.survey_spec.get("spec", []):
                default = survey_element.get('default')
                variable_key = survey_element.get('variable')

                if survey_element.get('type') == 'password':
                    if variable_key in runtime_extra_vars:
                        kw_value = runtime_extra_vars[variable_key]
                        if kw_value == '$encrypted$':
                            runtime_extra_vars.pop(variable_key)

                if default is not None:
                    decrypted_default = default
                    if (survey_element['type'] == "password"
                            and isinstance(decrypted_default, str)
                            and decrypted_default.startswith('$encrypted$')):
                        decrypted_default = decrypt_value(get_encryption_key('value', pk=None), decrypted_default)
                    errors = self._survey_element_validation(survey_element, {variable_key: decrypted_default})
                    if not errors:
                        survey_defaults[variable_key] = default
        extra_vars.update(survey_defaults)

        # Overwrite job template extra vars with explicit job extra vars
        # and add on job extra vars
        extra_vars.update(runtime_extra_vars)
        create_kwargs['extra_vars'] = json.dumps(extra_vars)
        return create_kwargs
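
    # Illustration of the precedence above (hypothetical values): with
    #   template extra_vars           {'a': 1, 'b': 2}
    #   valid survey default          {'b': 3}
    #   runtime (prompted) extra_vars {'a': 9}
    # the result is {'a': 9, 'b': 3}: runtime vars override survey defaults,
    # which override the template's own extra_vars.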

    def _survey_element_validation(self, survey_element, data, validate_required=True):
        # Don't apply validation to the `$encrypted$` placeholder; the decrypted
        # default (if any) will be validated against instead
        errors = []

        if survey_element['type'] == "password":
            password_value = data.get(survey_element['variable'])
            if isinstance(password_value, str) and password_value == '$encrypted$':
                if survey_element.get('default') is None and survey_element['required']:
                    if validate_required:
                        errors.append("'%s' value missing" % survey_element['variable'])
                return errors

        if survey_element['variable'] not in data and survey_element['required']:
            if validate_required:
                errors.append("'%s' value missing" % survey_element['variable'])
        elif survey_element['type'] in ["textarea", "text", "password"]:
            if survey_element['variable'] in data:
                if not isinstance(data[survey_element['variable']], str):
                    errors.append("Value %s for '%s' expected to be a string." %
                                  (data[survey_element['variable']], survey_element['variable']))
                    return errors

                if ('min' in survey_element and survey_element['min'] not in ["", None]
                        and len(data[survey_element['variable']]) < int(survey_element['min'])):
                    errors.append("'%s' value %s is too small (length is %s must be at least %s)." %
                                  (survey_element['variable'], data[survey_element['variable']],
                                   len(data[survey_element['variable']]), survey_element['min']))
                if ('max' in survey_element and survey_element['max'] not in ["", None]
                        and len(data[survey_element['variable']]) > int(survey_element['max'])):
                    errors.append("'%s' value %s is too large (must be no more than %s)." %
                                  (survey_element['variable'], data[survey_element['variable']],
                                   survey_element['max']))

        elif survey_element['type'] == 'integer':
            if survey_element['variable'] in data:
                if type(data[survey_element['variable']]) != int:
                    errors.append("Value %s for '%s' expected to be an integer." %
                                  (data[survey_element['variable']], survey_element['variable']))
                    return errors
                if ('min' in survey_element and survey_element['min'] not in ["", None]
                        and survey_element['variable'] in data
                        and data[survey_element['variable']] < int(survey_element['min'])):
                    errors.append("'%s' value %s is too small (must be at least %s)." %
                                  (survey_element['variable'], data[survey_element['variable']],
                                   survey_element['min']))
                if ('max' in survey_element and survey_element['max'] not in ["", None]
                        and survey_element['variable'] in data
                        and data[survey_element['variable']] > int(survey_element['max'])):
                    errors.append("'%s' value %s is too large (must be no more than %s)." %
                                  (survey_element['variable'], data[survey_element['variable']],
                                   survey_element['max']))
        elif survey_element['type'] == 'float':
            if survey_element['variable'] in data:
                if type(data[survey_element['variable']]) not in (float, int):
                    errors.append("Value %s for '%s' expected to be a numeric type." %
                                  (data[survey_element['variable']], survey_element['variable']))
                    return errors
                if ('min' in survey_element and survey_element['min'] not in ["", None]
                        and data[survey_element['variable']] < float(survey_element['min'])):
                    errors.append("'%s' value %s is too small (must be at least %s)." %
                                  (survey_element['variable'], data[survey_element['variable']],
                                   survey_element['min']))
                if ('max' in survey_element and survey_element['max'] not in ["", None]
                        and data[survey_element['variable']] > float(survey_element['max'])):
                    errors.append("'%s' value %s is too large (must be no more than %s)." %
                                  (survey_element['variable'], data[survey_element['variable']],
                                   survey_element['max']))
        elif survey_element['type'] == 'multiselect':
            if survey_element['variable'] in data:
                if type(data[survey_element['variable']]) != list:
                    errors.append("'%s' value is expected to be a list." % survey_element['variable'])
                else:
                    choice_list = copy(survey_element['choices'])
                    if isinstance(choice_list, str):
                        choice_list = [choice for choice in choice_list.splitlines() if choice.strip() != '']
                    for val in data[survey_element['variable']]:
                        if val not in choice_list:
                            errors.append("Value %s for '%s' expected to be one of %s." %
                                          (val, survey_element['variable'], choice_list))
        elif survey_element['type'] == 'multiplechoice':
            choice_list = copy(survey_element['choices'])
            if isinstance(choice_list, str):
                choice_list = [choice for choice in choice_list.splitlines() if choice.strip() != '']
            if survey_element['variable'] in data:
                if data[survey_element['variable']] not in choice_list:
                    errors.append("Value %s for '%s' expected to be one of %s." %
                                  (data[survey_element['variable']], survey_element['variable'], choice_list))
        return errors
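
    # Illustration (hypothetical spec element): for
    #   elem = {'variable': 'count', 'type': 'integer', 'required': True, 'min': 1}
    # _survey_element_validation(elem, {}) yields ["'count' value missing"],
    # while _survey_element_validation(elem, {'count': 0}) reports that 0 is
    # too small (must be at least 1).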

    def _accept_or_ignore_variables(self, data, errors=None, _exclude_errors=(), extra_passwords=None):
        survey_is_enabled = self.survey_enabled and self.survey_spec
        extra_vars = data.copy()
        if errors is None:
            errors = {}
        rejected = {}
        accepted = {}

        if survey_is_enabled:
            # Check for data violation of survey rules
            survey_errors = []
            for survey_element in self.survey_spec.get("spec", []):
                key = survey_element.get('variable', None)
                value = data.get(key, None)
                validate_required = 'required' not in _exclude_errors
                if extra_passwords and key in extra_passwords and is_encrypted(value):
                    element_errors = self._survey_element_validation(
                        survey_element,
                        {key: decrypt_value(get_encryption_key('value', pk=None), value)},
                        validate_required=validate_required,
                    )
                else:
                    element_errors = self._survey_element_validation(
                        survey_element, data, validate_required=validate_required)

                if element_errors:
                    survey_errors += element_errors
                    if key is not None and key in extra_vars:
                        rejected[key] = extra_vars.pop(key)
                elif key in extra_vars:
                    accepted[key] = extra_vars.pop(key)
            if survey_errors:
                errors['variables_needed_to_start'] = survey_errors

        if self.ask_variables_on_launch:
            # We can accept all variables
            accepted.update(extra_vars)
            extra_vars = {}

        if extra_vars:
            # Prune the prompted variables for those identical to template
            tmp_extra_vars = self.extra_vars_dict
            for key in set(tmp_extra_vars.keys()) & set(extra_vars.keys()):
                if tmp_extra_vars[key] == extra_vars[key]:
                    extra_vars.pop(key)

        if extra_vars:
            # Leftover extra_vars, keys provided that are not allowed
            rejected.update(extra_vars)
            # ignored variables do not block a manual launch
            if 'prompts' not in _exclude_errors:
                errors['extra_vars'] = [
                    _('Variables {list_of_keys} are not allowed on launch. Check the Prompt on Launch setting '
                      + 'on the {model_name} to include Extra Variables.').format(
                          list_of_keys=', '.join([str(key) for key in extra_vars.keys()]),
                          model_name=self._meta.verbose_name.title())
                ]

        return (accepted, rejected, errors)

    @staticmethod
    def pivot_spec(spec):
        """
        Utility method that will return a dictionary keyed off variable names
        """
        pivoted = {}
        for element_data in spec.get('spec', []):
            if 'variable' in element_data:
                pivoted[element_data['variable']] = element_data
        return pivoted
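
    # Illustration: pivot_spec({'spec': [{'variable': 'x', 'type': 'text'}]})
    # returns {'x': {'variable': 'x', 'type': 'text'}}.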

    def survey_variable_validation(self, data):
        errors = []
        if not self.survey_enabled:
            return errors
        if 'name' not in self.survey_spec:
            errors.append("'name' missing from survey spec.")
        if 'description' not in self.survey_spec:
            errors.append("'description' missing from survey spec.")
        for survey_element in self.survey_spec.get("spec", []):
            errors += self._survey_element_validation(survey_element, data)
        return errors

    def display_survey_spec(self):
        """
        Hide encrypted default passwords in survey specs
        """
        survey_spec = deepcopy(self.survey_spec) if self.survey_spec else {}
        for field in survey_spec.get('spec', []):
            if field.get('type') == 'password':
                if 'default' in field and field['default']:
                    field['default'] = '$encrypted$'
        return survey_spec
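
    # Illustration: a password element stored as
    #   {'type': 'password', 'default': '$encrypted$...'}
    # comes back from this method with its default masked to the literal
    # '$encrypted$', so the stored value is never exposed.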