class SystemJob(UnifiedJob, SystemJobOptions, JobNotificationMixin):
    """A single run of a SystemJobTemplate (a built-in maintenance task)."""

    class Meta:
        app_label = 'main'
        ordering = ('id',)

    # Template this job was launched from; nullable so the job record
    # survives deletion of its template (SET_NULL).
    system_job_template = models.ForeignKey(
        'SystemJobTemplate',
        related_name='jobs',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )
    # Wrapped in prevent_search so the field is excluded from API filtering
    # (it may contain sensitive data).
    extra_vars = prevent_search(models.TextField(
        blank=True,
        default='',
    ))

    # Dict-style accessor over the raw extra_vars text.
    extra_vars_dict = VarsDictProperty('extra_vars', True)

    @classmethod
    def _get_parent_field_name(cls):
        # Name of the FK linking this unified job back to its template.
        return 'system_job_template'

    @classmethod
    def _get_task_class(cls):
        # Imported lazily to avoid a circular import with awx.main.tasks.
        from awx.main.tasks import RunSystemJob
        return RunSystemJob

    def websocket_emit_data(self):
        # System jobs emit no extra payload over the websocket.
        return {}

    def get_absolute_url(self, request=None):
        return reverse('api:system_job_detail', kwargs={'pk': self.pk}, request=request)

    def get_ui_url(self):
        return urljoin(settings.TOWER_URL_BASE, "/#/jobs/system/{}".format(self.pk))

    @property
    def event_class(self):
        return SystemJobEvent

    @property
    def task_impact(self):
        # Fixed scheduling cost: system jobs are assumed lightweight.
        return 5

    @property
    def preferred_instance_groups(self):
        # System jobs always run on the global instance groups.
        return self.global_instance_groups

    '''
    JobNotificationMixin
    '''
    def get_notification_templates(self):
        return self.system_job_template.notification_templates

    def get_notification_friendly_name(self):
        return "System Job"
class LaunchTimeConfig(LaunchTimeConfigBase):
    '''
    Common model for all objects that save details of a saved launch config
    WFJT / WJ nodes, schedules, and job launch configs (not all implemented yet)
    '''

    class Meta:
        abstract = True

    # Special case prompting fields, even more special than the other ones.
    # FIX: use the `dict` callable (not a `{}` literal) as the default so
    # every instance gets its own fresh dict instead of all rows sharing a
    # single mutable default object.
    extra_data = JSONField(
        blank=True,
        default=dict,
    )
    survey_passwords = prevent_search(
        JSONField(
            blank=True,
            default=dict,
            editable=False,
        ))
    # Credentials needed for non-unified job / unified JT models
    credentials = models.ManyToManyField('Credential', related_name='%(class)ss')

    @property
    def extra_vars(self):
        # Launch configs store prompted variables in ``extra_data``; expose
        # them under the familiar ``extra_vars`` name used by job templates.
        return self.extra_data

    @extra_vars.setter
    def extra_vars(self, extra_vars):
        self.extra_data = extra_vars
class Setting(CreatedModifiedModel):
    """A single key/value configuration setting, optionally scoped to a user."""

    key = models.CharField(max_length=255, )
    value = JSONField(null=True, )
    # Owning user for per-user settings; NULL means a global setting.
    user = prevent_search(
        models.ForeignKey(
            'auth.User',
            related_name='settings',
            default=None,
            null=True,
            editable=False,
            on_delete=models.CASCADE,
        ))

    def __str__(self):
        try:
            json_value = json.dumps(self.value)
        except (TypeError, ValueError):
            # FIX: json.dumps raises TypeError (not ValueError) for values it
            # cannot serialize; catch both so __str__ can never blow up on a
            # bad DB value.
            json_value = u'<Invalid JSON>'
        if self.user:
            return u'{} ({}) = {}'.format(self.key, self.user, json_value)
        else:
            return u'{} = {}'.format(self.key, json_value)

    def save(self, *args, **kwargs):
        """Encrypt sensitive values before persisting.

        New encrypted settings are saved twice: first with a blank value (so
        a primary key exists for key derivation), then again with the real
        value once the instance exists.
        """
        encrypted = settings_registry.is_setting_encrypted(self.key)
        new_instance = not bool(self.pk)
        # If update_fields has been specified, add our field names to it,
        # if it hasn't been specified, then we're just doing a normal save.
        update_fields = kwargs.get('update_fields', [])
        # When first saving to the database, don't store any encrypted field
        # value, but instead save it until after the instance is created.
        # Otherwise, store encrypted value to the database.
        if encrypted:
            if new_instance:
                self._saved_value = self.value
                self.value = ''
            else:
                self.value = encrypt_field(self, 'value')
                if 'value' not in update_fields:
                    update_fields.append('value')
        super(Setting, self).save(*args, **kwargs)
        # After saving a new instance for the first time, set the encrypted
        # field and save again.
        if encrypted and new_instance:
            from awx.main.signals import disable_activity_stream
            with disable_activity_stream():
                self.value = self._saved_value
                self.save(update_fields=['value'])

    @classmethod
    def get_cache_key(cls, key):
        # Conventional first-arg name for classmethods is `cls`.
        return key

    @classmethod
    def get_cache_id_key(cls, key):
        return '{}_ID'.format(key)
class WebhookTemplateMixin(models.Model):
    """Mixin adding webhook-trigger support (service, key, credential) to a template."""

    class Meta:
        abstract = True

    SERVICES = [
        ('github', "GitHub"),
        ('gitlab', "GitLab"),
    ]

    webhook_service = models.CharField(
        max_length=16,
        choices=SERVICES,
        blank=True,
        help_text=_('Service that webhook requests will be accepted from'))
    # Hidden from API filtering via prevent_search (it is a shared secret).
    webhook_key = prevent_search(
        models.CharField(
            max_length=64,
            blank=True,
            help_text=_('Shared secret that the webhook service will use to sign requests')))
    webhook_credential = models.ForeignKey(
        'Credential',
        blank=True,
        null=True,
        on_delete=models.SET_NULL,
        related_name='%(class)ss',
        help_text=_('Personal Access Token for posting back the status to the service API'),
    )

    def rotate_webhook_key(self):
        # Generate a fresh shared secret; the caller is responsible for saving.
        self.webhook_key = get_random_string(length=50)

    def save(self, *args, **kwargs):
        """Keep webhook_key in sync with webhook_service on save.

        A new key is generated whenever the service is first set or changed,
        and the key is cleared when the service is unset.
        """
        update_fields = kwargs.get('update_fields')
        if not self.pk or self._values_have_edits(
                {'webhook_service': self.webhook_service}):
            if self.webhook_service:
                self.rotate_webhook_key()
            else:
                self.webhook_key = ''
            if update_fields and 'webhook_service' in update_fields:
                # FIX: update_fields may be any iterable — commonly a list —
                # on which .add() does not exist. Rebuild it as a set including
                # webhook_key so the new key is actually persisted.
                kwargs['update_fields'] = set(update_fields) | {'webhook_key'}
        super().save(*args, **kwargs)
class LaunchTimeConfig(LaunchTimeConfigBase):
    '''
    Common model for all objects that save details of a saved launch config
    WFJT / WJ nodes, schedules, and job launch configs (not all implemented yet)
    '''

    class Meta:
        abstract = True

    # Special case prompting fields, even more special than the other ones
    extra_data = JSONField(blank=True, default=dict)
    survey_passwords = prevent_search(JSONField(
        blank=True,
        default=dict,
        editable=False,
    ))
    # Credentials needed for non-unified job / unified JT models
    credentials = models.ManyToManyField('Credential', related_name='%(class)ss')

    @property
    def extra_vars(self):
        # Prompted variables live in ``extra_data``; expose them under the
        # ``extra_vars`` name that job templates use.
        return self.extra_data

    @extra_vars.setter
    def extra_vars(self, extra_vars):
        self.extra_data = extra_vars

    def display_extra_vars(self):
        '''
        Hides fields marked as passwords in survey.
        '''
        masked = getattr(self, 'survey_passwords', None)
        if not masked:
            return self.extra_vars
        displayed = parse_yaml_or_json(self.extra_vars).copy()
        for var_name, placeholder in masked.items():
            if var_name in displayed:
                displayed[var_name] = placeholder
        return displayed

    def display_extra_data(self):
        return self.display_extra_vars()
class SurveyJobMixin(models.Model):
    """Mixin for job models whose extra_vars may contain survey password answers."""

    class Meta:
        abstract = True

    survey_passwords = prevent_search(
        JSONField(
            blank=True,
            # FIX: use the `dict` callable so each row gets its own dict;
            # a `{}` literal here is a single mutable object shared by all
            # instances.
            default=dict,
            editable=False,
        ))

    def display_extra_vars(self):
        '''
        Hides fields marked as passwords in survey.
        '''
        if self.survey_passwords:
            extra_vars = json.loads(self.extra_vars)
            for key, value in self.survey_passwords.items():
                # value is the masking placeholder (e.g. '$encrypted$').
                if key in extra_vars:
                    extra_vars[key] = value
            return json.dumps(extra_vars)
        else:
            return self.extra_vars

    def decrypted_extra_vars(self):
        '''
        Decrypts fields marked as passwords in survey.
        '''
        if self.survey_passwords:
            extra_vars = json.loads(self.extra_vars)
            for key in self.survey_passwords:
                value = extra_vars.get(key)
                if value and isinstance(value, six.string_types) and \
                        value.startswith('$encrypted$'):
                    extra_vars[key] = decrypt_value(
                        get_encryption_key('value', pk=None), value)
            return json.dumps(extra_vars)
        else:
            return self.extra_vars
class SurveyJobMixin(models.Model):
    """Mixin for job models whose extra_vars may contain survey password answers."""

    class Meta:
        abstract = True

    survey_passwords = prevent_search(
        JSONField(
            blank=True,
            # FIX: use the `dict` callable so each row gets its own dict;
            # a `{}` literal here is a single mutable object shared by all
            # instances.
            default=dict,
            editable=False,
        ))

    def display_extra_vars(self):
        '''
        Hides fields marked as passwords in survey.
        '''
        if self.survey_passwords:
            extra_vars = json.loads(self.extra_vars)
            for key, value in self.survey_passwords.items():
                # value is the masking placeholder (e.g. '$encrypted$').
                if key in extra_vars:
                    extra_vars[key] = value
            return json.dumps(extra_vars)
        else:
            return self.extra_vars
class AdHocCommand(UnifiedJob, JobNotificationMixin):
    """A single ad hoc (one-module) Ansible run against inventory hosts."""

    class Meta(object):
        app_label = 'main'

    diff_mode = models.BooleanField(default=False, )
    job_type = models.CharField(
        max_length=64,
        choices=AD_HOC_JOB_TYPE_CHOICES,
        default='run',
    )
    inventory = models.ForeignKey(
        'Inventory',
        related_name='ad_hoc_commands',
        null=True,
        on_delete=models.SET_NULL,
    )
    limit = models.TextField(
        blank=True,
        default='',
    )
    credential = models.ForeignKey(
        'Credential',
        related_name='ad_hoc_commands',
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )
    module_name = models.CharField(
        max_length=1024,
        default='',
        blank=True,
    )
    module_args = models.TextField(
        blank=True,
        default='',
    )
    forks = models.PositiveIntegerField(
        blank=True,
        default=0,
    )
    verbosity = models.PositiveIntegerField(
        choices=VERBOSITY_CHOICES,
        blank=True,
        default=0,
    )
    become_enabled = models.BooleanField(default=False, )
    # Hosts touched by this command, linked through AdHocCommandEvent rows.
    hosts = models.ManyToManyField(
        'Host',
        related_name='ad_hoc_commands',
        editable=False,
        through='AdHocCommandEvent',
    )
    # Hidden from API filtering via prevent_search (may contain secrets).
    extra_vars = prevent_search(models.TextField(
        blank=True,
        default='',
    ))

    # Dict-style accessor over the raw extra_vars text.
    extra_vars_dict = VarsDictProperty('extra_vars', True)

    def clean_inventory(self):
        inv = self.inventory
        if not inv:
            raise ValidationError(_('No valid inventory.'))
        return inv

    def clean_credential(self):
        cred = self.credential
        if cred and cred.kind != 'ssh':
            raise ValidationError(
                _('You must provide a machine / SSH credential.'), )
        return cred

    def clean_limit(self):
        # FIXME: Future feature - check if no hosts would match and reject the
        # command, instead of having to run it to find out.
        return self.limit

    def clean_module_name(self):
        if type(self.module_name) is not str:
            raise ValidationError(_("Invalid type for ad hoc command"))
        # Blank module name falls back to Ansible's `command` module.
        module_name = self.module_name.strip() or 'command'
        if module_name not in settings.AD_HOC_COMMANDS:
            raise ValidationError(_('Unsupported module for ad hoc commands.'))
        return module_name

    def clean_module_args(self):
        if type(self.module_args) is not str:
            raise ValidationError(_("Invalid type for ad hoc command"))
        module_args = self.module_args
        # command/shell are no-ops without arguments, so require them.
        if self.module_name in ('command', 'shell') and not module_args:
            raise ValidationError(
                _('No argument passed to %s module.') % self.module_name)
        return module_args

    @property
    def event_class(self):
        return AdHocCommandEvent

    @property
    def passwords_needed_to_start(self):
        '''Return list of password field names needed to start the job.'''
        if self.credential:
            return self.credential.passwords_needed
        else:
            return []

    def _get_parent_field_name(self):
        # Ad hoc commands are not launched from a template: no parent field.
        return ''

    @classmethod
    def _get_task_class(cls):
        # Imported lazily to avoid a circular import with awx.main.tasks.
        from awx.main.tasks import RunAdHocCommand
        return RunAdHocCommand

    @classmethod
    def supports_isolation(cls):
        return True

    def get_absolute_url(self, request=None):
        return reverse('api:ad_hoc_command_detail',
                       kwargs={'pk': self.pk},
                       request=request)

    def get_ui_url(self):
        return urljoin(settings.TOWER_URL_BASE,
                       "/#/jobs/command/{}".format(self.pk))

    @property
    def notification_templates(self):
        # Collect notification templates from every organization that owns an
        # inventory containing one of this command's hosts; sets de-duplicate
        # templates shared across organizations.
        all_orgs = set()
        for h in self.hosts.all():
            all_orgs.add(h.inventory.organization)
        active_templates = dict(error=set(), success=set(), any=set())
        base_notification_templates = NotificationTemplate.objects
        for org in all_orgs:
            for templ in base_notification_templates.filter(
                    organization_notification_templates_for_errors=org):
                active_templates['error'].add(templ)
            for templ in base_notification_templates.filter(
                    organization_notification_templates_for_success=org):
                active_templates['success'].add(templ)
            for templ in base_notification_templates.filter(
                    organization_notification_templates_for_any=org):
                active_templates['any'].add(templ)
        active_templates['error'] = list(active_templates['error'])
        active_templates['any'] = list(active_templates['any'])
        active_templates['success'] = list(active_templates['success'])
        return active_templates

    def get_passwords_needed_to_start(self):
        return self.passwords_needed_to_start

    @property
    def task_impact(self):
        # NOTE: We sorta have to assume the host count matches and that forks default to 5
        from awx.main.models.inventory import Host
        count_hosts = Host.objects.filter(
            enabled=True, inventory__ad_hoc_commands__pk=self.pk).count()
        return min(count_hosts, 5 if self.forks == 0 else self.forks) + 1

    def copy(self):
        # Create a new AdHocCommand duplicating this one's launch parameters.
        data = {}
        for field in ('job_type', 'inventory_id', 'limit', 'credential_id',
                      'module_name', 'module_args', 'forks', 'verbosity',
                      'extra_vars', 'become_enabled', 'diff_mode'):
            data[field] = getattr(self, field)
        return AdHocCommand.objects.create(**data)

    def save(self, *args, **kwargs):
        # Default the name to "module_name: module_args" (truncated to fit
        # the column) when no name was provided.
        update_fields = kwargs.get('update_fields', [])
        if not self.name:
            self.name = Truncator(u': '.join(
                filter(None, (self.module_name, self.module_args)))).chars(512)
            if 'name' not in update_fields:
                update_fields.append('name')
        super(AdHocCommand, self).save(*args, **kwargs)

    @property
    def preferred_instance_groups(self):
        # Inventory-level groups first, then organization-level groups,
        # falling back to the global groups when neither is configured.
        if self.inventory is not None and self.inventory.organization is not None:
            organization_groups = [
                x for x in self.inventory.organization.instance_groups.all()
            ]
        else:
            organization_groups = []
        if self.inventory is not None:
            inventory_groups = [
                x for x in self.inventory.instance_groups.all()
            ]
        else:
            inventory_groups = []
        selected_groups = inventory_groups + organization_groups
        if not selected_groups:
            return self.global_instance_groups
        return selected_groups

    '''
    JobNotificationMixin
    '''
    def get_notification_templates(self):
        return self.notification_templates

    def get_notification_friendly_name(self):
        return "AdHoc Command"
class JobOptions(BaseModel):
    '''
    Common options for job templates and jobs.
    '''

    class Meta:
        abstract = True

    diff_mode = models.BooleanField(
        default=False,
        help_text=_("If enabled, textual changes made to any templated files on the host are shown in the standard output"),
    )
    job_type = models.CharField(
        max_length=64,
        choices=JOB_TYPE_CHOICES,
        default='run',
    )
    inventory = models.ForeignKey(
        'Inventory',
        related_name='%(class)ss',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )
    project = models.ForeignKey(
        'Project',
        related_name='%(class)ss',
        null=True,
        default=None,
        blank=True,
        on_delete=models.SET_NULL,
    )
    playbook = models.CharField(
        max_length=1024,
        default='',
        blank=True,
    )
    scm_branch = models.CharField(
        max_length=1024,
        default='',
        blank=True,
        help_text=_(
            'Branch to use in job run. Project default used if blank. '
            'Only allowed if project allow_override field is set to true.'),
    )
    forks = models.PositiveIntegerField(
        blank=True,
        default=0,
    )
    limit = models.TextField(
        blank=True,
        default='',
    )
    verbosity = models.PositiveIntegerField(
        choices=VERBOSITY_CHOICES,
        blank=True,
        default=0,
    )
    # Hidden from API filtering via prevent_search; accepts_json lets the
    # API take either YAML or JSON text for this field.
    extra_vars = prevent_search(
        accepts_json(models.TextField(
            blank=True,
            default='',
        )))
    job_tags = models.CharField(
        max_length=1024,
        blank=True,
        default='',
    )
    force_handlers = models.BooleanField(
        blank=True,
        default=False,
    )
    skip_tags = models.CharField(
        max_length=1024,
        blank=True,
        default='',
    )
    start_at_task = models.CharField(
        max_length=1024,
        blank=True,
        default='',
    )
    become_enabled = models.BooleanField(default=False, )
    allow_simultaneous = models.BooleanField(default=False, )
    timeout = models.IntegerField(
        blank=True,
        default=0,
        help_text=_("The amount of time (in seconds) to run before the task is canceled."),
    )
    use_fact_cache = models.BooleanField(
        default=False,
        help_text=_("If enabled, Tower will act as an Ansible Fact Cache Plugin; persisting "
                    "facts at the end of a playbook run to the database and caching facts for use by Ansible."),
    )

    # Dict-style accessor over the raw extra_vars text.
    extra_vars_dict = VarsDictProperty('extra_vars', True)

    @property
    def machine_credential(self):
        # First SSH-kind credential attached (by convention there is at most one).
        return self.credentials.filter(credential_type__kind='ssh').first()

    @property
    def network_credentials(self):
        return list(self.credentials.filter(credential_type__kind='net'))

    @property
    def cloud_credentials(self):
        return list(self.credentials.filter(credential_type__kind='cloud'))

    @property
    def vault_credentials(self):
        return list(self.credentials.filter(credential_type__kind='vault'))

    @property
    def passwords_needed_to_start(self):
        '''Return list of password field names needed to start the job.'''
        needed = []
        # Unsaved credential objects can not require passwords
        if not self.pk:
            return needed
        for cred in self.credentials.all():
            needed.extend(cred.passwords_needed)
        return needed
class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
    """A model representing a Queue/Group of AWX Instances."""

    objects = InstanceGroupManager()

    name = models.CharField(max_length=250, unique=True)
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
    instances = models.ManyToManyField(
        'Instance',
        related_name='rampart_groups',
        editable=False,
        help_text=_('Instances that are members of this InstanceGroup'),
    )
    controller = models.ForeignKey(
        'InstanceGroup',
        related_name='controlled_groups',
        help_text=_('Instance Group to remotely control this group.'),
        editable=False,
        default=None,
        null=True,
        on_delete=models.CASCADE)
    credential = models.ForeignKey(
        'Credential',
        related_name='%(class)ss',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )
    pod_spec_override = prevent_search(
        models.TextField(
            blank=True,
            default='',
        ))
    policy_instance_percentage = models.IntegerField(
        default=0,
        help_text=_("Percentage of Instances to automatically assign to this group"))
    policy_instance_minimum = models.IntegerField(
        default=0,
        help_text=_("Static minimum number of Instances to automatically assign to this group"))
    policy_instance_list = JSONField(
        # FIX: use the `list` callable rather than a `[]` literal so every
        # row gets its own fresh list instead of sharing one mutable default.
        default=list,
        blank=True,
        help_text=_("List of exact-match Instances that will always be automatically assigned to this group"))

    # Fields watched by HasPolicyEditsMixin for policy re-evaluation.
    POLICY_FIELDS = frozenset(
        ('policy_instance_list', 'policy_instance_minimum',
         'policy_instance_percentage', 'controller'))

    def get_absolute_url(self, request=None):
        return reverse('api:instance_group_detail',
                       kwargs={'pk': self.pk},
                       request=request)

    @property
    def capacity(self):
        # Generator expression avoids materializing an intermediate list.
        return sum(inst.capacity for inst in self.instances.all())

    @property
    def jobs_running(self):
        return UnifiedJob.objects.filter(status__in=('running', 'waiting'),
                                         instance_group=self).count()

    @property
    def jobs_total(self):
        return UnifiedJob.objects.filter(instance_group=self).count()

    @property
    def is_controller(self):
        # True when this group remotely controls at least one isolated group.
        return self.controlled_groups.exists()

    @property
    def is_isolated(self):
        return bool(self.controller)

    @property
    def is_containerized(self):
        return bool(self.credential and self.credential.kubernetes)

    '''
    RelatedJobsMixin
    '''
    def _get_related_jobs(self):
        return UnifiedJob.objects.filter(instance_group=self)

    class Meta:
        app_label = 'main'

    @staticmethod
    def fit_task_to_most_remaining_capacity_instance(task, instances):
        """Return the instance with the most remaining capacity that can fit task, or None."""
        instance_most_capacity = None
        for i in instances:
            if i.remaining_capacity >= task.task_impact and \
                    (instance_most_capacity is None or
                     i.remaining_capacity > instance_most_capacity.remaining_capacity):
                instance_most_capacity = i
        return instance_most_capacity

    @staticmethod
    def find_largest_idle_instance(instances):
        """Return the highest-capacity instance with no jobs running, or None."""
        largest_instance = None
        for i in instances:
            if i.jobs_running == 0:
                if largest_instance is None:
                    largest_instance = i
                elif i.capacity > largest_instance.capacity:
                    largest_instance = i
        return largest_instance

    def choose_online_controller_node(self):
        # Random choice spreads isolated-job control work across the
        # controller group's healthy (enabled, nonzero-capacity) nodes.
        return random.choice(
            list(
                self.controller.instances.filter(
                    capacity__gt=0,
                    enabled=True).values_list('hostname', flat=True)))

    def set_default_policy_fields(self):
        self.policy_instance_list = []
        self.policy_instance_minimum = 0
        self.policy_instance_percentage = 0
class SurveyJobTemplateMixin(models.Model):
    """Mixin adding survey (prompt-for-variables) support to template models."""

    class Meta:
        abstract = True

    survey_enabled = models.BooleanField(default=False, )
    # Survey definition; hidden from API filtering because password-type
    # elements may carry encrypted defaults.
    survey_spec = prevent_search(JSONField(
        blank=True,
        default=dict,
    ))
    ask_variables_on_launch = AskForField(blank=True,
                                          default=False,
                                          allows_field='extra_vars')

    def survey_password_variables(self):
        # Names of all survey variables whose type is 'password'.
        vars = []
        if self.survey_enabled and 'spec' in self.survey_spec:
            # Get variables that are type password
            for survey_element in self.survey_spec['spec']:
                if survey_element['type'] == 'password':
                    vars.append(survey_element['variable'])
        return vars

    @property
    def variables_needed_to_start(self):
        # Names of all survey variables marked required.
        vars = []
        if self.survey_enabled and 'spec' in self.survey_spec:
            for survey_element in self.survey_spec['spec']:
                if survey_element['required']:
                    vars.append(survey_element['variable'])
        return vars

    def _update_unified_job_kwargs(self, create_kwargs, kwargs):
        '''
        Combine extra_vars with variable precedence order:
            JT extra_vars -> JT survey defaults -> runtime extra_vars

        :param create_kwargs: key-worded arguments to be updated and later used for creating unified job.
        :type create_kwargs: dict
        :param kwargs: request parameters used to override unified job template fields with runtime values.
        :type kwargs: dict
        :return: modified create_kwargs.
        :rtype: dict
        '''
        # Job Template extra_vars
        extra_vars = self.extra_vars_dict

        survey_defaults = {}

        # transform to dict
        if 'extra_vars' in kwargs:
            runtime_extra_vars = kwargs['extra_vars']
            runtime_extra_vars = parse_yaml_or_json(runtime_extra_vars)
        else:
            runtime_extra_vars = {}

        # Overwrite job template extra vars with survey default vars
        if self.survey_enabled and 'spec' in self.survey_spec:
            for survey_element in self.survey_spec.get("spec", []):
                default = survey_element.get('default')
                variable_key = survey_element.get('variable')

                if survey_element.get('type') == 'password':
                    if variable_key in runtime_extra_vars:
                        kw_value = runtime_extra_vars[variable_key]
                        # '$encrypted$' is a placeholder meaning "keep the
                        # stored value" — drop it so the default applies.
                        if kw_value == '$encrypted$':
                            runtime_extra_vars.pop(variable_key)

                if default is not None:
                    decrypted_default = default
                    if (survey_element['type'] == "password" and
                            isinstance(decrypted_default, str) and
                            decrypted_default.startswith('$encrypted$')):
                        decrypted_default = decrypt_value(
                            get_encryption_key('value', pk=None),
                            decrypted_default)
                    # Only apply defaults that pass the element's own rules.
                    errors = self._survey_element_validation(
                        survey_element, {variable_key: decrypted_default})
                    if not errors:
                        survey_defaults[variable_key] = default
        extra_vars.update(survey_defaults)

        # Overwrite job template extra vars with explicit job extra vars
        # and add on job extra vars
        extra_vars.update(runtime_extra_vars)
        create_kwargs['extra_vars'] = json.dumps(extra_vars)

        return create_kwargs

    def _survey_element_validation(self, survey_element, data,
                                   validate_required=True):
        # Validate one survey element against `data`; returns a list of
        # error-message strings (empty list means valid).
        # Don't apply validation to the `$encrypted$` placeholder; the decrypted
        # default (if any) will be validated against instead
        errors = []

        if (survey_element['type'] == "password"):
            password_value = data.get(survey_element['variable'])
            if (isinstance(password_value, str) and
                    password_value == '$encrypted$'):
                if survey_element.get('default') is None and survey_element['required']:
                    if validate_required:
                        errors.append("'%s' value missing" %
                                      survey_element['variable'])
                return errors

        if survey_element['variable'] not in data and survey_element['required']:
            if validate_required:
                errors.append("'%s' value missing" % survey_element['variable'])
        elif survey_element['type'] in ["textarea", "text", "password"]:
            if survey_element['variable'] in data:
                if not isinstance(data[survey_element['variable']], str):
                    errors.append(
                        "Value %s for '%s' expected to be a string." %
                        (data[survey_element['variable']],
                         survey_element['variable']))
                    return errors
                # min/max bound the string LENGTH for text-like types.
                if 'min' in survey_element and survey_element['min'] not in [
                        "", None
                ] and len(data[survey_element['variable']]) < int(
                        survey_element['min']):
                    errors.append(
                        "'%s' value %s is too small (length is %s must be at least %s)."
                        % (survey_element['variable'],
                           data[survey_element['variable']],
                           len(data[survey_element['variable']]),
                           survey_element['min']))
                if 'max' in survey_element and survey_element['max'] not in [
                        "", None
                ] and len(data[survey_element['variable']]) > int(
                        survey_element['max']):
                    errors.append(
                        "'%s' value %s is too large (must be no more than %s)."
                        % (survey_element['variable'],
                           data[survey_element['variable']],
                           survey_element['max']))
        elif survey_element['type'] == 'integer':
            if survey_element['variable'] in data:
                if type(data[survey_element['variable']]) != int:
                    errors.append(
                        "Value %s for '%s' expected to be an integer." %
                        (data[survey_element['variable']],
                         survey_element['variable']))
                    return errors
            # min/max bound the numeric VALUE for integer types.
            if 'min' in survey_element and survey_element['min'] not in ["", None] and survey_element['variable'] in data and \
                    data[survey_element['variable']] < int(survey_element['min']):
                errors.append(
                    "'%s' value %s is too small (must be at least %s)." %
                    (survey_element['variable'],
                     data[survey_element['variable']], survey_element['min']))
            if 'max' in survey_element and survey_element['max'] not in ["", None] and survey_element['variable'] in data and \
                    data[survey_element['variable']] > int(survey_element['max']):
                errors.append(
                    "'%s' value %s is too large (must be no more than %s)." %
                    (survey_element['variable'],
                     data[survey_element['variable']], survey_element['max']))
        elif survey_element['type'] == 'float':
            if survey_element['variable'] in data:
                if type(data[survey_element['variable']]) not in (float, int):
                    errors.append(
                        "Value %s for '%s' expected to be a numeric type." %
                        (data[survey_element['variable']],
                         survey_element['variable']))
                    return errors
                if 'min' in survey_element and survey_element['min'] not in [
                        "", None
                ] and data[survey_element['variable']] < float(
                        survey_element['min']):
                    errors.append(
                        "'%s' value %s is too small (must be at least %s)." %
                        (survey_element['variable'],
                         data[survey_element['variable']],
                         survey_element['min']))
                if 'max' in survey_element and survey_element['max'] not in [
                        "", None
                ] and data[survey_element['variable']] > float(
                        survey_element['max']):
                    errors.append(
                        "'%s' value %s is too large (must be no more than %s)."
                        % (survey_element['variable'],
                           data[survey_element['variable']],
                           survey_element['max']))
        elif survey_element['type'] == 'multiselect':
            if survey_element['variable'] in data:
                if type(data[survey_element['variable']]) != list:
                    errors.append("'%s' value is expected to be a list." %
                                  survey_element['variable'])
                else:
                    # choices may be a newline-delimited string or a list.
                    choice_list = copy(survey_element['choices'])
                    if isinstance(choice_list, str):
                        choice_list = [
                            choice for choice in choice_list.splitlines()
                            if choice.strip() != ''
                        ]
                    for val in data[survey_element['variable']]:
                        if val not in choice_list:
                            errors.append(
                                "Value %s for '%s' expected to be one of %s."
                                % (val, survey_element['variable'],
                                   choice_list))
        elif survey_element['type'] == 'multiplechoice':
            # choices may be a newline-delimited string or a list.
            choice_list = copy(survey_element['choices'])
            if isinstance(choice_list, str):
                choice_list = [
                    choice for choice in choice_list.splitlines()
                    if choice.strip() != ''
                ]
            if survey_element['variable'] in data:
                if data[survey_element['variable']] not in choice_list:
                    errors.append(
                        "Value %s for '%s' expected to be one of %s." %
                        (data[survey_element['variable']],
                         survey_element['variable'], choice_list))
        return errors

    def _accept_or_ignore_variables(self, data, errors=None,
                                    _exclude_errors=(), extra_passwords=None):
        # Partition prompted variables into (accepted, rejected, errors).
        # Survey-covered keys are validated element by element; remaining
        # keys are accepted wholesale only when ask_variables_on_launch.
        survey_is_enabled = (self.survey_enabled and self.survey_spec)
        extra_vars = data.copy()
        if errors is None:
            errors = {}
        rejected = {}
        accepted = {}

        if survey_is_enabled:
            # Check for data violation of survey rules
            survey_errors = []
            for survey_element in self.survey_spec.get("spec", []):
                key = survey_element.get('variable', None)
                value = data.get(key, None)
                validate_required = 'required' not in _exclude_errors
                if extra_passwords and key in extra_passwords and is_encrypted(value):
                    # Validate against the decrypted value, not the ciphertext.
                    element_errors = self._survey_element_validation(
                        survey_element, {
                            key:
                            decrypt_value(
                                get_encryption_key('value', pk=None), value)
                        },
                        validate_required=validate_required)
                else:
                    element_errors = self._survey_element_validation(
                        survey_element,
                        data,
                        validate_required=validate_required)

                if element_errors:
                    survey_errors += element_errors
                    if key is not None and key in extra_vars:
                        rejected[key] = extra_vars.pop(key)
                elif key in extra_vars:
                    accepted[key] = extra_vars.pop(key)
            if survey_errors:
                errors['variables_needed_to_start'] = survey_errors

        if self.ask_variables_on_launch:
            # We can accept all variables
            accepted.update(extra_vars)
            extra_vars = {}

        if extra_vars:
            # Prune the prompted variables for those identical to template
            tmp_extra_vars = self.extra_vars_dict
            for key in (set(tmp_extra_vars.keys()) &
                        set(extra_vars.keys())):
                if tmp_extra_vars[key] == extra_vars[key]:
                    extra_vars.pop(key)

        if extra_vars:
            # Leftover extra_vars, keys provided that are not allowed
            rejected.update(extra_vars)
            # ignored variables does not block manual launch
            if 'prompts' not in _exclude_errors:
                errors['extra_vars'] = [
                    _('Variables {list_of_keys} are not allowed on launch. Check the Prompt on Launch setting '
                      +
                      'on the {model_name} to include Extra Variables.').
                    format(list_of_keys=', '.join(
                        [str(key) for key in extra_vars.keys()]),
                           model_name=self._meta.verbose_name.title())
                ]

        return (accepted, rejected, errors)

    @staticmethod
    def pivot_spec(spec):
        '''
        Utility method that will return a dictionary keyed off variable names
        '''
        pivoted = {}
        for element_data in spec.get('spec', []):
            if 'variable' in element_data:
                pivoted[element_data['variable']] = element_data
        return pivoted

    def survey_variable_validation(self, data):
        # Full-survey validation: structural checks on the spec, then each
        # element validated against `data`.
        errors = []
        if not self.survey_enabled:
            return errors
        if 'name' not in self.survey_spec:
            errors.append("'name' missing from survey spec.")
        if 'description' not in self.survey_spec:
            errors.append("'description' missing from survey spec.")
        for survey_element in self.survey_spec.get("spec", []):
            errors += self._survey_element_validation(survey_element, data)
        return errors

    def display_survey_spec(self):
        '''
        Hide encrypted default passwords in survey specs
        '''
        survey_spec = deepcopy(self.survey_spec) if self.survey_spec else {}
        for field in survey_spec.get('spec', []):
            if field.get('type') == 'password':
                if 'default' in field and field['default']:
                    field['default'] = '$encrypted$'
        return survey_spec
class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
    """A model representing a Queue/Group of AWX Instances."""

    name = models.CharField(max_length=250, unique=True)
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
    instances = models.ManyToManyField(
        'Instance',
        related_name='rampart_groups',
        editable=False,
        help_text=_('Instances that are members of this InstanceGroup'),
    )
    # NOTE(review): presumably marks groups that dispatch jobs to container
    # pods rather than member instances — confirm against callers.
    is_container_group = models.BooleanField(default=False)
    credential = models.ForeignKey(
        'Credential',
        related_name='%(class)ss',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )
    # Hidden from API filtering via prevent_search.
    pod_spec_override = prevent_search(
        models.TextField(
            blank=True,
            default='',
        ))
    policy_instance_percentage = models.IntegerField(
        default=0,
        help_text=_("Percentage of Instances to automatically assign to this group"))
    policy_instance_minimum = models.IntegerField(
        default=0,
        help_text=_("Static minimum number of Instances to automatically assign to this group"))
    policy_instance_list = JSONBlob(
        default=list,
        blank=True,
        help_text=_("List of exact-match Instances that will always be automatically assigned to this group"))

    # Fields watched by HasPolicyEditsMixin for policy re-evaluation.
    POLICY_FIELDS = frozenset(
        ('policy_instance_list', 'policy_instance_minimum',
         'policy_instance_percentage'))

    def get_absolute_url(self, request=None):
        return reverse('api:instance_group_detail',
                       kwargs={'pk': self.pk},
                       request=request)

    @property
    def capacity(self):
        # Total capacity across all member instances.
        return sum(inst.capacity for inst in self.instances.all())

    @property
    def jobs_running(self):
        # Includes 'waiting' jobs already assigned to this group.
        return UnifiedJob.objects.filter(status__in=('running', 'waiting'),
                                         instance_group=self).count()

    @property
    def jobs_total(self):
        return UnifiedJob.objects.filter(instance_group=self).count()

    '''
    RelatedJobsMixin
    '''
    def _get_related_jobs(self):
        return UnifiedJob.objects.filter(instance_group=self)

    class Meta:
        app_label = 'main'

    def set_default_policy_fields(self):
        # Reset all policy knobs to their defaults (no automatic assignment).
        self.policy_instance_list = []
        self.policy_instance_minimum = 0
        self.policy_instance_percentage = 0
class JobOptions(BaseModel):
    '''
    Common options for job templates and jobs.

    Abstract base mixed into both JobTemplate and Job so that launch-time
    options share one definition.
    '''

    class Meta:
        abstract = True

    diff_mode = models.BooleanField(
        default=False,
        help_text=_("If enabled, textual changes made to any templated files on the host are shown in the standard output"),
    )
    job_type = models.CharField(
        max_length=64,
        choices=JOB_TYPE_CHOICES,
        default='run',
    )
    # Both FKs use SET_NULL so deleting an Inventory/Project does not cascade
    # into deleting jobs that referenced it.
    inventory = models.ForeignKey(
        'Inventory',
        related_name='%(class)ss',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )
    project = models.ForeignKey(
        'Project',
        related_name='%(class)ss',
        null=True,
        default=None,
        blank=True,
        on_delete=models.SET_NULL,
    )
    playbook = models.CharField(
        max_length=1024,
        default='',
        blank=True,
    )
    forks = models.PositiveIntegerField(
        blank=True,
        default=0,
    )
    limit = models.TextField(
        blank=True,
        default='',
    )
    verbosity = models.PositiveIntegerField(
        choices=VERBOSITY_CHOICES,
        blank=True,
        default=0,
    )
    # Raw extra variables (YAML or JSON text); excluded from search filters
    # because it can contain secrets. Parsed view: extra_vars_dict below.
    extra_vars = prevent_search(models.TextField(
        blank=True,
        default='',
    ))
    job_tags = models.CharField(
        max_length=1024,
        blank=True,
        default='',
    )
    force_handlers = models.BooleanField(
        blank=True,
        default=False,
    )
    skip_tags = models.CharField(
        max_length=1024,
        blank=True,
        default='',
    )
    start_at_task = models.CharField(
        max_length=1024,
        blank=True,
        default='',
    )
    become_enabled = models.BooleanField(default=False,)
    allow_simultaneous = models.BooleanField(default=False,)
    timeout = models.IntegerField(
        blank=True,
        default=0,
        help_text=_("The amount of time (in seconds) to run before the task is canceled."),
    )
    use_fact_cache = models.BooleanField(
        default=False,
        help_text=_("If enabled, Tower will act as an Ansible Fact Cache Plugin; persisting "
                    "facts at the end of a playbook run to the database and caching facts for use by Ansible."),
    )

    # Dict-like accessor over the raw extra_vars text.
    extra_vars_dict = VarsDictProperty('extra_vars', True)

    def clean_credential(self):
        """Validate that the deprecated single `credential` is an SSH credential."""
        cred = self.credential
        if cred and cred.kind != 'ssh':
            raise ValidationError(_('You must provide an SSH credential.'),)
        return cred

    def clean_vault_credential(self):
        """Validate that the deprecated single `vault_credential` is a Vault credential."""
        cred = self.vault_credential
        if cred and cred.kind != 'vault':
            raise ValidationError(_('You must provide a Vault credential.'),)
        return cred

    @property
    def network_credentials(self):
        """All related credentials whose type kind is 'net'."""
        return list(self.credentials.filter(credential_type__kind='net'))

    @property
    def cloud_credentials(self):
        """All related credentials whose type kind is 'cloud'."""
        return list(self.credentials.filter(credential_type__kind='cloud'))

    @property
    def vault_credentials(self):
        """All related credentials whose type kind is 'vault'."""
        return list(self.credentials.filter(credential_type__kind='vault'))

    @property
    def credential(self):
        """Deprecated single-credential accessor: pk of the first 'ssh' credential, else None."""
        cred = self.get_deprecated_credential('ssh')
        if cred is not None:
            return cred.pk

    @property
    def vault_credential(self):
        """Deprecated single-credential accessor: pk of the first 'vault' credential, else None."""
        cred = self.get_deprecated_credential('vault')
        if cred is not None:
            return cred.pk

    def get_deprecated_credential(self, kind):
        """Return the first related credential of the given type kind, or None."""
        for cred in self.credentials.all():
            if cred.credential_type.kind == kind:
                return cred
        # for/else: runs only when the loop finishes without returning a match
        # (including when there are no credentials at all).
        else:
            return None

    # TODO: remove when API v1 is removed
    @property
    def cloud_credential(self):
        # Last cloud credential's pk, or None when there are none.
        try:
            return self.cloud_credentials[-1].pk
        except IndexError:
            return None

    # TODO: remove when API v1 is removed
    @property
    def network_credential(self):
        # Last network credential's pk, or None when there are none.
        try:
            return self.network_credentials[-1].pk
        except IndexError:
            return None

    @property
    def passwords_needed_to_start(self):
        '''Return list of password field names needed to start the job.'''
        needed = []
        # Unsaved credential objects can not require passwords
        if not self.pk:
            return needed
        for cred in self.credentials.all():
            needed.extend(cred.passwords_needed)
        return needed
class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin, WebhookTemplateMixin):
    """
    A job template is a reusable job definition for applying a project (with
    playbook) to an inventory source with a given credential.
    """

    # Related objects carried over when this template is copied.
    FIELDS_TO_PRESERVE_AT_COPY = ['labels', 'instance_groups', 'credentials', 'survey_spec']
    FIELDS_TO_DISCARD_AT_COPY = ['vault_credential', 'credential']
    # Soft uniqueness enforced by validate_unique() below rather than a DB
    # constraint, because organization is inferred from project post-clean.
    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]

    class Meta:
        app_label = 'main'
        ordering = ('name',)

    job_type = models.CharField(
        max_length=64,
        choices=NEW_JOB_TYPE_CHOICES,
        default='run',
    )
    # Key for the provisioning-callback feature; hidden from search filters.
    host_config_key = prevent_search(models.CharField(
        max_length=1024,
        blank=True,
        default='',
    ))
    # ask_*_on_launch flags: each True flag allows the matching field to be
    # supplied as a prompt at launch time (see _accept_or_ignore_job_kwargs).
    ask_diff_mode_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_limit_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_tags_on_launch = AskForField(blank=True, default=False, allows_field='job_tags')
    ask_skip_tags_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_job_type_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_verbosity_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_inventory_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_credential_on_launch = AskForField(blank=True, default=False, allows_field='credentials')
    ask_scm_branch_on_launch = AskForField(blank=True, default=False, allows_field='scm_branch')
    job_slice_count = models.PositiveIntegerField(
        blank=True,
        default=1,
        help_text=_("The number of jobs to slice into at runtime. "
                    "Will cause the Job Template to launch a workflow if value is greater than 1."),
    )

    admin_role = ImplicitRoleField(parent_role=['organization.job_template_admin_role'])
    execute_role = ImplicitRoleField(
        parent_role=['admin_role', 'organization.execute_role'],
    )
    read_role = ImplicitRoleField(
        parent_role=[
            'organization.auditor_role',
            'inventory.organization.auditor_role',  # partial support for old inheritance via inventory
            'execute_role',
            'admin_role',
        ],
    )

    @classmethod
    def _get_unified_job_class(cls):
        """The unified job model this template launches."""
        return Job

    @classmethod
    def _get_unified_job_field_names(cls):
        """Names of fields copied from the template onto a launched job."""
        return set(f.name for f in JobOptions._meta.fields) | set([
            'name', 'description', 'organization', 'survey_passwords', 'labels', 'credentials',
            'job_slice_number', 'job_slice_count', 'execution_environment'
        ])

    @property
    def validation_errors(self):
        """
        Fields needed to start, which cannot be given on launch, invalid state.
        """
        validation_errors = {}
        if self.inventory is None and not self.ask_inventory_on_launch:
            validation_errors['inventory'] = [
                _("Job Template must provide 'inventory' or allow prompting for it."),
            ]
        if self.project is None:
            validation_errors['project'] = [
                _("Job Templates must have a project assigned."),
            ]
        return validation_errors

    @property
    def resources_needed_to_start(self):
        """Names of required related resources ('project'/'inventory') not yet set."""
        return [fd for fd in ['project', 'inventory'] if not getattr(self, '{}_id'.format(fd))]

    def clean_forks(self):
        """Enforce the global MAX_FORKS ceiling (0 disables the check)."""
        if settings.MAX_FORKS > 0 and self.forks > settings.MAX_FORKS:
            raise ValidationError(_(f'Maximum number of forks ({settings.MAX_FORKS}) exceeded.'))
        return self.forks

    def create_job(self, **kwargs):
        """
        Create a new job based on this template.
        """
        return self.create_unified_job(**kwargs)

    def get_effective_slice_ct(self, kwargs):
        """Slice count capped by the host count of the effective inventory.

        The inventory may be overridden at launch when ask_inventory_on_launch
        is set; with no inventory at all, the raw job_slice_count is returned.
        """
        actual_inventory = self.inventory
        if self.ask_inventory_on_launch and 'inventory' in kwargs:
            actual_inventory = kwargs['inventory']
        if actual_inventory:
            return min(self.job_slice_count, actual_inventory.hosts.count())
        else:
            return self.job_slice_count

    def save(self, *args, **kwargs):
        """Keep organization in sync with the project's organization on save."""
        update_fields = kwargs.get('update_fields', [])
        # if project is deleted for some reason, then keep the old organization
        # to retain ownership for organization admins
        if self.project and self.project.organization_id != self.organization_id:
            self.organization_id = self.project.organization_id
            if 'organization' not in update_fields and 'organization_id' not in update_fields:
                update_fields.append('organization_id')
        return super(JobTemplate, self).save(*args, **kwargs)

    def validate_unique(self, exclude=None):
        """Custom over-ride for JT specifically
        because organization is inferred from project after full_clean is finished
        thus the organization field is not yet set when validation happens
        """
        errors = []
        for ut in JobTemplate.SOFT_UNIQUE_TOGETHER:
            kwargs = {'name': self.name}
            # Use the project's organization as the would-be organization value.
            if self.project:
                kwargs['organization'] = self.project.organization_id
            else:
                kwargs['organization'] = None
            qs = JobTemplate.objects.filter(**kwargs)
            if self.pk:
                qs = qs.exclude(pk=self.pk)
            if qs.exists():
                errors.append('%s with this (%s) combination already exists.' % (
                    JobTemplate.__name__, ', '.join(set(ut) - {'polymorphic_ctype'})))
        if errors:
            raise ValidationError(errors)

    def create_unified_job(self, **kwargs):
        """Create a Job — or, when slicing applies, a sliced WorkflowJob.

        With an effective slice count > 1 (and slicing not suppressed via
        _prevent_slicing), the parent class is steered to create a WorkflowJob
        and one WorkflowJobNode per slice is attached afterwards.
        """
        prevent_slicing = kwargs.pop('_prevent_slicing', False)
        slice_ct = self.get_effective_slice_ct(kwargs)
        slice_event = bool(slice_ct > 1 and (not prevent_slicing))
        if slice_event:
            # A Slice Job Template will generate a WorkflowJob rather than a Job
            from awx.main.models.workflow import WorkflowJobTemplate, WorkflowJobNode
            kwargs['_unified_job_class'] = WorkflowJobTemplate._get_unified_job_class()
            kwargs['_parent_field_name'] = "job_template"
            kwargs.setdefault('_eager_fields', {})
            kwargs['_eager_fields']['is_sliced_job'] = True
        elif self.job_slice_count > 1 and (not prevent_slicing):
            # Unique case where JT was set to slice but hosts not available
            kwargs.setdefault('_eager_fields', {})
            kwargs['_eager_fields']['job_slice_count'] = 1
        elif prevent_slicing:
            kwargs.setdefault('_eager_fields', {})
            kwargs['_eager_fields'].setdefault('job_slice_count', 1)
        job = super(JobTemplate, self).create_unified_job(**kwargs)
        if slice_event:
            # One workflow node per slice; slices are numbered starting at 1.
            for idx in range(slice_ct):
                create_kwargs = dict(workflow_job=job, unified_job_template=self,
                                     ancestor_artifacts=dict(job_slice=idx + 1))
                WorkflowJobNode.objects.create(**create_kwargs)
        return job

    def get_absolute_url(self, request=None):
        """Return the API detail URL for this job template."""
        return reverse('api:job_template_detail', kwargs={'pk': self.pk}, request=request)

    def can_start_without_user_input(self, callback_extra_vars=None):
        """
        Return whether job template can be used to start a new job without
        requiring any user input.
        """
        variables_needed = False
        if callback_extra_vars:
            # Callback path: only survey variables absent from the supplied
            # extra vars still count as needed.
            extra_vars_dict = parse_yaml_or_json(callback_extra_vars)
            for var in self.variables_needed_to_start:
                if var not in extra_vars_dict:
                    variables_needed = True
                    break
        elif self.variables_needed_to_start:
            variables_needed = True

        prompting_needed = False
        # The behavior of provisioning callback should mimic
        # that of job template launch, so prompting_needed should
        # not block a provisioning callback from creating/launching jobs.
        if callback_extra_vars is None:
            for ask_field_name in set(self.get_ask_mapping().values()):
                if getattr(self, ask_field_name):
                    prompting_needed = True
                    break
        return not prompting_needed and not self.passwords_needed_to_start and not variables_needed

    def _accept_or_ignore_job_kwargs(self, **kwargs):
        """Split launch-time kwargs into accepted prompts vs rejected fields.

        Returns (prompted_data, rejected_data, errors_dict). extra_vars are
        delegated to accept_or_ignore_variables; every other field follows the
        corresponding ask_*_on_launch flag, with scm_branch special-cased.
        """
        exclude_errors = kwargs.pop('_exclude_errors', [])
        prompted_data = {}
        rejected_data = {}
        accepted_vars, rejected_vars, errors_dict = self.accept_or_ignore_variables(
            kwargs.get('extra_vars', {}),
            _exclude_errors=exclude_errors,
            extra_passwords=kwargs.get('survey_passwords', {}))
        if accepted_vars:
            prompted_data['extra_vars'] = accepted_vars
        if rejected_vars:
            rejected_data['extra_vars'] = rejected_vars

        # Handle all the other fields that follow the simple prompting rule
        for field_name, ask_field_name in self.get_ask_mapping().items():
            if field_name not in kwargs or field_name == 'extra_vars' or kwargs[field_name] is None:
                continue
            new_value = kwargs[field_name]
            old_value = getattr(self, field_name)

            field = self._meta.get_field(field_name)
            if isinstance(field, models.ManyToManyField):
                # For M2M fields only the additions beyond the template's own
                # related set are treated as a prompt.
                old_value = set(old_value.all())
                new_value = set(kwargs[field_name]) - old_value
                if not new_value:
                    continue

            if new_value == old_value:
                # no-op case: Fields the same as template's value
                # counted as neither accepted or ignored
                continue
            elif field_name == 'scm_branch' and old_value == '' and self.project and new_value == self.project.scm_branch:
                # special case of "not provided" for branches
                # job template does not provide branch, runs with default branch
                continue
            elif getattr(self, ask_field_name):
                # Special case where prompts can be rejected based on project setting
                if field_name == 'scm_branch':
                    if not self.project:
                        rejected_data[field_name] = new_value
                        errors_dict[field_name] = _('Project is missing.')
                        continue
                    if kwargs['scm_branch'] != self.project.scm_branch and not self.project.allow_override:
                        rejected_data[field_name] = new_value
                        errors_dict[field_name] = _('Project does not allow override of branch.')
                        continue
                # accepted prompt
                prompted_data[field_name] = new_value
            else:
                # unprompted - template is not configured to accept field on launch
                rejected_data[field_name] = new_value
                # Not considered an error for manual launch, to support old
                # behavior of putting them in ignored_fields and launching anyway
                if 'prompts' not in exclude_errors:
                    errors_dict[field_name] = _('Field is not configured to prompt on launch.')

        if 'prompts' not in exclude_errors and (not getattr(self, 'ask_credential_on_launch', False)) and self.passwords_needed_to_start:
            errors_dict['passwords_needed_to_start'] = _('Saved launch configurations cannot provide passwords needed to start.')

        needed = self.resources_needed_to_start
        if needed:
            needed_errors = []
            for resource in needed:
                if resource in prompted_data:
                    continue
                needed_errors.append(_("Job Template {} is missing or undefined.").format(resource))
            if needed_errors:
                errors_dict['resources_needed_to_start'] = needed_errors

        return prompted_data, rejected_data, errors_dict

    @property
    def cache_timeout_blocked(self):
        # Blocks new launches when too many jobs from this template are
        # already queued or running (cap from SCHEDULE_MAX_JOBS, default 10).
        if Job.objects.filter(job_template=self,
                              status__in=['pending', 'waiting', 'running']).count() >= getattr(settings, 'SCHEDULE_MAX_JOBS', 10):
            logger.error("Job template %s could not be started because there are more than %s other jobs from that template waiting to run" %
                         (self.name, getattr(settings, 'SCHEDULE_MAX_JOBS', 10)))
            return True
        return False

    def _can_update(self):
        # Provisioning-callback capability mirrors no-input launchability.
        return self.can_start_without_user_input()

    @property
    def notification_templates(self):
        # Return all notification_templates defined on the Job Template, on the Project, and on the Organization for each trigger type
        # TODO: Currently there is no org fk on project so this will need to be added once that is
        # available after the rbac pr
        base_notification_templates = NotificationTemplate.objects
        error_notification_templates = list(base_notification_templates.filter(
            unifiedjobtemplate_notification_templates_for_errors__in=[self, self.project]))
        started_notification_templates = list(base_notification_templates.filter(
            unifiedjobtemplate_notification_templates_for_started__in=[self, self.project]))
        success_notification_templates = list(base_notification_templates.filter(
            unifiedjobtemplate_notification_templates_for_success__in=[self, self.project]))
        # Get Organization NotificationTemplates
        if self.organization is not None:
            # Conversion to set de-duplicates templates that appear at both
            # the template/project level and the organization level.
            error_notification_templates = set(error_notification_templates + list(base_notification_templates.filter(
                organization_notification_templates_for_errors=self.organization)))
            started_notification_templates = set(started_notification_templates + list(base_notification_templates.filter(
                organization_notification_templates_for_started=self.organization)))
            success_notification_templates = set(success_notification_templates + list(base_notification_templates.filter(
                organization_notification_templates_for_success=self.organization)))
        return dict(error=list(error_notification_templates),
                    started=list(started_notification_templates),
                    success=list(success_notification_templates))

    '''
    RelatedJobsMixin
    '''

    def _get_related_jobs(self):
        # RelatedJobsMixin hook: jobs launched from this template.
        return UnifiedJob.objects.filter(unified_job_template=self)
class SurveyJobTemplateMixin(models.Model):
    """Mixin adding survey support (spec storage, validation, var merging).

    Fixes relative to the prior revision:
    - ``unicode``/``basestring`` (Python 2 builtins, NameError on Python 3)
      replaced with ``str``.
    - min/max length checks for text-type and float-type survey elements now
      verify the variable is present in ``data`` before indexing, matching the
      integer branch; previously an absent optional variable with a min/max
      constraint raised KeyError.
    """

    class Meta:
        abstract = True

    survey_enabled = models.BooleanField(default=False,)
    # Raw survey spec; hidden from search filters (may embed password defaults).
    # NOTE(review): default={} is a shared mutable default; Django recommends a
    # callable (default=dict), but the file uses default={} elsewhere — kept
    # for consistency to avoid a migration.
    survey_spec = prevent_search(JSONField(
        blank=True,
        default={},
    ))
    ask_variables_on_launch = AskForField(blank=True, default=False, allows_field='extra_vars')

    def survey_password_variables(self):
        """Return names of survey variables whose type is 'password'."""
        password_vars = []
        if self.survey_enabled and 'spec' in self.survey_spec:
            # Get variables that are type password
            for survey_element in self.survey_spec['spec']:
                if survey_element['type'] == 'password':
                    password_vars.append(survey_element['variable'])
        return password_vars

    @property
    def variables_needed_to_start(self):
        """Return names of survey variables marked required."""
        required_vars = []
        if self.survey_enabled and 'spec' in self.survey_spec:
            for survey_element in self.survey_spec['spec']:
                if survey_element['required']:
                    required_vars.append(survey_element['variable'])
        return required_vars

    def _update_unified_job_kwargs(self, create_kwargs, kwargs):
        '''
        Combine extra_vars with variable precedence order:
        JT extra_vars -> JT survey defaults -> runtime extra_vars

        :param create_kwargs: key-worded arguments to be updated and later used for creating unified job.
        :type create_kwargs: dict
        :param kwargs: request parameters used to override unified job template fields with runtime values.
        :type kwargs: dict
        :return: modified create_kwargs.
        :rtype: dict
        '''
        # Job Template extra_vars
        extra_vars = self.extra_vars_dict

        survey_defaults = {}

        # transform to dict
        if 'extra_vars' in kwargs:
            runtime_extra_vars = kwargs['extra_vars']
            runtime_extra_vars = parse_yaml_or_json(runtime_extra_vars)
        else:
            runtime_extra_vars = {}

        # Overwrite with job template extra vars with survey default vars
        if self.survey_enabled and 'spec' in self.survey_spec:
            for survey_element in self.survey_spec.get("spec", []):
                default = survey_element.get('default')
                variable_key = survey_element.get('variable')
                if survey_element.get('type') == 'password':
                    # A '$encrypted$' placeholder sent at runtime means "keep
                    # the stored default" — substitute the real default value.
                    if variable_key in runtime_extra_vars and default:
                        kw_value = runtime_extra_vars[variable_key]
                        if kw_value.startswith('$encrypted$') and kw_value != default:
                            runtime_extra_vars[variable_key] = default
                if default is not None:
                    # Only apply defaults that themselves pass validation.
                    data = {variable_key: default}
                    errors = self._survey_element_validation(survey_element, data)
                    if not errors:
                        survey_defaults[variable_key] = default
        extra_vars.update(survey_defaults)

        # Overwrite job template extra vars with explicit job extra vars
        # and add on job extra vars
        extra_vars.update(runtime_extra_vars)
        create_kwargs['extra_vars'] = json.dumps(extra_vars)
        return create_kwargs

    def _survey_element_validation(self, survey_element, data):
        """Validate one survey element against ``data``; return error strings."""
        errors = []

        if survey_element['variable'] not in data and survey_element['required']:
            errors.append("'%s' value missing" % survey_element['variable'])
        elif survey_element['type'] in ["textarea", "text", "password"]:
            if survey_element['variable'] in data:
                if not isinstance(data[survey_element['variable']], str):
                    errors.append("Value %s for '%s' expected to be a string." % (
                        data[survey_element['variable']], survey_element['variable']))
                    return errors

                # Guard on membership before indexing: the variable may be
                # optional and absent.
                if 'min' in survey_element and survey_element['min'] not in ["", None] and survey_element['variable'] in data and \
                        len(data[survey_element['variable']]) < int(survey_element['min']):
                    errors.append("'%s' value %s is too small (length is %s must be at least %s)." % (
                        survey_element['variable'], data[survey_element['variable']],
                        len(data[survey_element['variable']]), survey_element['min']))
                if 'max' in survey_element and survey_element['max'] not in ["", None] and survey_element['variable'] in data and \
                        len(data[survey_element['variable']]) > int(survey_element['max']):
                    errors.append("'%s' value %s is too large (must be no more than %s)." % (
                        survey_element['variable'], data[survey_element['variable']], survey_element['max']))

        elif survey_element['type'] == 'integer':
            if survey_element['variable'] in data:
                if type(data[survey_element['variable']]) != int:
                    errors.append("Value %s for '%s' expected to be an integer." % (
                        data[survey_element['variable']], survey_element['variable']))
                    return errors
            if 'min' in survey_element and survey_element['min'] not in ["", None] and survey_element['variable'] in data and \
                    data[survey_element['variable']] < int(survey_element['min']):
                errors.append("'%s' value %s is too small (must be at least %s)." % (
                    survey_element['variable'], data[survey_element['variable']], survey_element['min']))
            if 'max' in survey_element and survey_element['max'] not in ["", None] and survey_element['variable'] in data and \
                    data[survey_element['variable']] > int(survey_element['max']):
                errors.append("'%s' value %s is too large (must be no more than %s)." % (
                    survey_element['variable'], data[survey_element['variable']], survey_element['max']))

        elif survey_element['type'] == 'float':
            if survey_element['variable'] in data:
                if type(data[survey_element['variable']]) not in (float, int):
                    errors.append("Value %s for '%s' expected to be a numeric type." % (
                        data[survey_element['variable']], survey_element['variable']))
                    return errors
            # Membership guard added (was missing here, present in the
            # integer branch) — absent optional variables no longer KeyError.
            if 'min' in survey_element and survey_element['min'] not in ["", None] and survey_element['variable'] in data and \
                    data[survey_element['variable']] < float(survey_element['min']):
                errors.append("'%s' value %s is too small (must be at least %s)." % (
                    survey_element['variable'], data[survey_element['variable']], survey_element['min']))
            if 'max' in survey_element and survey_element['max'] not in ["", None] and survey_element['variable'] in data and \
                    data[survey_element['variable']] > float(survey_element['max']):
                errors.append("'%s' value %s is too large (must be no more than %s)." % (
                    survey_element['variable'], data[survey_element['variable']], survey_element['max']))

        elif survey_element['type'] == 'multiselect':
            if survey_element['variable'] in data:
                if type(data[survey_element['variable']]) != list:
                    errors.append("'%s' value is expected to be a list." % survey_element['variable'])
                else:
                    # Choices may be stored as a newline-delimited string.
                    choice_list = copy(survey_element['choices'])
                    if isinstance(choice_list, str):
                        choice_list = choice_list.split('\n')
                    for val in data[survey_element['variable']]:
                        if val not in choice_list:
                            errors.append("Value %s for '%s' expected to be one of %s." % (
                                val, survey_element['variable'], choice_list))

        elif survey_element['type'] == 'multiplechoice':
            # Choices may be stored as a newline-delimited string.
            choice_list = copy(survey_element['choices'])
            if isinstance(choice_list, str):
                choice_list = choice_list.split('\n')
            if survey_element['variable'] in data:
                if data[survey_element['variable']] not in choice_list:
                    errors.append("Value %s for '%s' expected to be one of %s." % (
                        data[survey_element['variable']], survey_element['variable'], choice_list))
        return errors

    def _accept_or_ignore_variables(self, data, errors=None, _exclude_errors=()):
        """Partition launch-time variables into (accepted, rejected, errors).

        Survey-governed keys are validated per element; remaining keys are
        accepted wholesale only when ask_variables_on_launch is set.
        """
        survey_is_enabled = (self.survey_enabled and self.survey_spec)
        extra_vars = data.copy()
        if errors is None:
            errors = {}
        rejected = {}
        accepted = {}

        if survey_is_enabled:
            # Check for data violation of survey rules
            survey_errors = []
            for survey_element in self.survey_spec.get("spec", []):
                element_errors = self._survey_element_validation(survey_element, data)
                key = survey_element.get('variable', None)

                if element_errors:
                    survey_errors += element_errors
                    if key is not None and key in extra_vars:
                        rejected[key] = extra_vars.pop(key)
                elif key in extra_vars:
                    accepted[key] = extra_vars.pop(key)
            if survey_errors:
                errors['variables_needed_to_start'] = survey_errors

        if self.ask_variables_on_launch:
            # We can accept all variables
            accepted.update(extra_vars)
            extra_vars = {}

        if extra_vars:
            # Leftover extra_vars, keys provided that are not allowed
            rejected.update(extra_vars)
            # ignored variables does not block manual launch
            if 'prompts' not in _exclude_errors:
                errors['extra_vars'] = [
                    _('Variables {list_of_keys} are not allowed on launch.').format(
                        list_of_keys=', '.join(extra_vars.keys()))
                ]

        return (accepted, rejected, errors)
class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
    """A model representing a Queue/Group of AWX Instances."""

    objects = InstanceGroupManager()

    name = models.CharField(max_length=250, unique=True)
    # Timestamps maintained automatically by Django on insert / every save.
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
    # Instances assigned to this group (directly or via the policy fields below).
    instances = models.ManyToManyField(
        'Instance',
        related_name='rampart_groups',
        editable=False,
        help_text=_('Instances that are members of this InstanceGroup'),
    )
    # True when this group dispatches work to a container backend rather than
    # to fixed member instances.
    is_container_group = models.BooleanField(default=False)
    # Credential used for the container backend; cleared (SET_NULL) if the
    # referenced Credential is deleted.
    credential = models.ForeignKey(
        'Credential',
        related_name='%(class)ss',
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
    )
    # Custom pod spec; wrapped in prevent_search to keep it out of search
    # filters since it may embed sensitive data.
    pod_spec_override = prevent_search(models.TextField(
        blank=True,
        default='',
    ))
    policy_instance_percentage = models.IntegerField(
        default=0,
        help_text=_("Percentage of Instances to automatically assign to this group"))
    policy_instance_minimum = models.IntegerField(
        default=0,
        help_text=_("Static minimum number of Instances to automatically assign to this group"))
    policy_instance_list = JSONBlob(
        default=list,
        blank=True,
        help_text=_("List of exact-match Instances that will always be automatically assigned to this group"))

    # Fields tracked by HasPolicyEditsMixin for policy-change detection.
    POLICY_FIELDS = frozenset(('policy_instance_list', 'policy_instance_minimum', 'policy_instance_percentage'))

    def get_absolute_url(self, request=None):
        """Return the API detail URL for this instance group."""
        return reverse('api:instance_group_detail', kwargs={'pk': self.pk}, request=request)

    @property
    def capacity(self):
        """Sum of the capacity of every member instance."""
        return sum(inst.capacity for inst in self.instances.all())

    @property
    def jobs_running(self):
        """Count of jobs currently running or waiting in this group."""
        return UnifiedJob.objects.filter(status__in=('running', 'waiting'), instance_group=self).count()

    @property
    def jobs_total(self):
        """Count of all jobs, in any state, associated with this group."""
        return UnifiedJob.objects.filter(instance_group=self).count()

    '''
    RelatedJobsMixin
    '''

    def _get_related_jobs(self):
        # RelatedJobsMixin hook: the jobs tied to this group.
        return UnifiedJob.objects.filter(instance_group=self)

    class Meta:
        app_label = 'main'

    @staticmethod
    def fit_task_to_most_remaining_capacity_instance(task, instances, impact=None, capacity_type=None, add_hybrid_control_cost=False):
        """Pick the instance that would retain the most capacity after `task`.

        Only instances whose node_type matches `capacity_type` (or 'hybrid')
        are considered; candidates must have non-negative remaining capacity
        after subtracting the task's impact. Returns None when nothing fits.
        """
        impact = impact if impact else task.task_impact
        capacity_type = capacity_type if capacity_type else task.capacity_type
        instance_most_capacity = None
        most_remaining_capacity = -1
        for i in instances:
            if i.node_type not in (capacity_type, 'hybrid'):
                continue
            would_be_remaining = i.remaining_capacity - impact
            # hybrid nodes _always_ control their own tasks
            if add_hybrid_control_cost and i.node_type == 'hybrid':
                would_be_remaining -= settings.AWX_CONTROL_NODE_TASK_IMPACT
            if would_be_remaining >= 0 and (instance_most_capacity is None or would_be_remaining > most_remaining_capacity):
                instance_most_capacity = i
                most_remaining_capacity = would_be_remaining
        return instance_most_capacity

    @staticmethod
    def find_largest_idle_instance(instances, capacity_type='execution'):
        """Return the highest-capacity idle (zero running jobs) instance.

        Restricted to instances of the given capacity_type or 'hybrid';
        returns None when no matching instance is idle.
        """
        largest_instance = None
        for i in instances:
            if i.node_type not in (capacity_type, 'hybrid'):
                continue
            if i.jobs_running == 0:
                if largest_instance is None:
                    largest_instance = i
                elif i.capacity > largest_instance.capacity:
                    largest_instance = i
        return largest_instance

    def set_default_policy_fields(self):
        """Reset the three policy fields to their declared model defaults."""
        self.policy_instance_list = []
        self.policy_instance_minimum = 0
        self.policy_instance_percentage = 0
class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin): ''' A job template is a reusable job definition for applying a project (with playbook) to an inventory source with a given credential. ''' FIELDS_TO_PRESERVE_AT_COPY = [ 'labels', 'instance_groups', 'credentials', 'survey_spec' ] FIELDS_TO_DISCARD_AT_COPY = ['vault_credential', 'credential'] SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name')] class Meta: app_label = 'main' ordering = ('name', ) host_config_key = prevent_search( models.CharField( max_length=1024, blank=True, default='', )) ask_diff_mode_on_launch = AskForField( blank=True, default=False, ) ask_limit_on_launch = AskForField( blank=True, default=False, ) ask_tags_on_launch = AskForField(blank=True, default=False, allows_field='job_tags') ask_skip_tags_on_launch = AskForField( blank=True, default=False, ) ask_job_type_on_launch = AskForField( blank=True, default=False, ) ask_verbosity_on_launch = AskForField( blank=True, default=False, ) ask_inventory_on_launch = AskForField( blank=True, default=False, ) ask_credential_on_launch = AskForField(blank=True, default=False, allows_field='credentials') job_slice_count = models.PositiveIntegerField( blank=True, default=1, help_text=_( "The number of jobs to slice into at runtime. " "Will cause the Job Template to launch a workflow if value is greater than 1." 
), ) admin_role = ImplicitRoleField(parent_role=[ 'project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role' ]) execute_role = ImplicitRoleField(parent_role=[ 'admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role' ], ) read_role = ImplicitRoleField(parent_role=[ 'project.organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role' ], ) @classmethod def _get_unified_job_class(cls): return Job @classmethod def _get_unified_job_field_names(cls): return set(f.name for f in JobOptions._meta.fields) | set([ 'name', 'description', 'schedule', 'survey_passwords', 'labels', 'credentials', 'job_slice_number', 'job_slice_count' ]) @property def validation_errors(self): ''' Fields needed to start, which cannot be given on launch, invalid state. ''' validation_errors = {} if self.inventory is None and not self.ask_inventory_on_launch: validation_errors['inventory'] = [ _("Job Template must provide 'inventory' or allow prompting for it." ), ] if self.project is None: validation_errors['project'] = [ _("Job Templates must have a project assigned."), ] return validation_errors @property def resources_needed_to_start(self): return [ fd for fd in ['project', 'inventory'] if not getattr(self, '{}_id'.format(fd)) ] def create_job(self, **kwargs): ''' Create a new job based on this template. 
''' return self.create_unified_job(**kwargs) def create_unified_job(self, **kwargs): prevent_slicing = kwargs.pop('_prevent_slicing', False) slice_event = bool(self.job_slice_count > 1 and (not prevent_slicing)) if slice_event: # A Slice Job Template will generate a WorkflowJob rather than a Job from awx.main.models.workflow import WorkflowJobTemplate, WorkflowJobNode kwargs[ '_unified_job_class'] = WorkflowJobTemplate._get_unified_job_class( ) kwargs['_parent_field_name'] = "job_template" kwargs.setdefault('_eager_fields', {}) kwargs['_eager_fields']['is_sliced_job'] = True elif prevent_slicing: kwargs.setdefault('_eager_fields', {}) kwargs['_eager_fields'].setdefault('job_slice_count', 1) job = super(JobTemplate, self).create_unified_job(**kwargs) if slice_event: try: wj_config = job.launch_config except JobLaunchConfig.DoesNotExist: wj_config = JobLaunchConfig() actual_inventory = wj_config.inventory if wj_config.inventory else self.inventory for idx in range( min(self.job_slice_count, actual_inventory.hosts.count())): create_kwargs = dict(workflow_job=job, unified_job_template=self, ancestor_artifacts=dict(job_slice=idx + 1)) WorkflowJobNode.objects.create(**create_kwargs) return job def get_absolute_url(self, request=None): return reverse('api:job_template_detail', kwargs={'pk': self.pk}, request=request) def can_start_without_user_input(self, callback_extra_vars=None): ''' Return whether job template can be used to start a new job without requiring any user input. ''' variables_needed = False if callback_extra_vars: extra_vars_dict = parse_yaml_or_json(callback_extra_vars) for var in self.variables_needed_to_start: if var not in extra_vars_dict: variables_needed = True break elif self.variables_needed_to_start: variables_needed = True prompting_needed = False # The behavior of provisioning callback should mimic # that of job template launch, so prompting_needed should # not block a provisioning callback from creating/launching jobs. 
        # NOTE(review): this fragment is the tail of can_start_without_user_input();
        # the method signature and the initialization of `prompting_needed` /
        # `variables_needed` are outside this chunk.
        # Provisioning-callback launches skip the prompting check: only a direct
        # (non-callback) launch is blocked by ask_*_on_launch flags.
        if callback_extra_vars is None:
            for ask_field_name in set(self.get_ask_mapping().values()):
                if getattr(self, ask_field_name):
                    prompting_needed = True
                    break
        return (not prompting_needed and
                not self.passwords_needed_to_start and
                not variables_needed)

    def _accept_or_ignore_job_kwargs(self, **kwargs):
        """Partition launch-time kwargs into accepted and rejected prompts.

        Returns a 3-tuple ``(prompted_data, rejected_data, errors_dict)``:
        values the template is configured to accept on launch, values it is
        not, and per-field error messages (subject to ``_exclude_errors``).
        """
        exclude_errors = kwargs.pop('_exclude_errors', [])
        prompted_data = {}
        rejected_data = {}
        # extra_vars has its own acceptance rules (survey, ask_variables_on_launch),
        # handled by a dedicated helper rather than the generic loop below.
        accepted_vars, rejected_vars, errors_dict = self.accept_or_ignore_variables(
            kwargs.get('extra_vars', {}),
            _exclude_errors=exclude_errors,
            extra_passwords=kwargs.get('survey_passwords', {}))
        if accepted_vars:
            prompted_data['extra_vars'] = accepted_vars
        if rejected_vars:
            rejected_data['extra_vars'] = rejected_vars

        # Handle all the other fields that follow the simple prompting rule
        for field_name, ask_field_name in self.get_ask_mapping().items():
            if field_name not in kwargs or field_name == 'extra_vars' or kwargs[field_name] is None:
                continue
            new_value = kwargs[field_name]
            old_value = getattr(self, field_name)
            field = self._meta.get_field(field_name)
            if isinstance(field, models.ManyToManyField):
                # Compare many-to-many values as sets; only genuinely new
                # related objects count as a prompt.
                old_value = set(old_value.all())
                if getattr(self, '_deprecated_credential_launch', False):
                    # TODO: remove this code branch when support for `extra_credentials` goes away
                    new_value = set(kwargs[field_name])
                else:
                    new_value = set(kwargs[field_name]) - old_value
                    if not new_value:
                        continue
            if new_value == old_value:
                # no-op case: Fields the same as template's value
                # counted as neither accepted or ignored
                continue
            elif getattr(self, ask_field_name):
                # accepted prompt
                prompted_data[field_name] = new_value
            else:
                # unprompted - template is not configured to accept field on launch
                rejected_data[field_name] = new_value
                # Not considered an error for manual launch, to support old
                # behavior of putting them in ignored_fields and launching anyway
                if 'prompts' not in exclude_errors:
                    # NOTE(review): the message has no '{field_name}' placeholder,
                    # so this .format() call is a no-op — confirm intent upstream.
                    errors_dict[field_name] = _('Field is not configured to prompt on launch.').format(field_name=field_name)

        # Passwords can only be supplied interactively; a saved launch config
        # cannot carry them unless credentials themselves are promptable.
        if ('prompts' not in exclude_errors and
                (not getattr(self, 'ask_credential_on_launch', False)) and
                self.passwords_needed_to_start):
            errors_dict['passwords_needed_to_start'] = _(
                'Saved launch configurations cannot provide passwords needed to start.')

        needed = self.resources_needed_to_start
        if needed:
            needed_errors = []
            for resource in needed:
                if resource in prompted_data:
                    continue
                needed_errors.append(_("Job Template {} is missing or undefined.").format(resource))
            if needed_errors:
                errors_dict['resources_needed_to_start'] = needed_errors

        return prompted_data, rejected_data, errors_dict

    @property
    def cache_timeout_blocked(self):
        # True when this template already has >= SCHEDULE_MAX_JOBS (default 10)
        # unfinished jobs, in which case new launches are refused.
        if Job.objects.filter(job_template=self,
                              status__in=['pending', 'waiting', 'running']).count() >= getattr(settings, 'SCHEDULE_MAX_JOBS', 10):
            logger.error("Job template %s could not be started because there are more than %s other jobs from that template waiting to run" %
                         (self.name, getattr(settings, 'SCHEDULE_MAX_JOBS', 10)))
            return True
        return False

    def _can_update(self):
        # A template can be updated (e.g. by a callback) only when launch
        # requires no interactive user input.
        return self.can_start_without_user_input()

    @property
    def notification_templates(self):
        # Return all notification_templates defined on the Job Template, on the Project, and on the Organization for each trigger type
        # TODO: Currently there is no org fk on project so this will need to be added once that is
        # available after the rbac pr
        base_notification_templates = NotificationTemplate.objects
        error_notification_templates = list(base_notification_templates.filter(
            unifiedjobtemplate_notification_templates_for_errors__in=[self, self.project]))
        success_notification_templates = list(base_notification_templates.filter(
            unifiedjobtemplate_notification_templates_for_success__in=[self, self.project]))
        any_notification_templates = list(base_notification_templates.filter(
            unifiedjobtemplate_notification_templates_for_any__in=[self, self.project]))
        # Get Organization NotificationTemplates
        if self.project is not None and self.project.organization is not None:
            # set() union de-duplicates templates attached at both levels
            error_notification_templates = set(error_notification_templates + list(base_notification_templates.filter(
                organization_notification_templates_for_errors=self.project.organization)))
            success_notification_templates = set(success_notification_templates + list(base_notification_templates.filter(
                organization_notification_templates_for_success=self.project.organization)))
            any_notification_templates = set(any_notification_templates + list(base_notification_templates.filter(
                organization_notification_templates_for_any=self.project.organization)))
        return dict(error=list(error_notification_templates),
                    success=list(success_notification_templates),
                    any=list(any_notification_templates))

    '''
    RelatedJobsMixin
    '''

    def _get_related_jobs(self):
        # All unified jobs spawned from this template.
        return UnifiedJob.objects.filter(unified_job_template=self)
# Register every model whose changes should be recorded in the activity
# stream.  ProjectUpdate, JobHostSummary, JobEvent and Profile are
# deliberately left out of this list.
for _tracked_model in (
    Team,
    Project,
    ExecutionEnvironment,
    JobTemplate,
    Job,
    AdHocCommand,
    Schedule,
    NotificationTemplate,
    Notification,
    Label,
    User,
    WorkflowJobTemplate,
    WorkflowJobTemplateNode,
    WorkflowJob,
    WorkflowApproval,
    WorkflowApprovalTemplate,
    OAuth2Application,
    OAuth2AccessToken,
):
    activity_stream_registrar.connect(_tracked_model)

# prevent API filtering on certain Django-supplied sensitive fields
for _sensitive_model, _sensitive_field_name in (
    (User, 'password'),
    (OAuth2AccessToken, 'token'),
    (RefreshToken, 'token'),
    (OAuth2Application, 'client_secret'),
    (OAuth2Application, 'client_id'),
    (Grant, 'code'),
):
    prevent_search(_sensitive_model._meta.get_field(_sensitive_field_name))
class NotificationTemplate(CommonModelNameNotUnique):
    """Reusable configuration for sending notifications of one backend type.

    Stores the backend type, its (partially encrypted) configuration, and
    optional per-event custom message templates.
    """

    # (type key, display label, backend class) for every supported backend.
    NOTIFICATION_TYPES = [('email', _('Email'), CustomEmailBackend),
                          ('slack', _('Slack'), SlackBackend),
                          ('twilio', _('Twilio'), TwilioBackend),
                          ('pagerduty', _('Pagerduty'), PagerDutyBackend),
                          ('grafana', _('Grafana'), GrafanaBackend),
                          ('hipchat', _('HipChat'), HipChatBackend),
                          ('webhook', _('Webhook'), WebhookBackend),
                          ('mattermost', _('Mattermost'), MattermostBackend),
                          ('rocketchat', _('Rocket.Chat'), RocketChatBackend),
                          ('irc', _('IRC'), IrcBackend)]
    # Derived lookups: model-field choices and type-key -> backend class.
    NOTIFICATION_TYPE_CHOICES = sorted([(x[0], x[1]) for x in NOTIFICATION_TYPES])
    CLASS_FOR_NOTIFICATION_TYPE = dict([(x[0], x[2]) for x in NOTIFICATION_TYPES])

    class Meta:
        app_label = 'main'
        unique_together = ('organization', 'name')
        ordering = ("name", )

    organization = models.ForeignKey(
        'Organization',
        blank=False,
        null=True,
        on_delete=models.CASCADE,
        related_name='notification_templates',
    )

    notification_type = models.CharField(
        max_length=32,
        choices=NOTIFICATION_TYPE_CHOICES,
    )

    # Backend-specific settings; wrapped in prevent_search because password
    # sub-fields are stored here (encrypted at save time).
    notification_configuration = prevent_search(JSONField(blank=False))

    def default_messages():
        # Default factory for `messages` (note: deliberately no `self`;
        # Django calls it with no arguments as a field default).
        return {'started': None, 'success': None, 'error': None, 'workflow_approval': None}

    messages = JSONField(
        null=True,
        blank=True,
        default=default_messages,
        help_text=_('Optional custom messages for notification template.'))

    def has_message(self, condition):
        """Return True if a custom message body exists for `condition`.

        NOTE(review): assumes self.messages is a dict — the field is
        null=True, so this raises AttributeError if messages is None; confirm
        callers guard against that.
        """
        potential_template = self.messages.get(condition, {})
        if potential_template == {}:
            return False
        if potential_template.get('message', {}) == {}:
            return False
        return True

    def get_message(self, condition):
        # Returns the message dict for `condition`, or {} if unset.
        return self.messages.get(condition, {})

    def get_absolute_url(self, request=None):
        return reverse('api:notification_template_detail', kwargs={'pk': self.pk}, request=request)

    @property
    def notification_class(self):
        # Backend class (e.g. SlackBackend) matching this template's type.
        return self.CLASS_FOR_NOTIFICATION_TYPE[self.notification_type]

    def save(self, *args, **kwargs):
        """Save, merging prior custom messages and encrypting password fields.

        Two-phase create: password values are stashed on the instance and
        blanked for the initial INSERT, then restored and re-saved (encrypted
        on that second pass) once a pk exists for encrypt_field().
        """
        new_instance = not bool(self.pk)
        update_fields = kwargs.get('update_fields', [])

        # preserve existing notification messages if not overwritten by new messages
        if not new_instance:
            old_nt = NotificationTemplate.objects.get(pk=self.id)
            old_messages = old_nt.messages
            new_messages = self.messages

            def merge_messages(local_old_messages, local_new_messages, local_event):
                # Copy 'message'/'body' from the old event dict into the new
                # one wherever the new dict leaves them unset.
                if local_new_messages.get(local_event, {}) and local_old_messages.get(local_event, {}):
                    local_old_event_msgs = local_old_messages[local_event]
                    local_new_event_msgs = local_new_messages[local_event]
                    for msg_type in ['message', 'body']:
                        if msg_type not in local_new_event_msgs and local_old_event_msgs.get(msg_type, None):
                            local_new_event_msgs[msg_type] = local_old_event_msgs[msg_type]
            if old_messages is not None and new_messages is not None:
                for event in ('started', 'success', 'error', 'workflow_approval'):
                    # Whole event dict missing in the update: keep the old one.
                    if not new_messages.get(event, {}) and old_messages.get(event, {}):
                        new_messages[event] = old_messages[event]
                        continue
                    if event == 'workflow_approval' and old_messages.get('workflow_approval', None):
                        # workflow_approval nests one level deeper (per sub-event).
                        new_messages.setdefault('workflow_approval', {})
                        for subevent in ('running', 'approved', 'timed_out', 'denied'):
                            old_wfa_messages = old_messages['workflow_approval']
                            new_wfa_messages = new_messages['workflow_approval']
                            if not new_wfa_messages.get(subevent, {}) and old_wfa_messages.get(subevent, {}):
                                new_wfa_messages[subevent] = old_wfa_messages[subevent]
                                continue
                            if old_wfa_messages:
                                merge_messages(old_wfa_messages, new_wfa_messages, subevent)
                    else:
                        merge_messages(old_messages, new_messages, event)
                    new_messages.setdefault(event, None)

        # Encrypt (or stash, on create) every backend parameter of type "password".
        for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
                            self.notification_class.init_parameters):
            # NOTE(review): raises KeyError if a declared password field is
            # absent from notification_configuration — confirm serializers
            # always populate it.
            if self.notification_configuration[field].startswith("$encrypted$"):
                # Already-encrypted sentinel value; leave untouched.
                continue
            if new_instance:
                value = self.notification_configuration[field]
                setattr(self, '_saved_{}_{}'.format("config", field), value)
                self.notification_configuration[field] = ''
            else:
                encrypted = encrypt_field(self, 'notification_configuration', subfield=field)
                self.notification_configuration[field] = encrypted
                if 'notification_configuration' not in update_fields:
                    update_fields.append('notification_configuration')
        super(NotificationTemplate, self).save(*args, **kwargs)
        # Second save pass for new instances: restore stashed password values
        # now that the row (and pk) exist.
        if new_instance:
            update_fields = []
            for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
                                self.notification_class.init_parameters):
                saved_value = getattr(self, '_saved_{}_{}'.format("config", field), '')
                self.notification_configuration[field] = saved_value
                if 'notification_configuration' not in update_fields:
                    update_fields.append('notification_configuration')
            self.save(update_fields=update_fields)

    @property
    def recipients(self):
        # The configured recipient value, keyed by the backend's declared
        # recipient parameter name.
        return self.notification_configuration[self.notification_class.recipient_parameter]

    def generate_notification(self, msg, body):
        """Create and persist a Notification record for this template."""
        notification = Notification(notification_template=self,
                                    notification_type=self.notification_type,
                                    recipients=smart_str(self.recipients),
                                    subject=msg,
                                    body=body)
        notification.save()
        return notification

    def send(self, subject, body):
        """Decrypt config, build the backend, and send one message.

        NOTE(review): mutates self.notification_configuration in place
        (decrypts passwords, pops recipient/sender) — this instance should
        not be saved afterwards.
        """
        for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
                            self.notification_class.init_parameters):
            if field in self.notification_configuration:
                self.notification_configuration[field] = decrypt_field(self, 'notification_configuration', subfield=field)
        recipients = self.notification_configuration.pop(self.notification_class.recipient_parameter)
        if not isinstance(recipients, list):
            recipients = [recipients]
        sender = self.notification_configuration.pop(self.notification_class.sender_parameter, None)
        notification_configuration = deepcopy(self.notification_configuration)
        # Fill in backend-declared defaults for any unset parameters.
        for field, params in self.notification_class.init_parameters.items():
            if field not in notification_configuration:
                if 'default' in params:
                    notification_configuration[field] = params['default']
        backend_obj = self.notification_class(**notification_configuration)
        notification_obj = EmailMessage(subject, backend_obj.format_body(body), sender, recipients)
        # Apply task-level environment (e.g. proxy settings) only for the send.
        with set_environ(**settings.AWX_TASK_ENV):
            return backend_obj.send_messages([notification_obj])

    def display_notification_configuration(self):
        """Return a copy of the config safe for display: encrypted values
        are masked with the '$encrypted$' sentinel."""
        field_val = self.notification_configuration.copy()
        for field in self.notification_class.init_parameters:
            if field in field_val and force_text(field_val[field]).startswith('$encrypted$'):
                field_val[field] = '$encrypted$'
        return field_val
class SurveyJobTemplateMixin(models.Model):
    """Mixin adding survey (launch-time questionnaire) support to templates.

    NOTE(review): this class appears to continue beyond the end of this
    chunk; only the methods visible here are documented.
    """

    class Meta:
        abstract = True

    # Whether the survey is active; survey_spec holds the question list under
    # its 'spec' key.  prevent_search: specs may contain password defaults.
    survey_enabled = models.BooleanField(default=False, )
    survey_spec = prevent_search(JSONField(
        blank=True,
        default={},
    ))

    def survey_password_variables(self):
        """Return the variable names of all password-type survey questions."""
        vars = []
        if self.survey_enabled and 'spec' in self.survey_spec:
            # Get variables that are type password
            for survey_element in self.survey_spec['spec']:
                if survey_element['type'] == 'password':
                    vars.append(survey_element['variable'])
        return vars

    @property
    def variables_needed_to_start(self):
        # Variable names of all required survey questions (empty when the
        # survey is disabled or has no spec).
        vars = []
        if self.survey_enabled and 'spec' in self.survey_spec:
            for survey_element in self.survey_spec['spec']:
                if survey_element['required']:
                    vars.append(survey_element['variable'])
        return vars

    def _update_unified_job_kwargs(self, create_kwargs, kwargs):
        '''
        Combine extra_vars with variable precedence order:
          JT extra_vars -> JT survey defaults -> runtime extra_vars

        :param create_kwargs: key-worded arguments to be updated and later used for creating unified job.
        :type create_kwargs: dict
        :param kwargs: request parameters used to override unified job template fields with runtime values.
        :type kwargs: dict
        :return: modified create_kwargs.
        :rtype: dict
        '''
        # Job Template extra_vars
        extra_vars = self.extra_vars_dict

        survey_defaults = {}

        # transform to dict
        if 'extra_vars' in kwargs:
            runtime_extra_vars = kwargs['extra_vars']
            runtime_extra_vars = parse_yaml_or_json(runtime_extra_vars)
        else:
            runtime_extra_vars = {}

        # Overwrite with job template extra vars with survey default vars
        if self.survey_enabled and 'spec' in self.survey_spec:
            for survey_element in self.survey_spec.get("spec", []):
                default = survey_element.get('default')
                variable_key = survey_element.get('variable')
                if survey_element.get('type') == 'password':
                    # A '$encrypted$' placeholder sent at runtime means "keep
                    # the stored value": substitute the survey default.
                    if variable_key in runtime_extra_vars and default:
                        kw_value = runtime_extra_vars[variable_key]
                        if kw_value.startswith('$encrypted$') and kw_value != default:
                            runtime_extra_vars[variable_key] = default
                if default is not None:
                    # Only apply defaults that pass their own validation rules.
                    data = {variable_key: default}
                    errors = self._survey_element_validation(survey_element, data)
                    if not errors:
                        survey_defaults[variable_key] = default
        extra_vars.update(survey_defaults)

        # Overwrite job template extra vars with explicit job extra vars
        # and add on job extra vars
        extra_vars.update(runtime_extra_vars)
        create_kwargs['extra_vars'] = json.dumps(extra_vars)

        return create_kwargs

    def _survey_element_validation(self, survey_element, data):
        """Validate one survey answer in `data` against its spec element.

        Returns a list of error message strings (empty when valid).

        NOTE(review): references `unicode` and `basestring` — Python 2
        builtins; on Python 3 these must be provided by a compatibility
        import elsewhere in this file, otherwise this raises NameError.
        """
        errors = []

        if survey_element['variable'] not in data and survey_element['required']:
            errors.append("'%s' value missing" % survey_element['variable'])
        elif survey_element['type'] in ["textarea", "text", "password"]:
            if survey_element['variable'] in data:
                if type(data[survey_element['variable']]) not in (str, unicode):
                    errors.append("Value %s for '%s' expected to be a string." % (data[survey_element['variable']],
                                                                                  survey_element['variable']))
                    # Skip min/max: they are meaningless for a non-string.
                    return errors

                if 'min' in survey_element and survey_element['min'] not in ["", None] and len(data[survey_element['variable']]) < int(survey_element['min']):
                    errors.append("'%s' value %s is too small (length is %s must be at least %s)." %
                                  (survey_element['variable'], data[survey_element['variable']],
                                   len(data[survey_element['variable']]), survey_element['min']))
                if 'max' in survey_element and survey_element['max'] not in ["", None] and len(data[survey_element['variable']]) > int(survey_element['max']):
                    errors.append("'%s' value %s is too large (must be no more than %s)." %
                                  (survey_element['variable'], data[survey_element['variable']], survey_element['max']))

        elif survey_element['type'] == 'integer':
            if survey_element['variable'] in data:
                if type(data[survey_element['variable']]) != int:
                    errors.append("Value %s for '%s' expected to be an integer." % (data[survey_element['variable']],
                                                                                    survey_element['variable']))
                    return errors
                if 'min' in survey_element and survey_element['min'] not in ["", None] and survey_element['variable'] in data and \
                        data[survey_element['variable']] < int(survey_element['min']):
                    errors.append("'%s' value %s is too small (must be at least %s)." %
                                  (survey_element['variable'], data[survey_element['variable']], survey_element['min']))
                if 'max' in survey_element and survey_element['max'] not in ["", None] and survey_element['variable'] in data and \
                        data[survey_element['variable']] > int(survey_element['max']):
                    errors.append("'%s' value %s is too large (must be no more than %s)." %
                                  (survey_element['variable'], data[survey_element['variable']], survey_element['max']))
        elif survey_element['type'] == 'float':
            if survey_element['variable'] in data:
                # int is accepted for float questions.
                if type(data[survey_element['variable']]) not in (float, int):
                    errors.append("Value %s for '%s' expected to be a numeric type." % (data[survey_element['variable']],
                                                                                       survey_element['variable']))
                    return errors
                if 'min' in survey_element and survey_element['min'] not in ["", None] and data[survey_element['variable']] < float(survey_element['min']):
                    errors.append("'%s' value %s is too small (must be at least %s)." %
                                  (survey_element['variable'], data[survey_element['variable']], survey_element['min']))
                if 'max' in survey_element and survey_element['max'] not in ["", None] and data[survey_element['variable']] > float(survey_element['max']):
                    errors.append("'%s' value %s is too large (must be no more than %s)." %
                                  (survey_element['variable'], data[survey_element['variable']], survey_element['max']))
        elif survey_element['type'] == 'multiselect':
            if survey_element['variable'] in data:
                if type(data[survey_element['variable']]) != list:
                    errors.append("'%s' value is expected to be a list." % survey_element['variable'])
                else:
                    # Choices may be stored as a newline-delimited string.
                    choice_list = copy(survey_element['choices'])
                    if isinstance(choice_list, basestring):
                        choice_list = choice_list.split('\n')
                    for val in data[survey_element['variable']]:
                        if val not in choice_list:
                            errors.append("Value %s for '%s' expected to be one of %s." % (val, survey_element['variable'],
                                                                                          choice_list))
        elif survey_element['type'] == 'multiplechoice':
            choice_list = copy(survey_element['choices'])
            if isinstance(choice_list, basestring):
                choice_list = choice_list.split('\n')
            if survey_element['variable'] in data:
                if data[survey_element['variable']] not in choice_list:
                    errors.append("Value %s for '%s' expected to be one of %s." % (data[survey_element['variable']],
                                                                                  survey_element['variable'], choice_list))
        return errors

    def survey_variable_validation(self, data):
        """Validate all survey answers in `data`; returns error strings."""
        errors = []
        if not self.survey_enabled:
            return errors
        if 'name' not in self.survey_spec:
            errors.append("'name' missing from survey spec.")
        if 'description' not in self.survey_spec:
            errors.append("'description' missing from survey spec.")
        for survey_element in self.survey_spec.get("spec", []):
            errors += self._survey_element_validation(survey_element, data)
        return errors