class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTemplateMixin, ResourceMixin, RelatedJobsMixin, WebhookTemplateMixin):
    '''
    A workflow job template stores a reusable graph of workflow nodes (each
    node referencing another unified job template) and spawns WorkflowJobs
    that copy that graph at launch time.
    '''

    # Name must be unique per organization for this polymorphic type.
    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]
    # Related objects carried over when this template is copied.
    FIELDS_TO_PRESERVE_AT_COPY = [
        'labels', 'instance_groups', 'workflow_job_template_nodes', 'credentials', 'survey_spec'
    ]

    class Meta:
        app_label = 'main'

    organization = models.ForeignKey(
        'Organization',
        blank=True,
        null=True,
        on_delete=models.SET_NULL,  # orphan the template rather than delete it with its org
        related_name='workflows',
    )
    # ask_* flags control which fields a launcher may override at launch time
    # (consumed via get_ask_mapping() in _accept_or_ignore_job_kwargs below).
    ask_inventory_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_limit_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_scm_branch_on_launch = AskForField(
        blank=True,
        default=False,
    )
    notification_templates_approvals = models.ManyToManyField(
        "NotificationTemplate",
        blank=True,
        related_name='%(class)s_notification_templates_for_approvals')
    # RBAC role fields; parent_role entries grant this role implicitly.
    admin_role = ImplicitRoleField(parent_role=[
        'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
        'organization.workflow_admin_role'
    ])
    execute_role = ImplicitRoleField(parent_role=[
        'admin_role', 'organization.execute_role',
    ])
    read_role = ImplicitRoleField(parent_role=[
        'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
        'organization.auditor_role', 'execute_role', 'admin_role',
        'approval_role',
    ])
    approval_role = ImplicitRoleField(parent_role=[
        'organization.approval_role', 'admin_role',
    ])

    @property
    def workflow_nodes(self):
        # Alias for the reverse relation from WorkflowJobTemplateNode.
        return self.workflow_job_template_nodes

    @classmethod
    def _get_unified_job_class(cls):
        # The unified-job counterpart spawned by this template.
        return WorkflowJob

    @classmethod
    def _get_unified_jt_copy_names(cls):
        base_list = super(WorkflowJobTemplate, cls)._get_unified_jt_copy_names()
        # 'labels' is handled separately via FIELDS_TO_PRESERVE_AT_COPY.
        base_list.remove('labels')
        return (base_list | set([
            'survey_spec', 'survey_enabled', 'ask_variables_on_launch', 'organization'
        ]))

    def get_absolute_url(self, request=None):
        return reverse('api:workflow_job_template_detail', kwargs={'pk': self.pk}, request=request)

    @property
    def cache_timeout_blocked(self):
        '''True (and logged) if too many jobs from this template are already queued/running.'''
        if WorkflowJob.objects.filter(workflow_job_template=self, status__in=[
                'pending', 'waiting', 'running'
        ]).count() >= getattr(settings, 'SCHEDULE_MAX_JOBS', 10):
            logger.error(
                "Workflow Job template %s could not be started because there are more than %s other jobs from that template waiting to run" % (self.name, getattr(settings, 'SCHEDULE_MAX_JOBS', 10)))
            return True
        return False

    @property
    def notification_templates(self):
        '''
        Return a dict of lists of NotificationTemplates, keyed by trigger
        (error/started/success/approvals), merging templates attached directly
        to this WFJT with those attached to its organization.
        '''
        base_notification_templates = NotificationTemplate.objects.all()
        error_notification_templates = list(
            base_notification_templates.filter(
                unifiedjobtemplate_notification_templates_for_errors__in=[self]))
        started_notification_templates = list(
            base_notification_templates.filter(
                unifiedjobtemplate_notification_templates_for_started__in=[self]))
        success_notification_templates = list(
            base_notification_templates.filter(
                unifiedjobtemplate_notification_templates_for_success__in=[self]))
        approval_notification_templates = list(
            base_notification_templates.filter(
                workflowjobtemplate_notification_templates_for_approvals__in=[self]))
        # Get Organization NotificationTemplates
        if self.organization is not None:
            # set() de-duplicates templates attached at both levels.
            error_notification_templates = set(
                error_notification_templates + list(
                    base_notification_templates.filter(
                        organization_notification_templates_for_errors=self.organization)))
            started_notification_templates = set(
                started_notification_templates + list(
                    base_notification_templates.filter(
                        organization_notification_templates_for_started=self.organization)))
            success_notification_templates = set(
                success_notification_templates + list(
                    base_notification_templates.filter(
                        organization_notification_templates_for_success=self.organization)))
            approval_notification_templates = set(
                approval_notification_templates + list(
                    base_notification_templates.filter(
                        organization_notification_templates_for_approvals=self.organization)))
        return dict(error=list(error_notification_templates),
                    started=list(started_notification_templates),
                    success=list(success_notification_templates),
                    approvals=list(approval_notification_templates))

    def create_unified_job(self, **kwargs):
        '''Create a WorkflowJob and copy this template's node graph onto it.'''
        workflow_job = super(WorkflowJobTemplate, self).create_unified_job(**kwargs)
        workflow_job.copy_nodes_from_original(original=self)
        return workflow_job

    def _accept_or_ignore_job_kwargs(self, **kwargs):
        '''
        Partition launch-time kwargs into (prompted_data, rejected_data,
        errors_dict) according to the template's ask_* configuration and
        survey rules.
        '''
        exclude_errors = kwargs.pop('_exclude_errors', [])
        prompted_data = {}
        rejected_data = {}
        errors_dict = {}

        # Handle all the fields that have prompting rules
        # NOTE: If WFJTs prompt for other things, this logic can be combined with jobs
        for field_name, ask_field_name in self.get_ask_mapping().items():
            if field_name == 'extra_vars':
                # extra_vars go through survey/variable validation instead of
                # the simple equality check used for the other fields.
                accepted_vars, rejected_vars, vars_errors = self.accept_or_ignore_variables(
                    kwargs.get('extra_vars', {}),
                    _exclude_errors=exclude_errors,
                    extra_passwords=kwargs.get('survey_passwords', {}))
                if accepted_vars:
                    prompted_data['extra_vars'] = accepted_vars
                if rejected_vars:
                    rejected_data['extra_vars'] = rejected_vars
                errors_dict.update(vars_errors)
                continue
            if field_name not in kwargs:
                continue
            new_value = kwargs[field_name]
            old_value = getattr(self, field_name)
            if new_value == old_value:
                # no-op case: Counted as neither accepted or ignored
                continue
            elif getattr(self, ask_field_name):
                # accepted prompt
                prompted_data[field_name] = new_value
            else:
                # unprompted - template is not configured to accept field on launch
                rejected_data[field_name] = new_value
                # Not considered an error for manual launch, to support old
                # behavior of putting them in ignored_fields and launching anyway
                if 'prompts' not in exclude_errors:
                    errors_dict[field_name] = _(
                        'Field is not configured to prompt on launch.').format(
                            field_name=field_name)

        return prompted_data, rejected_data, errors_dict

    def can_start_without_user_input(self):
        # Launchable without input unless required survey variables are missing.
        return not bool(self.variables_needed_to_start)

    def node_templates_missing(self):
        '''Return pks of nodes whose referenced unified job template was deleted.'''
        return [
            node.pk
            for node in self.workflow_job_template_nodes.filter(
                unified_job_template__isnull=True).all()
        ]

    def node_prompts_rejected(self):
        '''Return pks of nodes whose saved prompts are rejected by their target template.'''
        node_list = []
        for node in self.workflow_job_template_nodes.prefetch_related(
                'unified_job_template').all():
            ujt_obj = node.unified_job_template
            if ujt_obj is None:
                continue
            prompts_dict = node.prompts_dict()
            accepted_fields, ignored_fields, prompts_errors = ujt_obj._accept_or_ignore_job_kwargs(
                **prompts_dict)
            if prompts_errors:
                node_list.append(node.pk)
        return node_list

    '''
    RelatedJobsMixin
    '''
    def _get_related_jobs(self):
        return WorkflowJob.objects.filter(workflow_job_template=self)
class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin):
    '''
    An organization is the basic unit of multi-tenancy divisions
    '''

    class Meta:
        app_label = 'main'
        ordering = ('name',)

    # Ordered M2M: membership order is preserved via the explicit through model.
    instance_groups = OrderedManyToManyField(
        'InstanceGroup',
        blank=True,
        through='OrganizationInstanceGroupMembership'
    )
    galaxy_credentials = OrderedManyToManyField(
        'Credential',
        blank=True,
        through='OrganizationGalaxyCredentialMembership',
        related_name='%(class)s_galaxy_credentials'
    )
    # 0 means "no limit" — presumably enforced elsewhere; TODO confirm against
    # the host-count enforcement code.
    max_hosts = models.PositiveIntegerField(
        blank=True,
        default=0,
        help_text=_('Maximum number of hosts allowed to be managed by this organization.'),
    )
    notification_templates_approvals = models.ManyToManyField(
        "NotificationTemplate",
        blank=True,
        related_name='%(class)s_notification_templates_for_approvals'
    )
    default_environment = models.ForeignKey(
        'ExecutionEnvironment',
        null=True,
        blank=True,
        default=None,
        on_delete=models.SET_NULL,  # keep the org if the EE is deleted
        related_name='+',  # no reverse relation needed
        help_text=_('The default execution environment for jobs run by this organization.'),
    )
    # RBAC role tree: resource-specific admin roles all descend from admin_role,
    # and all of them (plus auditor/member/execute) grant read_role.
    admin_role = ImplicitRoleField(
        parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
    )
    execute_role = ImplicitRoleField(
        parent_role='admin_role',
    )
    project_admin_role = ImplicitRoleField(
        parent_role='admin_role',
    )
    inventory_admin_role = ImplicitRoleField(
        parent_role='admin_role',
    )
    credential_admin_role = ImplicitRoleField(
        parent_role='admin_role',
    )
    workflow_admin_role = ImplicitRoleField(
        parent_role='admin_role',
    )
    notification_admin_role = ImplicitRoleField(
        parent_role='admin_role',
    )
    job_template_admin_role = ImplicitRoleField(
        parent_role='admin_role',
    )
    execution_environment_admin_role = ImplicitRoleField(
        parent_role='admin_role',
    )
    auditor_role = ImplicitRoleField(
        parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
    )
    member_role = ImplicitRoleField(
        parent_role=['admin_role']
    )
    read_role = ImplicitRoleField(
        parent_role=['member_role', 'auditor_role', 'execute_role', 'project_admin_role',
                     'inventory_admin_role', 'workflow_admin_role', 'notification_admin_role',
                     'credential_admin_role', 'job_template_admin_role', 'approval_role',
                     'execution_environment_admin_role',],
    )
    approval_role = ImplicitRoleField(
        parent_role='admin_role',
    )

    def get_absolute_url(self, request=None):
        return reverse('api:organization_detail', kwargs={'pk': self.pk}, request=request)

    '''
    RelatedJobsMixin
    '''
    def _get_related_jobs(self):
        # All unified jobs in this org, regardless of concrete job subclass.
        return UnifiedJob.objects.non_polymorphic().filter(organization=self)
class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin):
    '''
    A project represents a playbook git repo that can access a set of inventories
    '''

    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]
    FIELDS_TO_PRESERVE_AT_COPY = ['labels', 'instance_groups', 'credentials']
    FIELDS_TO_DISCARD_AT_COPY = ['local_path']
    # Changing any of these SCM fields triggers an automatic update in save().
    FIELDS_TRIGGER_UPDATE = frozenset(['scm_url', 'scm_branch', 'scm_type'])

    class Meta:
        app_label = 'main'
        ordering = ('id',)

    organization = models.ForeignKey(
        'Organization',
        blank=True,
        null=True,
        on_delete=models.CASCADE,  # projects are deleted with their organization
        related_name='projects',
    )
    scm_update_on_launch = models.BooleanField(
        default=False,
        help_text=_('Update the project when a job is launched that uses the project.'),
    )
    scm_update_cache_timeout = models.PositiveIntegerField(
        default=0,
        blank=True,
        # FIX: added the missing space between the concatenated fragments
        # ("a newproject update" -> "a new project update").
        help_text=_('The number of seconds after the last project update ran that a new '
                    'project update will be launched as a job dependency.'),
    )
    scm_revision = models.CharField(
        max_length=1024,
        blank=True,
        default='',
        editable=False,
        verbose_name=_('SCM Revision'),
        help_text=_('The last revision fetched by a project update'),
    )
    playbook_files = JSONField(
        blank=True,
        # FIX: callable default instead of a shared mutable [] literal
        # (Django system check fields.W161).
        default=list,
        editable=False,
        verbose_name=_('Playbook Files'),
        help_text=_('List of playbooks found in the project'),
    )
    inventory_files = JSONField(
        blank=True,
        # FIX: callable default instead of a shared mutable [] literal.
        default=list,
        editable=False,
        verbose_name=_('Inventory Files'),
        help_text=_('Suggested list of content that could be Ansible inventory in the project'),
    )
    admin_role = ImplicitRoleField(parent_role=[
        'organization.project_admin_role',
        'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
    ])
    use_role = ImplicitRoleField(
        parent_role='admin_role',
    )
    update_role = ImplicitRoleField(
        parent_role='admin_role',
    )
    read_role = ImplicitRoleField(parent_role=[
        'organization.auditor_role',
        'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
        'use_role', 'update_role',
    ])

    @classmethod
    def _get_unified_job_class(cls):
        # The unified-job counterpart spawned by this template.
        return ProjectUpdate

    @classmethod
    def _get_unified_job_field_names(cls):
        return set(f.name for f in ProjectOptions._meta.fields) | set(
            ['name', 'description', 'schedule']
        )

    def save(self, *args, **kwargs):
        '''
        Save the project, maintaining the auto-generated SCM local_path and
        kicking off an initial/refresh project update when SCM settings change.
        '''
        new_instance = not bool(self.pk)
        pre_save_vals = getattr(self, '_prior_values_store', {})
        # If update_fields has been specified, add our field names to it,
        # if it hasn't been specified, then we're just doing a normal save.
        update_fields = kwargs.get('update_fields', [])
        skip_update = bool(kwargs.pop('skip_update', False))
        # Create auto-generated local path if project uses SCM.
        if self.pk and self.scm_type and not self.local_path.startswith('_'):
            slug_name = slugify(str(self.name)).replace(u'-', u'_')
            self.local_path = u'_%d__%s' % (int(self.pk), slug_name)
            if 'local_path' not in update_fields:
                update_fields.append('local_path')
        # Do the actual save.
        super(Project, self).save(*args, **kwargs)
        if new_instance:
            update_fields = []
            # Generate local_path for SCM after initial save (so we have a PK).
            if self.scm_type and not self.local_path.startswith('_'):
                update_fields.append('local_path')
            if update_fields:
                from awx.main.signals import disable_activity_stream
                with disable_activity_stream():
                    self.save(update_fields=update_fields)
        # If we just created a new project with SCM, start the initial update,
        # also update if certain fields have changed.
        relevant_change = any(
            pre_save_vals.get(fd_name, None) != self._prior_values_store.get(fd_name, None)
            for fd_name in self.FIELDS_TRIGGER_UPDATE
        )
        if (relevant_change or new_instance) and (not skip_update) and self.scm_type:
            self.update()

    def _get_current_status(self):
        '''Derive the project status shown in the API from its update jobs / path.'''
        if self.scm_type:
            if self.current_job and self.current_job.status:
                return self.current_job.status
            elif not self.last_job:
                return 'never updated'
            else:
                # Inherit the last update's status, whether failed or successful.
                # (The original spelled this as two identical branches on
                # last_job_failed; they are merged here.)
                return self.last_job.status
        elif not self.get_project_path():
            return 'missing'
        else:
            return 'ok'

    def _get_last_job_run(self):
        '''Return when the project last "ran": last update finish time for SCM
        projects, else the mtime of the manual project directory (or None).'''
        if self.scm_type and self.last_job:
            return self.last_job.finished
        else:
            project_path = self.get_project_path()
            if project_path:
                try:
                    mtime = os.path.getmtime(smart_str(project_path))
                    dt = datetime.datetime.fromtimestamp(mtime)
                    return make_aware(dt, get_default_timezone())
                except OSError:  # os.error is just an alias for OSError
                    pass

    def _can_update(self):
        # Only SCM-backed projects can run project updates.
        return bool(self.scm_type)

    def create_project_update(self, **kwargs):
        return self.create_unified_job(**kwargs)

    @property
    def cache_timeout_blocked(self):
        '''True while the cache-timeout window since the last update is still open.'''
        if not self.last_job_run:
            return False
        if (self.last_job_run + datetime.timedelta(seconds=self.scm_update_cache_timeout)) > now():
            return True
        return False

    @property
    def needs_update_on_launch(self):
        '''True if launching a job using this project should first run an update.'''
        if self.scm_type and self.scm_update_on_launch:
            if not self.last_job_run:
                return True
            if (self.last_job_run + datetime.timedelta(seconds=self.scm_update_cache_timeout)) <= now():
                return True
        return False

    @property
    def notification_templates(self):
        '''
        Return dict of NotificationTemplate lists keyed by trigger
        (error/success/any), merging project-level and organization-level
        attachments (set() de-duplicates overlap).
        '''
        base_notification_templates = NotificationTemplate.objects
        error_notification_templates = list(base_notification_templates
                                            .filter(unifiedjobtemplate_notification_templates_for_errors=self))
        success_notification_templates = list(base_notification_templates
                                              .filter(unifiedjobtemplate_notification_templates_for_success=self))
        any_notification_templates = list(base_notification_templates
                                          .filter(unifiedjobtemplate_notification_templates_for_any=self))
        # Get Organization NotificationTemplates
        if self.organization is not None:
            error_notification_templates = set(error_notification_templates + list(base_notification_templates
                                               .filter(organization_notification_templates_for_errors=self.organization)))
            success_notification_templates = set(success_notification_templates + list(base_notification_templates
                                                 .filter(organization_notification_templates_for_success=self.organization)))
            any_notification_templates = set(any_notification_templates + list(base_notification_templates
                                             .filter(organization_notification_templates_for_any=self.organization)))
        return dict(error=list(error_notification_templates),
                    success=list(success_notification_templates),
                    any=list(any_notification_templates))

    def get_absolute_url(self, request=None):
        return reverse('api:project_detail', kwargs={'pk': self.pk}, request=request)

    '''
    RelatedJobsMixin
    '''
    def _get_related_jobs(self):
        # Jobs that used this project plus this project's own updates.
        return UnifiedJob.objects.non_polymorphic().filter(
            models.Q(Job___project=self) | models.Q(ProjectUpdate___project=self)
        )

    def delete(self, *args, **kwargs):
        '''Delete the project, then asynchronously remove its checkout on disk.'''
        # Capture the path before the row is gone.
        path_to_delete = self.get_project_path(check_if_exists=False)
        r = super(Project, self).delete(*args, **kwargs)
        if self.scm_type and path_to_delete:  # non-manual, concrete path
            from awx.main.tasks import delete_project_files
            delete_project_files.delay(path_to_delete)
        return r
class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, ResourceMixin):
    '''
    A job template is a reusable job definition for applying a project (with
    playbook) to an inventory source with a given credential.
    '''

    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name')]

    class Meta:
        app_label = 'main'
        ordering = ('name',)

    # Shared secret for the provisioning-callback endpoint.
    host_config_key = models.CharField(
        max_length=1024,
        blank=True,
        default='',
    )
    # ask_* flags control which fields a launcher may override at launch time.
    ask_diff_mode_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_limit_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_tags_on_launch = AskForField(
        blank=True,
        default=False,
        allows_field='job_tags'  # prompt flag governs the 'job_tags' field
    )
    ask_skip_tags_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_job_type_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_verbosity_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_inventory_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_credential_on_launch = AskForField(
        blank=True,
        default=False,
        allows_field='credentials'  # prompt flag governs the 'credentials' relation
    )
    # RBAC: admin flows down from either the project's or the inventory's org.
    admin_role = ImplicitRoleField(
        parent_role=['project.organization.admin_role', 'inventory.organization.admin_role']
    )
    execute_role = ImplicitRoleField(
        parent_role=['admin_role'],
    )
    read_role = ImplicitRoleField(
        parent_role=['project.organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
    )

    @classmethod
    def _get_unified_job_class(cls):
        # The unified-job counterpart spawned by this template.
        return Job

    @classmethod
    def _get_unified_job_field_names(cls):
        # Fields copied from the template onto each spawned Job.
        return ['name', 'description', 'job_type', 'inventory', 'project',
                'playbook', 'credentials', 'forks', 'schedule', 'limit',
                'verbosity', 'job_tags', 'extra_vars', 'force_handlers',
                'skip_tags', 'start_at_task', 'become_enabled', 'labels',
                'survey_passwords', 'allow_simultaneous', 'timeout',
                'use_fact_cache', 'diff_mode',]

    @property
    def validation_errors(self):
        '''
        Fields needed to start, which cannot be given on launch, invalid state.
        '''
        validation_errors = {}
        if self.inventory is None and not self.ask_inventory_on_launch:
            validation_errors['inventory'] = [_("Job Template must provide 'inventory' or allow prompting for it."),]
        if self.project is None:
            validation_errors['project'] = [_("Job types 'run' and 'check' must have assigned a project."),]
        return validation_errors

    @property
    def resources_needed_to_start(self):
        # Names of required FK resources ('project'/'inventory') not yet set.
        return [fd for fd in ['project', 'inventory'] if not getattr(self, '{}_id'.format(fd))]

    def create_job(self, **kwargs):
        '''
        Create a new job based on this template.
        '''
        return self.create_unified_job(**kwargs)

    def get_absolute_url(self, request=None):
        return reverse('api:job_template_detail', kwargs={'pk': self.pk}, request=request)

    def can_start_without_user_input(self, callback_extra_vars=None):
        '''
        Return whether job template can be used to start a new job without
        requiring any user input.
        '''
        variables_needed = False
        if callback_extra_vars:
            # Callback launch: only variables not supplied by the callback count.
            extra_vars_dict = parse_yaml_or_json(callback_extra_vars)
            for var in self.variables_needed_to_start:
                if var not in extra_vars_dict:
                    variables_needed = True
                    break
        elif self.variables_needed_to_start:
            variables_needed = True

        prompting_needed = False
        # The behavior of provisioning callback should mimic
        # that of job template launch, so prompting_needed should
        # not block a provisioning callback from creating/launching jobs.
        if callback_extra_vars is None:
            for ask_field_name in set(self.get_ask_mapping().values()):
                if getattr(self, ask_field_name):
                    prompting_needed = True
                    break
        return (not prompting_needed and
                not self.passwords_needed_to_start and
                not variables_needed)

    def _accept_or_ignore_job_kwargs(self, **kwargs):
        '''
        Partition launch-time kwargs into (prompted_data, rejected_data,
        errors_dict) per the template's ask_* configuration, survey rules,
        password requirements, and missing required resources.
        '''
        exclude_errors = kwargs.pop('_exclude_errors', [])
        prompted_data = {}
        rejected_data = {}
        accepted_vars, rejected_vars, errors_dict = self.accept_or_ignore_variables(
            kwargs.get('extra_vars', {}),
            _exclude_errors=exclude_errors,
            extra_passwords=kwargs.get('survey_passwords', {}))
        if accepted_vars:
            prompted_data['extra_vars'] = accepted_vars
        if rejected_vars:
            rejected_data['extra_vars'] = rejected_vars

        # Handle all the other fields that follow the simple prompting rule
        for field_name, ask_field_name in self.get_ask_mapping().items():
            if field_name not in kwargs or field_name == 'extra_vars' or kwargs[field_name] is None:
                continue
            new_value = kwargs[field_name]
            old_value = getattr(self, field_name)

            field = self._meta.get_field(field_name)
            if isinstance(field, models.ManyToManyField):
                # M2M (credentials): compare as sets of related objects.
                old_value = set(old_value.all())
                if getattr(self, '_deprecated_credential_launch', False):
                    # TODO: remove this code branch when support for `extra_credentials` goes away
                    new_value = set(kwargs[field_name])
                else:
                    new_value = set(kwargs[field_name]) - old_value
                    if not new_value:
                        continue

            if new_value == old_value:
                # no-op case: Fields the same as template's value
                # counted as neither accepted or ignored
                continue
            elif getattr(self, ask_field_name):
                # accepted prompt
                prompted_data[field_name] = new_value
            else:
                # unprompted - template is not configured to accept field on launch
                rejected_data[field_name] = new_value
                # Not considered an error for manual launch, to support old
                # behavior of putting them in ignored_fields and launching anyway
                if 'prompts' not in exclude_errors:
                    errors_dict[field_name] = _('Field is not configured to prompt on launch.').format(field_name=field_name)

        if 'prompts' not in exclude_errors and self.passwords_needed_to_start:
            errors_dict['passwords_needed_to_start'] = _(
                'Saved launch configurations cannot provide passwords needed to start.')

        needed = self.resources_needed_to_start
        if needed:
            needed_errors = []
            for resource in needed:
                if resource in prompted_data:
                    continue
                needed_errors.append(_("Job Template {} is missing or undefined.").format(resource))
            if needed_errors:
                errors_dict['resources_needed_to_start'] = needed_errors

        return prompted_data, rejected_data, errors_dict

    @property
    def cache_timeout_blocked(self):
        '''True (and logged) if too many jobs from this template are already queued/running.'''
        # NOTE(review): uses '>' while WorkflowJobTemplate.cache_timeout_blocked
        # uses '>=' for the same limit — confirm which comparison is intended.
        if Job.objects.filter(job_template=self, status__in=['pending', 'waiting', 'running']).count() > getattr(settings, 'SCHEDULE_MAX_JOBS', 10):
            logger.error("Job template %s could not be started because there are more than %s other jobs from that template waiting to run" % (self.name, getattr(settings, 'SCHEDULE_MAX_JOBS', 10)))
            return True
        return False

    def _can_update(self):
        # Provisioning-callback "update" is allowed only for hands-free launch.
        return self.can_start_without_user_input()

    @property
    def notification_templates(self):
        # Return all notification_templates defined on the Job Template, on the Project, and on the Organization for each trigger type
        # TODO: Currently there is no org fk on project so this will need to be added once that is
        # available after the rbac pr
        base_notification_templates = NotificationTemplate.objects
        error_notification_templates = list(base_notification_templates.filter(
            unifiedjobtemplate_notification_templates_for_errors__in=[self, self.project]))
        success_notification_templates = list(base_notification_templates.filter(
            unifiedjobtemplate_notification_templates_for_success__in=[self, self.project]))
        any_notification_templates = list(base_notification_templates.filter(
            unifiedjobtemplate_notification_templates_for_any__in=[self, self.project]))
        # Get Organization NotificationTemplates
        if self.project is not None and self.project.organization is not None:
            # set() de-duplicates templates attached at both levels.
            error_notification_templates = set(error_notification_templates + list(base_notification_templates.filter(
                organization_notification_templates_for_errors=self.project.organization)))
            success_notification_templates = set(success_notification_templates + list(base_notification_templates.filter(
                organization_notification_templates_for_success=self.project.organization)))
            any_notification_templates = set(any_notification_templates + list(base_notification_templates.filter(
                organization_notification_templates_for_any=self.project.organization)))
        return dict(error=list(error_notification_templates),
                    success=list(success_notification_templates),
                    any=list(any_notification_templates))
class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
    '''
    A credential contains information about how to talk to a remote resource
    Usually this is a SSH key location, and possibly an unlock password.
    If used with sudo, a sudo password should be set if required.
    '''

    class Meta:
        app_label = 'main'
        ordering = ('name',)
        unique_together = (('organization', 'name', 'credential_type'))

    # Secret handling is applied to keys inside the 'inputs' dict; the real
    # secret-field list is rebound per credential_type in save().
    PASSWORD_FIELDS = ['inputs']
    FIELDS_TO_PRESERVE_AT_COPY = ['input_sources']

    credential_type = models.ForeignKey(
        'CredentialType',
        related_name='credentials',
        null=False,
        on_delete=models.CASCADE,  # credentials die with their type
        help_text=_('Specify the type of credential you want to create. Refer '
                    'to the Ansible Tower documentation for details on each type.'))
    # Internally-created credentials not editable by users.
    managed_by_tower = models.BooleanField(default=False, editable=False)
    organization = models.ForeignKey(
        'Organization',
        null=True,
        default=None,
        blank=True,
        on_delete=models.CASCADE,
        related_name='credentials',
    )
    # Free-form input dict whose schema is defined by the credential_type;
    # secret values are stored encrypted (see save()/encrypt_field()).
    inputs = CredentialInputField(
        blank=True,
        default=dict,
        help_text=_('Enter inputs using either JSON or YAML syntax. '
                    'Refer to the Ansible Tower documentation for example syntax.'))
    admin_role = ImplicitRoleField(
        parent_role=[
            'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
            'organization.credential_admin_role',
        ],
    )
    use_role = ImplicitRoleField(parent_role=[
        'admin_role',
    ])
    read_role = ImplicitRoleField(parent_role=[
        'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
        'organization.auditor_role',
        'use_role', 'admin_role',
    ])

    @property
    def kind(self):
        # e.g. 'ssh', 'aws' — the namespace of the credential's type.
        return self.credential_type.namespace

    @property
    def cloud(self):
        return self.credential_type.kind == 'cloud'

    @property
    def kubernetes(self):
        return self.credential_type.kind == 'kubernetes'

    def get_absolute_url(self, request=None):
        return reverse('api:credential_detail', kwargs={'pk': self.pk}, request=request)

    #
    # TODO: the SSH-related properties below are largely used for validation
    # and for determining passwords necessary for job/ad-hoc launch
    #
    # These are SSH-specific; should we move them elsewhere?
    #
    @property
    def needs_ssh_password(self):
        # 'ASK' sentinel means the value must be prompted for at launch.
        return self.credential_type.kind == 'ssh' and self.inputs.get('password') == 'ASK'

    @property
    def has_encrypted_ssh_key_data(self):
        '''True if the stored private key is itself passphrase-protected.'''
        try:
            ssh_key_data = self.get_input('ssh_key_data')
        except AttributeError:
            return False

        try:
            pem_objects = validate_ssh_private_key(ssh_key_data)
            for pem_object in pem_objects:
                if pem_object.get('key_enc', False):
                    return True
        except ValidationError:
            pass
        return False

    @property
    def needs_ssh_key_unlock(self):
        if self.credential_type.kind == 'ssh' and self.inputs.get('ssh_key_unlock') in ('ASK', ''):
            return self.has_encrypted_ssh_key_data
        return False

    @property
    def needs_become_password(self):
        return self.credential_type.kind == 'ssh' and self.inputs.get('become_password') == 'ASK'

    @property
    def needs_vault_password(self):
        return self.credential_type.kind == 'vault' and self.inputs.get('vault_password') == 'ASK'

    @property
    def passwords_needed(self):
        '''List of password field names a launcher must supply for this credential.'''
        needed = []
        for field in ('ssh_password', 'become_password', 'ssh_key_unlock'):
            if getattr(self, 'needs_%s' % field):
                needed.append(field)
        if self.needs_vault_password:
            if self.inputs.get('vault_id'):
                # Multiple vaults are distinguished by vault_id suffix.
                needed.append('vault_password.{}'.format(self.inputs.get('vault_id')))
            else:
                needed.append('vault_password')
        return needed

    @cached_property
    def dynamic_input_fields(self):
        # Input fields whose values come from external credential input sources.
        return [obj.input_field_name for obj in self.input_sources.all()]

    def _password_field_allows_ask(self, field):
        return field in self.credential_type.askable_fields

    def save(self, *args, **kwargs):
        '''Save the credential, preserving stored secrets the caller left as $encrypted$.'''
        self.PASSWORD_FIELDS = self.credential_type.secret_fields

        if self.pk:
            cred_before = Credential.objects.get(pk=self.pk)
            inputs_before = cred_before.inputs
            # Look up the currently persisted value so that we can replace
            # $encrypted$ with the actual DB-backed value
            for field in self.PASSWORD_FIELDS:
                if self.inputs.get(field) == '$encrypted$':
                    self.inputs[field] = inputs_before[field]

        super(Credential, self).save(*args, **kwargs)

    def mark_field_for_save(self, update_fields, field):
        # All secrets live inside 'inputs', so that is the column to persist.
        if 'inputs' not in update_fields:
            update_fields.append('inputs')

    def encrypt_field(self, field, ask):
        '''Encrypt a single input value in place; drop the key if nothing to store.'''
        if field not in self.inputs:
            return None
        encrypted = encrypt_field(self, field, ask=ask)
        if encrypted:
            self.inputs[field] = encrypted
        elif field in self.inputs:
            del self.inputs[field]

    def display_inputs(self):
        '''Return a copy of inputs with every secret masked as $encrypted$.'''
        field_val = self.inputs.copy()
        for k, v in field_val.items():
            if force_text(v).startswith('$encrypted$'):
                field_val[k] = '$encrypted$'
        return field_val

    def unique_hash(self, display=False):
        '''
        Credential exclusivity is not defined solely by the related
        credential type (due to vault), so this produces a hash
        that can be used to evaluate exclusivity
        '''
        if display:
            type_alias = self.credential_type.name
        else:
            type_alias = self.credential_type_id
        if self.credential_type.kind == 'vault' and self.has_input('vault_id'):
            if display:
                fmt_str = '{} (id={})'
            else:
                fmt_str = '{}_{}'
            return fmt_str.format(type_alias, self.get_input('vault_id'))
        return str(type_alias)

    @staticmethod
    def unique_dict(cred_qs):
        # Map unique_hash -> credential; later duplicates overwrite earlier ones.
        ret = {}
        for cred in cred_qs:
            ret[cred.unique_hash()] = cred
        return ret

    def get_input(self, field_name, **kwargs):
        """
        Get an injectable and decrypted value for an input field.

        Retrieves the value for a given credential input field name. Return
        values for secret input fields are decrypted. If the credential doesn't
        have an input value defined for the given field name, an AttributeError
        is raised unless a default value is provided.

        :param field_name(str):        The name of the input field.
        :param default(optional[str]): A default return value to use.
        """
        if self.credential_type.kind != 'external' and field_name in self.dynamic_input_fields:
            return self._get_dynamic_input(field_name)
        if field_name in self.credential_type.secret_fields:
            try:
                return decrypt_field(self, field_name)
            except AttributeError:
                # Fall back to the type-level default for this field, if any.
                for field in self.credential_type.inputs.get('fields', []):
                    if field['id'] == field_name and 'default' in field:
                        return field['default']
                if 'default' in kwargs:
                    return kwargs['default']
                raise AttributeError
        if field_name in self.inputs:
            return self.inputs[field_name]
        if 'default' in kwargs:
            return kwargs['default']
        for field in self.credential_type.inputs.get('fields', []):
            if field['id'] == field_name and 'default' in field:
                return field['default']
        raise AttributeError(field_name)

    def has_input(self, field_name):
        # Dynamic inputs always count; static inputs must be non-empty.
        if field_name in self.dynamic_input_fields:
            return True
        return field_name in self.inputs and self.inputs[field_name] not in ('', None)

    def _get_dynamic_input(self, field_name):
        # Resolve the value from the linked external input source.
        for input_source in self.input_sources.all():
            if input_source.input_field_name == field_name:
                return input_source.get_input_value()
        else:
            raise ValueError('{} is not a dynamic input field'.format(field_name))
class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions,
                          SurveyJobTemplateMixin, ResourceMixin):
    '''
    A workflow job template stores a graph of workflow nodes
    (workflow_job_template_nodes) and produces WorkflowJob records when
    launched; the nodes are copied onto each spawned WorkflowJob.
    '''

    # Uniqueness enforced in application code (see UnifiedJobTemplate),
    # not as a DB constraint.
    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]

    class Meta:
        app_label = 'main'

    organization = models.ForeignKey(
        'Organization',
        blank=True,
        null=True,
        on_delete=models.SET_NULL,
        related_name='workflows',
    )

    admin_role = ImplicitRoleField(parent_role=[
        'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
        'organization.admin_role'
    ])
    execute_role = ImplicitRoleField(parent_role=['admin_role'])
    read_role = ImplicitRoleField(parent_role=[
        'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
        'organization.auditor_role', 'execute_role', 'admin_role'
    ])

    @property
    def workflow_nodes(self):
        # Convenience alias for the reverse FK accessor.
        return self.workflow_job_template_nodes

    @classmethod
    def _get_unified_job_class(cls):
        # Launching this template produces a WorkflowJob.
        return WorkflowJob

    @classmethod
    def _get_unified_job_field_names(cls):
        # Fields copied from the template onto each spawned WorkflowJob.
        return [
            'name', 'description', 'extra_vars', 'labels', 'survey_passwords',
            'schedule', 'launch_type', 'allow_simultaneous'
        ]

    @classmethod
    def _get_unified_jt_copy_names(cls):
        # Fields transferred when copying the template itself; labels are
        # removed here because node/label copying is handled separately.
        base_list = super(WorkflowJobTemplate, cls)._get_unified_jt_copy_names()
        base_list.remove('labels')
        return (base_list + ['survey_spec', 'survey_enabled', 'organization'])

    def get_absolute_url(self, request=None):
        return reverse('api:workflow_job_template_detail',
                       kwargs={'pk': self.pk},
                       request=request)

    @property
    def cache_timeout_blocked(self):
        # TODO: don't allow running of job template if same workflow template running
        return False

    @property
    def notification_templates(self):
        '''
        Return notification templates attached to this WFJT for each trigger
        type (error / success / any).
        '''
        base_notification_templates = NotificationTemplate.objects.all()
        error_notification_templates = list(
            base_notification_templates.filter(
                unifiedjobtemplate_notification_templates_for_errors__in=[self]))
        success_notification_templates = list(
            base_notification_templates.filter(
                unifiedjobtemplate_notification_templates_for_success__in=[self]))
        any_notification_templates = list(
            base_notification_templates.filter(
                unifiedjobtemplate_notification_templates_for_any__in=[self]))
        return dict(error=list(error_notification_templates),
                    success=list(success_notification_templates),
                    any=list(any_notification_templates))

    def create_unified_job(self, **kwargs):
        # Spawn the WorkflowJob, then replicate this template's node graph
        # onto it so the run is insulated from later template edits.
        workflow_job = super(WorkflowJobTemplate, self).create_unified_job(**kwargs)
        workflow_job.copy_nodes_from_original(original=self)
        return workflow_job

    def _accept_or_ignore_job_kwargs(self, extra_vars=None, **kwargs):
        '''
        Split launch-time extra_vars into (prompted, ignored) dicts.
        With an enabled survey, only variables named in the survey spec are
        accepted; without one, all extra_vars pass through.
        '''
        # Only accept allowed survey variables
        ignored_fields = {}
        prompted_fields = {}
        prompted_fields['extra_vars'] = {}
        ignored_fields['extra_vars'] = {}
        extra_vars = parse_yaml_or_json(extra_vars)
        if self.survey_enabled and self.survey_spec:
            survey_vars = [
                question['variable']
                for question in self.survey_spec.get('spec', [])
            ]
            for key in extra_vars:
                if key in survey_vars:
                    prompted_fields['extra_vars'][key] = extra_vars[key]
                else:
                    ignored_fields['extra_vars'][key] = extra_vars[key]
        else:
            prompted_fields['extra_vars'] = extra_vars
        return prompted_fields, ignored_fields

    def can_start_without_user_input(self):
        '''Return whether WFJT can be launched without survey passwords.'''
        return not bool(self.variables_needed_to_start or
                        self.node_templates_missing() or
                        self.node_prompts_rejected())

    def node_templates_missing(self):
        # PKs of nodes whose unified job template was deleted (or never set).
        return [
            node.pk for node in self.workflow_job_template_nodes.filter(
                unified_job_template__isnull=True).all()
        ]

    def node_prompts_rejected(self):
        # PKs of nodes whose stored prompts would be rejected by their
        # unified job template (per node.get_prompts_warnings()).
        node_list = []
        for node in self.workflow_job_template_nodes.prefetch_related(
                'unified_job_template').all():
            node_prompts_warnings = node.get_prompts_warnings()
            if node_prompts_warnings:
                node_list.append(node.pk)
        return node_list

    def user_copy(self, user):
        # Copy template then nodes; `user` scopes which node content the
        # copier may carry over.
        new_wfjt = self.copy_unified_jt()
        new_wfjt.copy_nodes_from_original(original=self, user=user)
        return new_wfjt
class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin, WebhookTemplateMixin):
    """
    A job template is a reusable job definition for applying a project (with
    playbook) to an inventory source with a given credential.
    """

    FIELDS_TO_PRESERVE_AT_COPY = ['labels', 'instance_groups', 'credentials', 'survey_spec']
    FIELDS_TO_DISCARD_AT_COPY = ['vault_credential', 'credential']
    # Uniqueness enforced in application code (validate_unique below), not as
    # a DB constraint, because organization is inferred from project.
    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]

    class Meta:
        app_label = 'main'
        ordering = ('name',)

    job_type = models.CharField(
        max_length=64,
        choices=NEW_JOB_TYPE_CHOICES,
        default='run',
    )
    # Shared secret for the provisioning-callback endpoint; excluded from
    # search by prevent_search.
    host_config_key = prevent_search(
        models.CharField(
            max_length=1024,
            blank=True,
            default='',
        )
    )
    ask_diff_mode_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_limit_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_tags_on_launch = AskForField(blank=True, default=False, allows_field='job_tags')
    ask_skip_tags_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_job_type_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_verbosity_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_inventory_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_credential_on_launch = AskForField(blank=True, default=False, allows_field='credentials')
    ask_scm_branch_on_launch = AskForField(blank=True, default=False, allows_field='scm_branch')
    job_slice_count = models.PositiveIntegerField(
        blank=True,
        default=1,
        help_text=_("The number of jobs to slice into at runtime. "
                    "Will cause the Job Template to launch a workflow if value is greater than 1."),
    )

    admin_role = ImplicitRoleField(parent_role=['organization.job_template_admin_role'])
    execute_role = ImplicitRoleField(
        parent_role=['admin_role', 'organization.execute_role'],
    )
    read_role = ImplicitRoleField(
        parent_role=[
            'organization.auditor_role',
            'inventory.organization.auditor_role',  # partial support for old inheritance via inventory
            'execute_role',
            'admin_role',
        ],
    )

    @classmethod
    def _get_unified_job_class(cls):
        # Launching this template produces a Job.
        return Job

    @classmethod
    def _get_unified_job_field_names(cls):
        # All JobOptions fields plus template-level fields copied onto jobs.
        return set(f.name for f in JobOptions._meta.fields) | set(
            ['name', 'description', 'organization', 'survey_passwords', 'labels', 'credentials',
             'job_slice_number', 'job_slice_count', 'execution_environment']
        )

    @property
    def validation_errors(self):
        """
        Fields needed to start, which cannot be given on launch, invalid state.
        """
        validation_errors = {}
        if self.inventory is None and not self.ask_inventory_on_launch:
            validation_errors['inventory'] = [
                _("Job Template must provide 'inventory' or allow prompting for it."),
            ]
        if self.project is None:
            validation_errors['project'] = [
                _("Job Templates must have a project assigned."),
            ]
        return validation_errors

    @property
    def resources_needed_to_start(self):
        # Names of required related resources that are currently unset.
        return [fd for fd in ['project', 'inventory'] if not getattr(self, '{}_id'.format(fd))]

    def clean_forks(self):
        # MAX_FORKS <= 0 means "no limit".
        if settings.MAX_FORKS > 0 and self.forks > settings.MAX_FORKS:
            raise ValidationError(_(f'Maximum number of forks ({settings.MAX_FORKS}) exceeded.'))
        return self.forks

    def create_job(self, **kwargs):
        """
        Create a new job based on this template.
        """
        return self.create_unified_job(**kwargs)

    def get_effective_slice_ct(self, kwargs):
        # Effective slice count is capped by the host count of the inventory
        # that will actually be used (a prompted inventory overrides ours).
        actual_inventory = self.inventory
        if self.ask_inventory_on_launch and 'inventory' in kwargs:
            actual_inventory = kwargs['inventory']
        if actual_inventory:
            return min(self.job_slice_count, actual_inventory.hosts.count())
        else:
            return self.job_slice_count

    def save(self, *args, **kwargs):
        update_fields = kwargs.get('update_fields', [])
        # if project is deleted for some reason, then keep the old organization
        # to retain ownership for organization admins
        if self.project and self.project.organization_id != self.organization_id:
            self.organization_id = self.project.organization_id
            if 'organization' not in update_fields and 'organization_id' not in update_fields:
                update_fields.append('organization_id')
        return super(JobTemplate, self).save(*args, **kwargs)

    def validate_unique(self, exclude=None):
        """Custom over-ride for JT specifically
        because organization is inferred from project after full_clean is finished
        thus the organization field is not yet set when validation happens
        """
        errors = []
        for ut in JobTemplate.SOFT_UNIQUE_TOGETHER:
            kwargs = {'name': self.name}
            if self.project:
                kwargs['organization'] = self.project.organization_id
            else:
                kwargs['organization'] = None
            qs = JobTemplate.objects.filter(**kwargs)
            if self.pk:
                # Exclude self so re-saving an existing template passes.
                qs = qs.exclude(pk=self.pk)
            if qs.exists():
                errors.append('%s with this (%s) combination already exists.' % (JobTemplate.__name__, ', '.join(set(ut) - {'polymorphic_ctype'})))
        if errors:
            raise ValidationError(errors)

    def create_unified_job(self, **kwargs):
        """
        Create a Job — or, when slicing applies, a WorkflowJob containing one
        node per slice. `_prevent_slicing=True` forces a plain Job.
        """
        prevent_slicing = kwargs.pop('_prevent_slicing', False)
        slice_ct = self.get_effective_slice_ct(kwargs)
        slice_event = bool(slice_ct > 1 and (not prevent_slicing))
        if slice_event:
            # A Slice Job Template will generate a WorkflowJob rather than a Job
            from awx.main.models.workflow import WorkflowJobTemplate, WorkflowJobNode

            kwargs['_unified_job_class'] = WorkflowJobTemplate._get_unified_job_class()
            kwargs['_parent_field_name'] = "job_template"
            kwargs.setdefault('_eager_fields', {})
            kwargs['_eager_fields']['is_sliced_job'] = True
        elif self.job_slice_count > 1 and (not prevent_slicing):
            # Unique case where JT was set to slice but hosts not available
            kwargs.setdefault('_eager_fields', {})
            kwargs['_eager_fields']['job_slice_count'] = 1
        elif prevent_slicing:
            kwargs.setdefault('_eager_fields', {})
            kwargs['_eager_fields'].setdefault('job_slice_count', 1)
        job = super(JobTemplate, self).create_unified_job(**kwargs)
        if slice_event:
            # One workflow node per slice; slice index is 1-based and carried
            # in ancestor_artifacts.
            for idx in range(slice_ct):
                create_kwargs = dict(workflow_job=job, unified_job_template=self, ancestor_artifacts=dict(job_slice=idx + 1))
                WorkflowJobNode.objects.create(**create_kwargs)
        return job

    def get_absolute_url(self, request=None):
        return reverse('api:job_template_detail', kwargs={'pk': self.pk}, request=request)

    def can_start_without_user_input(self, callback_extra_vars=None):
        """
        Return whether job template can be used to start a new job without
        requiring any user input.
        """
        variables_needed = False
        if callback_extra_vars:
            extra_vars_dict = parse_yaml_or_json(callback_extra_vars)
            for var in self.variables_needed_to_start:
                if var not in extra_vars_dict:
                    variables_needed = True
                    break
        elif self.variables_needed_to_start:
            variables_needed = True
        prompting_needed = False
        # The behavior of provisioning callback should mimic
        # that of job template launch, so prompting_needed should
        # not block a provisioning callback from creating/launching jobs.
        if callback_extra_vars is None:
            for ask_field_name in set(self.get_ask_mapping().values()):
                if getattr(self, ask_field_name):
                    prompting_needed = True
                    break
        return not prompting_needed and not self.passwords_needed_to_start and not variables_needed

    def _accept_or_ignore_job_kwargs(self, **kwargs):
        """
        Partition launch-time kwargs into (prompted_data, rejected_data,
        errors_dict) based on this template's ask_*_on_launch settings and
        survey spec.
        """
        exclude_errors = kwargs.pop('_exclude_errors', [])
        prompted_data = {}
        rejected_data = {}
        accepted_vars, rejected_vars, errors_dict = self.accept_or_ignore_variables(
            kwargs.get('extra_vars', {}), _exclude_errors=exclude_errors, extra_passwords=kwargs.get('survey_passwords', {})
        )
        if accepted_vars:
            prompted_data['extra_vars'] = accepted_vars
        if rejected_vars:
            rejected_data['extra_vars'] = rejected_vars

        # Handle all the other fields that follow the simple prompting rule
        for field_name, ask_field_name in self.get_ask_mapping().items():
            if field_name not in kwargs or field_name == 'extra_vars' or kwargs[field_name] is None:
                continue
            new_value = kwargs[field_name]
            old_value = getattr(self, field_name)

            field = self._meta.get_field(field_name)
            if isinstance(field, models.ManyToManyField):
                # For M2M prompts, only additions relative to the template
                # count as provided values.
                old_value = set(old_value.all())
                new_value = set(kwargs[field_name]) - old_value
                if not new_value:
                    continue

            if new_value == old_value:
                # no-op case: Fields the same as template's value
                # counted as neither accepted or ignored
                continue
            elif field_name == 'scm_branch' and old_value == '' and self.project and new_value == self.project.scm_branch:
                # special case of "not provided" for branches
                # job template does not provide branch, runs with default branch
                continue
            elif getattr(self, ask_field_name):
                # Special case where prompts can be rejected based on project setting
                if field_name == 'scm_branch':
                    if not self.project:
                        rejected_data[field_name] = new_value
                        errors_dict[field_name] = _('Project is missing.')
                        continue
                    if kwargs['scm_branch'] != self.project.scm_branch and not self.project.allow_override:
                        rejected_data[field_name] = new_value
                        errors_dict[field_name] = _('Project does not allow override of branch.')
                        continue
                # accepted prompt
                prompted_data[field_name] = new_value
            else:
                # unprompted - template is not configured to accept field on launch
                rejected_data[field_name] = new_value
                # Not considered an error for manual launch, to support old
                # behavior of putting them in ignored_fields and launching anyway
                if 'prompts' not in exclude_errors:
                    errors_dict[field_name] = _('Field is not configured to prompt on launch.')

        if 'prompts' not in exclude_errors and (not getattr(self, 'ask_credential_on_launch', False)) and self.passwords_needed_to_start:
            errors_dict['passwords_needed_to_start'] = _('Saved launch configurations cannot provide passwords needed to start.')

        needed = self.resources_needed_to_start
        if needed:
            needed_errors = []
            for resource in needed:
                if resource in prompted_data:
                    continue
                needed_errors.append(_("Job Template {} is missing or undefined.").format(resource))
            if needed_errors:
                errors_dict['resources_needed_to_start'] = needed_errors

        return prompted_data, rejected_data, errors_dict

    @property
    def cache_timeout_blocked(self):
        # Block new starts once SCHEDULE_MAX_JOBS (default 10) jobs from this
        # template are already queued or running.
        if Job.objects.filter(job_template=self, status__in=['pending', 'waiting', 'running']).count() >= getattr(settings, 'SCHEDULE_MAX_JOBS', 10):
            logger.error(
                "Job template %s could not be started because there are more than %s other jobs from that template waiting to run"
                % (self.name, getattr(settings, 'SCHEDULE_MAX_JOBS', 10))
            )
            return True
        return False

    def _can_update(self):
        return self.can_start_without_user_input()

    @property
    def notification_templates(self):
        # Return all notification_templates defined on the Job Template, on the Project, and on the Organization for each trigger type
        # TODO: Currently there is no org fk on project so this will need to be added once that is
        # available after the rbac pr
        base_notification_templates = NotificationTemplate.objects
        error_notification_templates = list(base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_errors__in=[self, self.project]))
        started_notification_templates = list(
            base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_started__in=[self, self.project])
        )
        success_notification_templates = list(
            base_notification_templates.filter(unifiedjobtemplate_notification_templates_for_success__in=[self, self.project])
        )
        # Get Organization NotificationTemplates
        if self.organization is not None:
            # set() union also de-duplicates templates attached at both levels.
            error_notification_templates = set(
                error_notification_templates + list(base_notification_templates.filter(organization_notification_templates_for_errors=self.organization))
            )
            started_notification_templates = set(
                started_notification_templates + list(base_notification_templates.filter(organization_notification_templates_for_started=self.organization))
            )
            success_notification_templates = set(
                success_notification_templates + list(base_notification_templates.filter(organization_notification_templates_for_success=self.organization))
            )
        return dict(error=list(error_notification_templates), started=list(started_notification_templates), success=list(success_notification_templates))

    '''
    RelatedJobsMixin
    '''

    def _get_related_jobs(self):
        return UnifiedJob.objects.filter(unified_job_template=self)
class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin):
    '''
    An organization is the basic unit of multi-tenancy divisions
    '''

    class Meta:
        app_label = 'main'
        ordering = ('name', )

    instance_groups = OrderedManyToManyField(
        'InstanceGroup',
        blank=True,
        through='OrganizationInstanceGroupMembership')
    max_hosts = models.PositiveIntegerField(
        blank=True,
        default=0,
        help_text=_('Maximum number of hosts allowed to be managed by this organization.'),
    )

    # RBAC: admin_role descends from the system administrator singleton;
    # all per-resource admin roles descend from admin_role.
    admin_role = ImplicitRoleField(parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, )
    execute_role = ImplicitRoleField(parent_role='admin_role', )
    project_admin_role = ImplicitRoleField(parent_role='admin_role', )
    inventory_admin_role = ImplicitRoleField(parent_role='admin_role', )
    credential_admin_role = ImplicitRoleField(parent_role='admin_role', )
    workflow_admin_role = ImplicitRoleField(parent_role='admin_role', )
    notification_admin_role = ImplicitRoleField(parent_role='admin_role', )
    job_template_admin_role = ImplicitRoleField(parent_role='admin_role', )
    auditor_role = ImplicitRoleField(parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR, )
    member_role = ImplicitRoleField(parent_role=['admin_role'])
    # read_role is implied by every other org-level role.
    read_role = ImplicitRoleField(parent_role=[
        'member_role',
        'auditor_role',
        'execute_role',
        'project_admin_role',
        'inventory_admin_role',
        'workflow_admin_role',
        'notification_admin_role',
        'credential_admin_role',
        'job_template_admin_role',
    ], )

    def get_absolute_url(self, request=None):
        return reverse('api:organization_detail', kwargs={'pk': self.pk}, request=request)

    '''
    RelatedJobsMixin
    '''

    def _get_related_jobs(self):
        # Jobs and project updates of this org's projects, plus inventory
        # updates of sources in this org's inventories.
        project_ids = self.projects.all().values_list('id')
        return UnifiedJob.objects.non_polymorphic().filter(
            Q(Job___project__in=project_ids) |
            Q(ProjectUpdate___project__in=project_ids) |
            Q(InventoryUpdate___inventory_source__inventory__organization=self)
        )
class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
    '''
    A credential contains information about how to talk to a remote resource
    Usually this is a SSH key location, and possibly an unlock password.
    If used with sudo, a sudo password should be set if required.
    '''

    class Meta:
        app_label = 'main'
        ordering = ('name', )
        unique_together = (('organization', 'name', 'credential_type'))

    # Replaced at save() time with the credential type's secret fields.
    PASSWORD_FIELDS = ['inputs']

    credential_type = models.ForeignKey(
        'CredentialType',
        related_name='credentials',
        null=False,
        help_text=_('Specify the type of credential you want to create. Refer '
                    'to the Ansible Tower documentation for details on each type.'))
    organization = models.ForeignKey(
        'Organization',
        null=True,
        default=None,
        blank=True,
        on_delete=models.CASCADE,
        related_name='credentials',
    )
    inputs = CredentialInputField(
        blank=True,
        default={},
        help_text=_('Enter inputs using either JSON or YAML syntax. Use the '
                    'radio button to toggle between the two. Refer to the '
                    'Ansible Tower documentation for example syntax.'))
    admin_role = ImplicitRoleField(parent_role=[
        'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
        'organization.admin_role',
    ], )
    use_role = ImplicitRoleField(parent_role=[
        'admin_role',
    ])
    read_role = ImplicitRoleField(parent_role=[
        'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
        'organization.auditor_role',
        'use_role',
        'admin_role',
    ])

    def __getattr__(self, item):
        # Back-compat: expose legacy (v1) credential fields and arbitrary
        # input keys as instance attributes, resolved from self.inputs.
        if item != 'inputs':
            if item in V1Credential.FIELDS:
                return self.inputs.get(item, V1Credential.FIELDS[item].default)
            elif item in self.inputs:
                return self.inputs[item]
        raise AttributeError(item)

    def __setattr__(self, item, value):
        # Back-compat: assignments to legacy v1 fields defined by the
        # credential type are stored into self.inputs; falsy values delete
        # the key instead.
        if item in V1Credential.FIELDS and item in self.credential_type.defined_fields:
            if value:
                self.inputs[item] = value
            elif item in self.inputs:
                del self.inputs[item]
            return
        super(Credential, self).__setattr__(item, value)

    @property
    def kind(self):
        # TODO 3.3: remove the need for this helper property by removing its
        # usage throughout the codebase
        type_ = self.credential_type
        if type_.kind != 'cloud':
            return type_.kind
        for field in V1Credential.KIND_CHOICES:
            kind, name = field
            if name == type_.name:
                return kind

    @property
    def cloud(self):
        return self.credential_type.kind == 'cloud'

    def get_absolute_url(self, request=None):
        return reverse('api:credential_detail', kwargs={'pk': self.pk}, request=request)

    #
    # TODO: the SSH-related properties below are largely used for validation
    # and for determining passwords necessary for job/ad-hoc launch
    #
    # These are SSH-specific; should we move them elsewhere?
    #
    @property
    def needs_ssh_password(self):
        # 'ASK' marks a field whose value must be supplied at launch time.
        return self.credential_type.kind == 'ssh' and self.password == 'ASK'

    @property
    def has_encrypted_ssh_key_data(self):
        if self.pk:
            ssh_key_data = decrypt_field(self, 'ssh_key_data')
        else:
            ssh_key_data = self.ssh_key_data
        try:
            pem_objects = validate_ssh_private_key(ssh_key_data)
            for pem_object in pem_objects:
                if pem_object.get('key_enc', False):
                    return True
        except ValidationError:
            # Invalid key data counts as "not encrypted".
            pass
        return False

    @property
    def needs_ssh_key_unlock(self):
        if self.credential_type.kind == 'ssh' and self.ssh_key_unlock in ('ASK', ''):
            return self.has_encrypted_ssh_key_data
        return False

    @property
    def needs_become_password(self):
        return self.credential_type.kind == 'ssh' and self.become_password == 'ASK'

    @property
    def needs_vault_password(self):
        return self.credential_type.kind == 'vault' and self.vault_password == 'ASK'

    @property
    def passwords_needed(self):
        '''
        List of password field names that must be provided at launch; vault
        passwords are suffixed with the vault_id when one is set.
        '''
        needed = []
        for field in ('ssh_password', 'become_password', 'ssh_key_unlock'):
            if getattr(self, 'needs_%s' % field):
                needed.append(field)
        if self.needs_vault_password:
            if self.inputs.get('vault_id'):
                needed.append('vault_password.{}'.format(self.inputs.get('vault_id')))
            else:
                needed.append('vault_password')
        return needed

    def _password_field_allows_ask(self, field):
        return field in self.credential_type.askable_fields

    def save(self, *args, **kwargs):
        self.PASSWORD_FIELDS = self.credential_type.secret_fields
        if self.pk:
            cred_before = Credential.objects.get(pk=self.pk)
            inputs_before = cred_before.inputs
            # Look up the currently persisted value so that we can replace
            # $encrypted$ with the actual DB-backed value
            for field in self.PASSWORD_FIELDS:
                if self.inputs.get(field) == '$encrypted$':
                    self.inputs[field] = inputs_before[field]
        super(Credential, self).save(*args, **kwargs)

    def encrypt_field(self, field, ask):
        # Encrypted secret values live inside self.inputs; a falsy result
        # removes the key entirely.
        encrypted = encrypt_field(self, field, ask=ask)
        if encrypted:
            self.inputs[field] = encrypted
        elif field in self.inputs:
            del self.inputs[field]

    def mark_field_for_save(self, update_fields, field):
        if field in self.credential_type.secret_fields:
            # If we've encrypted a v1 field, we actually want to persist
            # self.inputs
            field = 'inputs'
        super(Credential, self).mark_field_for_save(update_fields, field)

    def display_inputs(self):
        # Copy of inputs with every encrypted value masked as '$encrypted$'.
        field_val = self.inputs.copy()
        for k, v in field_val.items():
            if force_text(v).startswith('$encrypted$'):
                field_val[k] = '$encrypted$'
        return field_val

    def unique_hash(self, display=False):
        '''
        Credential exclusivity is not defined solely by the related
        credential type (due to vault), so this produces a hash
        that can be used to evaluate exclusivity
        '''
        if display:
            type_alias = self.credential_type.name
        else:
            type_alias = self.credential_type_id
        if self.kind == 'vault' and self.inputs.get('vault_id', None):
            if display:
                fmt_str = '{} (id={})'
            else:
                fmt_str = '{}_{}'
            return fmt_str.format(type_alias, self.inputs.get('vault_id'))
        return str(type_alias)

    @staticmethod
    def unique_dict(cred_qs):
        # Map unique_hash() -> credential; later duplicates overwrite earlier.
        ret = {}
        for cred in cred_qs:
            ret[cred.unique_hash()] = cred
        return ret
class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin):
    '''
    A job template is a reusable job definition for applying a project (with
    playbook) to an inventory source with a given credential.
    '''

    FIELDS_TO_PRESERVE_AT_COPY = [
        'labels', 'instance_groups', 'credentials', 'survey_spec'
    ]
    FIELDS_TO_DISCARD_AT_COPY = ['vault_credential', 'credential']
    # Uniqueness enforced in application code, not as a DB constraint.
    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name')]

    class Meta:
        app_label = 'main'
        ordering = ('name', )

    # Shared secret for the provisioning-callback endpoint; excluded from
    # search by prevent_search.
    host_config_key = prevent_search(
        models.CharField(
            max_length=1024,
            blank=True,
            default='',
        ))
    ask_diff_mode_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_limit_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_tags_on_launch = AskForField(blank=True, default=False, allows_field='job_tags')
    ask_skip_tags_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_job_type_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_verbosity_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_inventory_on_launch = AskForField(
        blank=True,
        default=False,
    )
    ask_credential_on_launch = AskForField(blank=True, default=False, allows_field='credentials')
    job_slice_count = models.PositiveIntegerField(
        blank=True,
        default=1,
        help_text=_(
            "The number of jobs to slice into at runtime. "
            "Will cause the Job Template to launch a workflow if value is greater than 1."
        ),
    )

    admin_role = ImplicitRoleField(parent_role=[
        'project.organization.job_template_admin_role',
        'inventory.organization.job_template_admin_role'
    ])
    execute_role = ImplicitRoleField(parent_role=[
        'admin_role', 'project.organization.execute_role',
        'inventory.organization.execute_role'
    ], )
    read_role = ImplicitRoleField(parent_role=[
        'project.organization.auditor_role',
        'inventory.organization.auditor_role', 'execute_role', 'admin_role'
    ], )

    @classmethod
    def _get_unified_job_class(cls):
        # Launching this template produces a Job.
        return Job

    @classmethod
    def _get_unified_job_field_names(cls):
        # All JobOptions fields plus template-level fields copied onto jobs.
        return set(f.name for f in JobOptions._meta.fields) | set([
            'name', 'description', 'schedule', 'survey_passwords', 'labels',
            'credentials', 'job_slice_number', 'job_slice_count'
        ])

    @property
    def validation_errors(self):
        '''
        Fields needed to start, which cannot be given on launch, invalid state.
        '''
        validation_errors = {}
        if self.inventory is None and not self.ask_inventory_on_launch:
            validation_errors['inventory'] = [
                _("Job Template must provide 'inventory' or allow prompting for it."),
            ]
        if self.project is None:
            validation_errors['project'] = [
                _("Job Templates must have a project assigned."),
            ]
        return validation_errors

    @property
    def resources_needed_to_start(self):
        # Names of required related resources that are currently unset.
        return [
            fd for fd in ['project', 'inventory']
            if not getattr(self, '{}_id'.format(fd))
        ]

    def create_job(self, **kwargs):
        '''
        Create a new job based on this template.
        '''
        return self.create_unified_job(**kwargs)

    def create_unified_job(self, **kwargs):
        '''
        Create a Job — or, when job_slice_count > 1, a WorkflowJob with one
        node per slice. `_prevent_slicing=True` forces a plain Job.
        '''
        prevent_slicing = kwargs.pop('_prevent_slicing', False)
        slice_event = bool(self.job_slice_count > 1 and (not prevent_slicing))
        if slice_event:
            # A Slice Job Template will generate a WorkflowJob rather than a Job
            from awx.main.models.workflow import WorkflowJobTemplate, WorkflowJobNode
            kwargs['_unified_job_class'] = WorkflowJobTemplate._get_unified_job_class()
            kwargs['_parent_field_name'] = "job_template"
            kwargs.setdefault('_eager_fields', {})
            kwargs['_eager_fields']['is_sliced_job'] = True
        job = super(JobTemplate, self).create_unified_job(**kwargs)
        if slice_event:
            try:
                wj_config = job.launch_config
            except JobLaunchConfig.DoesNotExist:
                wj_config = JobLaunchConfig()
            # Slice count is capped by the host count of the inventory that
            # will actually be used (launch-config inventory wins).
            actual_inventory = wj_config.inventory if wj_config.inventory else self.inventory
            # NOTE(review): xrange/Python-2 idiom — this chunk predates the
            # py3 port of this module.
            for idx in xrange(
                    min(self.job_slice_count, actual_inventory.hosts.count())):
                create_kwargs = dict(workflow_job=job,
                                     unified_job_template=self,
                                     ancestor_artifacts=dict(job_slice=idx + 1))
                WorkflowJobNode.objects.create(**create_kwargs)
        return job

    def get_absolute_url(self, request=None):
        return reverse('api:job_template_detail',
                       kwargs={'pk': self.pk},
                       request=request)

    def can_start_without_user_input(self, callback_extra_vars=None):
        '''
        Return whether job template can be used to start a new job without
        requiring any user input.
        '''
        variables_needed = False
        if callback_extra_vars:
            extra_vars_dict = parse_yaml_or_json(callback_extra_vars)
            for var in self.variables_needed_to_start:
                if var not in extra_vars_dict:
                    variables_needed = True
                    break
        elif self.variables_needed_to_start:
            variables_needed = True
        prompting_needed = False
        # The behavior of provisioning callback should mimic
        # that of job template launch, so prompting_needed should
        # not block a provisioning callback from creating/launching jobs.
        if callback_extra_vars is None:
            for ask_field_name in set(self.get_ask_mapping().values()):
                if getattr(self, ask_field_name):
                    prompting_needed = True
                    break
        return (not prompting_needed and not self.passwords_needed_to_start
                and not variables_needed)

    def _accept_or_ignore_job_kwargs(self, **kwargs):
        '''
        Partition launch-time kwargs into (prompted_data, rejected_data,
        errors_dict) based on this template's ask_*_on_launch settings and
        survey spec.
        '''
        exclude_errors = kwargs.pop('_exclude_errors', [])
        prompted_data = {}
        rejected_data = {}
        accepted_vars, rejected_vars, errors_dict = self.accept_or_ignore_variables(
            kwargs.get('extra_vars', {}),
            _exclude_errors=exclude_errors,
            extra_passwords=kwargs.get('survey_passwords', {}))
        if accepted_vars:
            prompted_data['extra_vars'] = accepted_vars
        if rejected_vars:
            rejected_data['extra_vars'] = rejected_vars

        # Handle all the other fields that follow the simple prompting rule
        for field_name, ask_field_name in self.get_ask_mapping().items():
            if field_name not in kwargs or field_name == 'extra_vars' or kwargs[
                    field_name] is None:
                continue
            new_value = kwargs[field_name]
            old_value = getattr(self, field_name)

            field = self._meta.get_field(field_name)
            if isinstance(field, models.ManyToManyField):
                old_value = set(old_value.all())
                if getattr(self, '_deprecated_credential_launch', False):
                    # TODO: remove this code branch when support for `extra_credentials` goes away
                    new_value = set(kwargs[field_name])
                else:
                    # Only additions relative to the template count as
                    # provided values.
                    new_value = set(kwargs[field_name]) - old_value
                    if not new_value:
                        continue

            if new_value == old_value:
                # no-op case: Fields the same as template's value
                # counted as neither accepted or ignored
                continue
            elif getattr(self, ask_field_name):
                # accepted prompt
                prompted_data[field_name] = new_value
            else:
                # unprompted - template is not configured to accept field on launch
                rejected_data[field_name] = new_value
                # Not considered an error for manual launch, to support old
                # behavior of putting them in ignored_fields and launching anyway
                if 'prompts' not in exclude_errors:
                    # NOTE(review): .format(field_name=...) is a no-op here —
                    # the message has no placeholder; confirm intent.
                    errors_dict[field_name] = _(
                        'Field is not configured to prompt on launch.').format(
                            field_name=field_name)

        if ('prompts' not in exclude_errors
                and (not getattr(self, 'ask_credential_on_launch', False))
                and self.passwords_needed_to_start):
            errors_dict['passwords_needed_to_start'] = _(
                'Saved launch configurations cannot provide passwords needed to start.'
            )

        needed = self.resources_needed_to_start
        if needed:
            needed_errors = []
            for resource in needed:
                if resource in prompted_data:
                    continue
                needed_errors.append(
                    _("Job Template {} is missing or undefined.").format(
                        resource))
            if needed_errors:
                errors_dict['resources_needed_to_start'] = needed_errors

        return prompted_data, rejected_data, errors_dict

    @property
    def cache_timeout_blocked(self):
        # NOTE(review): this variant blocks only when strictly greater than
        # SCHEDULE_MAX_JOBS; a sibling implementation in this file uses >= —
        # confirm which threshold is intended.
        if Job.objects.filter(
                job_template=self, status__in=[
                    'pending', 'waiting', 'running'
                ]).count() > getattr(settings, 'SCHEDULE_MAX_JOBS', 10):
            logger.error(
                "Job template %s could not be started because there are more than %s other jobs from that template waiting to run"
                % (self.name, getattr(settings, 'SCHEDULE_MAX_JOBS', 10)))
            return True
        return False

    def _can_update(self):
        return self.can_start_without_user_input()

    @property
    def notification_templates(self):
        # Return all notification_templates defined on the Job Template, on the Project, and on the Organization for each trigger type
        # TODO: Currently there is no org fk on project so this will need to be added once that is
        # available after the rbac pr
        base_notification_templates = NotificationTemplate.objects
        error_notification_templates = list(
            base_notification_templates.filter(
                unifiedjobtemplate_notification_templates_for_errors__in=[
                    self, self.project
                ]))
        success_notification_templates = list(
            base_notification_templates.filter(
                unifiedjobtemplate_notification_templates_for_success__in=[
                    self, self.project
                ]))
        any_notification_templates = list(
            base_notification_templates.filter(
                unifiedjobtemplate_notification_templates_for_any__in=[
                    self, self.project
                ]))
        # Get Organization NotificationTemplates
        if self.project is not None and self.project.organization is not None:
            # set() union also de-duplicates templates attached at both levels.
            error_notification_templates = set(
                error_notification_templates + list(
                    base_notification_templates.filter(
                        organization_notification_templates_for_errors=self.
                        project.organization)))
            success_notification_templates = set(
                success_notification_templates + list(
                    base_notification_templates.filter(
                        organization_notification_templates_for_success=self.
                        project.organization)))
            any_notification_templates = set(any_notification_templates + list(
                base_notification_templates.filter(
                    organization_notification_templates_for_any=self.project.
                    organization)))
        return dict(error=list(error_notification_templates),
                    success=list(success_notification_templates),
                    any=list(any_notification_templates))

    '''
    RelatedJobsMixin
    '''

    def _get_related_jobs(self):
        return UnifiedJob.objects.filter(unified_job_template=self)
class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin):
    '''
    A project represents a playbook git repo that can access a set of inventories
    '''

    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]

    class Meta:
        app_label = 'main'
        ordering = ('id',)

    organization = models.ForeignKey(
        'Organization',
        blank=True,
        null=True,
        on_delete=models.CASCADE,
        related_name='projects',
    )
    # Set automatically in save() when scm_type/scm_url change so the next
    # update wipes the stale checkout before fetching.
    scm_delete_on_next_update = models.BooleanField(
        default=False,
        editable=False,
    )
    scm_update_on_launch = models.BooleanField(
        default=False,
        help_text=_(
            'Update the project when a job is launched that uses the project.'
        ),
    )
    scm_update_cache_timeout = models.PositiveIntegerField(
        default=0,
        blank=True,
        # BUGFIX: the first fragment previously lacked a trailing space, so the
        # implicit concatenation rendered as "...that a newproject update...".
        help_text=_(
            'The number of seconds after the last project update ran that a new '
            'project update will be launched as a job dependency.'),
    )
    scm_revision = models.CharField(
        max_length=1024,
        blank=True,
        default='',
        editable=False,
        verbose_name=_('SCM Revision'),
        help_text=_('The last revision fetched by a project update'),
    )
    # Use the callable ``list`` as default (not ``[]``) so each instance gets
    # a fresh list instead of sharing one mutable object.
    playbook_files = JSONField(
        blank=True,
        default=list,
        editable=False,
        verbose_name=_('Playbook Files'),
        help_text=_('List of playbooks found in the project'),
    )
    inventory_files = JSONField(
        blank=True,
        default=list,
        editable=False,
        verbose_name=_('Inventory Files'),
        help_text=_(
            'Suggested list of content that could be Ansible inventory in the project'
        ),
    )
    admin_role = ImplicitRoleField(parent_role=[
        'organization.admin_role',
        'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
    ])
    use_role = ImplicitRoleField(parent_role='admin_role')
    update_role = ImplicitRoleField(parent_role='admin_role')
    read_role = ImplicitRoleField(parent_role=[
        'organization.auditor_role',
        'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
        'use_role',
        'update_role',
    ])

    @classmethod
    def _get_unified_job_class(cls):
        """Project updates are the unified-job type spawned by this template."""
        return ProjectUpdate

    @classmethod
    def _get_unified_job_field_names(cls):
        """Field names copied from the Project onto each spawned ProjectUpdate."""
        return [
            'name', 'description', 'local_path', 'scm_type', 'scm_url',
            'scm_branch', 'scm_clean', 'scm_delete_on_update', 'credential',
            'schedule', 'timeout', 'launch_type',
        ]

    def save(self, *args, **kwargs):
        """Save the project, maintaining SCM bookkeeping.

        Beyond the normal model save this:
        - flags ``scm_delete_on_next_update`` when scm_type or scm_url change;
        - auto-generates ``local_path`` ('_<pk>__<slug>') for SCM projects;
        - for a brand-new SCM project, starts the initial update unless
          ``skip_update=True`` is passed.
        """
        new_instance = not bool(self.pk)
        # If update_fields has been specified, add our field names to it,
        # if it hasn't been specified, then we're just doing a normal save.
        # NOTE: we intentionally append to the caller-provided list in place so
        # the additions are visible to super().save() via kwargs.
        update_fields = kwargs.get('update_fields', [])
        skip_update = bool(kwargs.pop('skip_update', False))
        # Check if scm_type or scm_url changes.
        if self.pk:
            project_before = self.__class__.objects.get(pk=self.pk)
            if project_before.scm_type != self.scm_type or project_before.scm_url != self.scm_url:
                self.scm_delete_on_next_update = True
                if 'scm_delete_on_next_update' not in update_fields:
                    update_fields.append('scm_delete_on_next_update')
        # Create auto-generated local path if project uses SCM.
        if self.pk and self.scm_type and not self.local_path.startswith('_'):
            slug_name = slugify(unicode(self.name)).replace(u'-', u'_')
            self.local_path = u'_%d__%s' % (int(self.pk), slug_name)
            if 'local_path' not in update_fields:
                update_fields.append('local_path')
        # Do the actual save.
        super(Project, self).save(*args, **kwargs)
        if new_instance:
            update_fields = []
            # Generate local_path for SCM after initial save (so we have a PK).
            if self.scm_type and not self.local_path.startswith('_'):
                update_fields.append('local_path')
            if update_fields:
                from awx.main.signals import disable_activity_stream
                with disable_activity_stream():
                    self.save(update_fields=update_fields)
        # If we just created a new project with SCM, start the initial update.
        if new_instance and self.scm_type and not skip_update:
            self.update()

    def _get_current_status(self):
        """Return the status string shown for this project.

        SCM projects mirror their update jobs' status; manual projects are
        'missing' or 'ok' depending on whether the checkout path exists.
        """
        if self.scm_type:
            if self.current_job and self.current_job.status:
                return self.current_job.status
            elif not self.last_job:
                return 'never updated'
            else:
                # Inherit the child job status whether it failed or succeeded
                # (the original code had identical branches for both cases).
                return self.last_job.status
        elif not self.get_project_path():
            return 'missing'
        else:
            return 'ok'

    def _get_last_job_run(self):
        """Timestamp of the last update; falls back to checkout-dir mtime.

        Returns None for manual projects with no readable path.
        """
        if self.scm_type and self.last_job:
            return self.last_job.finished
        else:
            project_path = self.get_project_path()
            if project_path:
                try:
                    mtime = os.path.getmtime(smart_str(project_path))
                    dt = datetime.datetime.fromtimestamp(mtime)
                    return make_aware(dt, get_default_timezone())
                except os.error:
                    # Path disappeared between the existence check and stat;
                    # treat as "never ran".
                    pass

    def _can_update(self):
        """Only SCM-backed projects can run a project update."""
        return bool(self.scm_type)

    def _update_unified_job_kwargs(self, create_kwargs, kwargs):
        '''
        :param create_kwargs: key-worded arguments to be updated and later used for creating unified job.
        :type create_kwargs: dict
        :param kwargs: request parameters used to override unified job template fields with runtime values.
        :type kwargs: dict
        :return: modified create_kwargs.
        :rtype: dict
        '''
        # Propagate the pending "wipe checkout" flag onto the spawned update.
        if self.scm_delete_on_next_update:
            create_kwargs['scm_delete_on_update'] = True
        return create_kwargs

    def create_project_update(self, **kwargs):
        """Convenience wrapper to spawn a ProjectUpdate for this project."""
        return self.create_unified_job(**kwargs)

    @property
    def cache_timeout_blocked(self):
        """True while the last update is still within the cache timeout window."""
        if not self.last_job_run:
            return False
        return (self.last_job_run + datetime.timedelta(
            seconds=self.scm_update_cache_timeout)) > now()

    @property
    def needs_update_on_launch(self):
        """True when a dependent job launch should first run a project update."""
        if self.scm_type and self.scm_update_on_launch:
            if not self.last_job_run:
                return True
            if (self.last_job_run + datetime.timedelta(
                    seconds=self.scm_update_cache_timeout)) <= now():
                return True
        return False

    @property
    def notification_templates(self):
        """Notification templates grouped by trigger, including org-level ones."""
        base_notification_templates = NotificationTemplate.objects
        error_notification_templates = list(
            base_notification_templates.filter(
                unifiedjobtemplate_notification_templates_for_errors=self))
        success_notification_templates = list(
            base_notification_templates.filter(
                unifiedjobtemplate_notification_templates_for_success=self))
        any_notification_templates = list(
            base_notification_templates.filter(
                unifiedjobtemplate_notification_templates_for_any=self))
        # Get Organization NotificationTemplates; set() also dedupes templates
        # attached both to the project and to the organization.
        if self.organization is not None:
            error_notification_templates = set(
                error_notification_templates + list(
                    base_notification_templates.filter(
                        organization_notification_templates_for_errors=self.organization)))
            success_notification_templates = set(
                success_notification_templates + list(
                    base_notification_templates.filter(
                        organization_notification_templates_for_success=self.organization)))
            any_notification_templates = set(
                any_notification_templates + list(
                    base_notification_templates.filter(
                        organization_notification_templates_for_any=self.organization)))
        return dict(error=list(error_notification_templates),
                    success=list(success_notification_templates),
                    any=list(any_notification_templates))

    def get_absolute_url(self, request=None):
        """API detail URL for this project."""
        return reverse('api:project_detail', kwargs={'pk': self.pk}, request=request)
class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTemplateMixin, ResourceMixin):
    # A workflow job template: a DAG of nodes, each pointing at a unified job
    # template, launched together as one WorkflowJob.

    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]

    class Meta:
        app_label = 'main'

    organization = models.ForeignKey(
        'Organization',
        blank=True,
        null=True,
        on_delete=models.SET_NULL,
        related_name='workflows',
    )
    admin_role = ImplicitRoleField(parent_role=[
        'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
        'organization.admin_role'
    ])
    execute_role = ImplicitRoleField(parent_role=['admin_role'])
    read_role = ImplicitRoleField(parent_role=[
        'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
        'organization.auditor_role', 'execute_role', 'admin_role'
    ])

    @property
    def workflow_nodes(self):
        # Alias for the related manager of this template's node set.
        return self.workflow_job_template_nodes

    @classmethod
    def _get_unified_job_class(cls):
        # WorkflowJobs are the unified-job type spawned by this template.
        return WorkflowJob

    @classmethod
    def _get_unified_job_field_names(cls):
        # All WorkflowJobOptions fields plus the common launch metadata are
        # copied onto each spawned WorkflowJob.
        return set(f.name for f in WorkflowJobOptions._meta.fields) | set(
            ['name', 'description', 'schedule', 'survey_passwords', 'labels'])

    @classmethod
    def _get_unified_jt_copy_names(cls):
        # Fields duplicated when a user copies this template; 'labels' is
        # handled separately by the copy machinery, so remove it here.
        base_list = super(WorkflowJobTemplate, cls)._get_unified_jt_copy_names()
        base_list.remove('labels')
        return (base_list | set([
            'survey_spec', 'survey_enabled', 'ask_variables_on_launch',
            'organization'
        ]))

    def get_absolute_url(self, request=None):
        # API detail URL for this workflow job template.
        return reverse('api:workflow_job_template_detail',
                       kwargs={'pk': self.pk},
                       request=request)

    @property
    def cache_timeout_blocked(self):
        # TODO: don't allow running of job template if same workflow template running
        return False

    @property
    def notification_templates(self):
        # Notification templates attached to this template, grouped by trigger.
        # NOTE(review): unlike JobTemplate/Project, no organization-level
        # templates are merged in here.
        base_notification_templates = NotificationTemplate.objects.all()
        error_notification_templates = list(
            base_notification_templates.filter(
                unifiedjobtemplate_notification_templates_for_errors__in=[
                    self
                ]))
        success_notification_templates = list(
            base_notification_templates.filter(
                unifiedjobtemplate_notification_templates_for_success__in=[
                    self
                ]))
        any_notification_templates = list(
            base_notification_templates.filter(
                unifiedjobtemplate_notification_templates_for_any__in=[self]))
        return dict(error=list(error_notification_templates),
                    success=list(success_notification_templates),
                    any=list(any_notification_templates))

    def create_unified_job(self, **kwargs):
        # Spawn a WorkflowJob and clone this template's node graph onto it.
        workflow_job = super(WorkflowJobTemplate,
                             self).create_unified_job(**kwargs)
        workflow_job.copy_nodes_from_original(original=self)
        return workflow_job

    def _accept_or_ignore_job_kwargs(self, _exclude_errors=(), **kwargs):
        # Split launch-time kwargs into (accepted, rejected, errors).
        # Workflows only ever accept extra_vars; everything else is rejected.
        prompted_fields = {}
        rejected_fields = {}
        accepted_vars, rejected_vars, errors_dict = self.accept_or_ignore_variables(
            kwargs.get('extra_vars', {}))
        if accepted_vars:
            prompted_fields['extra_vars'] = accepted_vars
        if rejected_vars:
            rejected_fields['extra_vars'] = rejected_vars
        # WFJTs do not behave like JTs, it can not accept inventory, credential, etc.
        bad_kwargs = kwargs.copy()
        bad_kwargs.pop('extra_vars', None)
        if bad_kwargs:
            rejected_fields.update(bad_kwargs)
            for field in bad_kwargs:
                errors_dict[field] = _(
                    'Field is not allowed for use in workflows.')
        return prompted_fields, rejected_fields, errors_dict

    def can_start_without_user_input(self):
        # Launchable unattended only when no survey vars are required, every
        # node has a template, and no node's prompts are rejected.
        return not bool(self.variables_needed_to_start
                        or self.node_templates_missing()
                        or self.node_prompts_rejected())

    def node_templates_missing(self):
        # PKs of nodes whose unified job template was deleted/never set.
        return [
            node.pk for node in self.workflow_job_template_nodes.filter(
                unified_job_template__isnull=True).all()
        ]

    def node_prompts_rejected(self):
        # PKs of nodes whose saved prompts would be rejected by their target
        # template's _accept_or_ignore_job_kwargs.
        node_list = []
        for node in self.workflow_job_template_nodes.prefetch_related(
                'unified_job_template').all():
            ujt_obj = node.unified_job_template
            if ujt_obj is None:
                continue
            prompts_dict = node.prompts_dict()
            accepted_fields, ignored_fields, prompts_errors = ujt_obj._accept_or_ignore_job_kwargs(
                **prompts_dict)
            if prompts_errors:
                node_list.append(node.pk)
        return node_list

    def user_copy(self, user):
        # Copy the template and its node graph, filtering node attachments by
        # the copying user's permissions.
        new_wfjt = self.copy_unified_jt()
        new_wfjt.copy_nodes_from_original(original=self, user=user)
        return new_wfjt