def create_unified_job(self, **kwargs):
    """Create the unified job for this job template, honoring job slicing.

    When the effective slice count exceeds 1 (and slicing was not explicitly
    prevented via ``_prevent_slicing``), a WorkflowJob is produced instead of
    a plain Job, with one WorkflowJobNode created per slice.
    """
    prevent_slicing = kwargs.pop('_prevent_slicing', False)
    slice_ct = self.get_effective_slice_ct(kwargs)
    slicing_allowed = not prevent_slicing
    slice_event = slicing_allowed and slice_ct > 1
    if slice_event:
        # A Slice Job Template will generate a WorkflowJob rather than a Job
        from awx.main.models.workflow import WorkflowJobTemplate, WorkflowJobNode

        kwargs['_unified_job_class'] = WorkflowJobTemplate._get_unified_job_class()
        kwargs['_parent_field_name'] = "job_template"
        kwargs.setdefault('_eager_fields', {})
        kwargs['_eager_fields']['is_sliced_job'] = True
    elif slicing_allowed and self.job_slice_count > 1:
        # Unique case where JT was set to slice but hosts not available
        kwargs.setdefault('_eager_fields', {})
        kwargs['_eager_fields']['job_slice_count'] = 1
    elif prevent_slicing:
        kwargs.setdefault('_eager_fields', {})
        kwargs['_eager_fields'].setdefault('job_slice_count', 1)
    job = super(JobTemplate, self).create_unified_job(**kwargs)
    if slice_event:
        # One workflow node per slice; job_slice is 1-based
        for slice_idx in range(slice_ct):
            WorkflowJobNode.objects.create(
                workflow_job=job,
                unified_job_template=self,
                ancestor_artifacts=dict(job_slice=slice_idx + 1),
            )
    return job
def create_unified_job(self, **kwargs):
    """Create the unified job for this job template, honoring job slicing.

    When ``job_slice_count`` exceeds 1 (and slicing was not explicitly
    prevented via ``_prevent_slicing``), a WorkflowJob is produced instead of
    a plain Job, with one WorkflowJobNode per slice, capped by the number of
    hosts in the effective inventory.
    """
    prevent_slicing = kwargs.pop('_prevent_slicing', False)
    slice_event = self.job_slice_count > 1 and not prevent_slicing
    if slice_event:
        # A Slice Job Template will generate a WorkflowJob rather than a Job
        from awx.main.models.workflow import WorkflowJobTemplate, WorkflowJobNode

        kwargs['_unified_job_class'] = WorkflowJobTemplate._get_unified_job_class()
        kwargs['_parent_field_name'] = "job_template"
        kwargs.setdefault('_eager_fields', {})
        kwargs['_eager_fields']['is_sliced_job'] = True
    elif prevent_slicing:
        kwargs.setdefault('_eager_fields', {})
        kwargs['_eager_fields'].setdefault('job_slice_count', 1)
    job = super(JobTemplate, self).create_unified_job(**kwargs)
    if slice_event:
        # Prefer the launch config's inventory override; fall back to the JT's
        try:
            wj_config = job.launch_config
        except JobLaunchConfig.DoesNotExist:
            wj_config = JobLaunchConfig()
        actual_inventory = wj_config.inventory or self.inventory
        # Never create more nodes than there are hosts to slice across
        node_total = min(self.job_slice_count, actual_inventory.hosts.count())
        for node_idx in range(node_total):
            WorkflowJobNode.objects.create(
                workflow_job=job,
                unified_job_template=self,
                ancestor_artifacts=dict(job_slice=node_idx + 1),
            )
    return job
def spawn_workflow_fan(depth, fan):
    """Create a workflow job template with one root node, then fan it out.

    Delegates the recursive fan-out (``fan`` children per level, ``depth``
    levels deep) to ``spawn_fan``.
    """
    wfjt = WorkflowJobTemplate()
    wfjt.name = "w2"
    wfjt.save()
    root = WorkflowJobTemplateNode()
    # 10 is an existing job template id — TODO confirm against the target DB
    root.unified_job_template_id = 10
    root.workflow_job_template_id = wfjt.id
    root.save()
    wfjt.workflow_job_template_nodes.add(root)
    wfjt.save()
    spawn_fan(root, wfjt.id, fan, depth)
def spawn_workflow(num):
    """Create a workflow job template with ``num`` nodes chained by success edges.

    The first node is attached directly to the workflow; each subsequent node
    is linked as a success node of its predecessor.
    """
    wfjt = WorkflowJobTemplate()
    wfjt.name = "w1"
    wfjt.save()
    previous = None
    for _ in range(num):
        node = WorkflowJobTemplateNode()
        # 10 was a job template I had created previously
        node.unified_job_template_id = 10
        node.workflow_job_template_id = wfjt.id
        node.save()
        if previous is None:
            wfjt.workflow_job_template_nodes.add(node)
            wfjt.save()
        else:
            previous.success_nodes.add(node)
            previous.save()
        previous = node
def workflow_job_template(organization):
    """Fixture: a saved WorkflowJobTemplate belonging to ``organization``."""
    template = WorkflowJobTemplate(
        name='test-workflow_job_template',
        organization=organization,
    )
    template.save()
    return template
def test_wfjt_unique_together_with_org(self, organization):
    """A WFJT name must be unique within an org, but not across org boundaries."""
    first = WorkflowJobTemplate(name='foo', organization=organization)
    first.save()
    # Same name in the same org violates the unique-together constraint
    duplicate = WorkflowJobTemplate(name='foo', organization=organization)
    with pytest.raises(ValidationError):
        duplicate.validate_unique()
    # Same name with no organization does not conflict
    orgless = WorkflowJobTemplate(name='foo', organization=None)
    orgless.validate_unique()
def test_get_ask_mapping_integrity():
    """The prompt-on-launch (ask_) mapping exposes exactly the expected fields."""
    expected_fields = ['extra_vars', 'inventory', 'limit', 'scm_branch']
    assert list(WorkflowJobTemplate.get_ask_mapping()) == expected_fields
def workflow_job_template_unit():
    """Fixture: an unsaved WorkflowJobTemplate instance for unit tests."""
    template = WorkflowJobTemplate(name='workflow')
    return template
def test_get_ask_mapping_integrity():
    """The prompt-on-launch (ask_) mapping exposes exactly the expected fields.

    Bug fix: on Python 3, ``dict.keys()`` returns a view object and a view
    never compares equal to a ``list``, so the original assertion
    ``get_ask_mapping().keys() == [...]`` was always False. Materialize the
    keys first (matching the sibling test elsewhere in this codebase).
    """
    assert list(WorkflowJobTemplate.get_ask_mapping().keys()) == [
        'extra_vars', 'inventory'
    ]