def process_pipeline_args(self, pipeline_args: JobVariant): # our step depends on dependency descriptor step component_descriptor_step = pipeline_args.step('component_descriptor') self.image_scan_step._add_dependency(component_descriptor_step) for trait_name in self.trait.trait_depends(): if not pipeline_args.has_trait(trait_name): raise ModelValidationError( f'dependency towards absent trait: {trait_name}') depended_on_trait = pipeline_args.trait(trait_name) # XXX refactor Trait/TraitTransformer transformer = depended_on_trait.transformer() # XXX step-injection may have (unintended) side-effects :-/ depended_on_step_names = { step.name for step in transformer.inject_steps() } for step in pipeline_args.steps(): if not step.name in depended_on_step_names: continue self.image_scan_step._add_dependency(step) # prevent cyclic dependencies (from auto-injected depends) if self.image_scan_step.name in step.depends(): step._remove_dependency(self.image_scan_step)
def process_pipeline_args(self, pipeline_args: JobVariant): main_repo = pipeline_args.main_repository() prepare_step = pipeline_args.step('prepare') publish_step = pipeline_args.step('publish') image_name = main_repo.branch() + '-image' tag_name = main_repo.branch() + '-tag' # configure prepare step's outputs (consumed by publish step) prepare_step.add_output('image_path', image_name) prepare_step.add_output('tag_path', tag_name) # configure publish step's inputs (produced by prepare step) publish_step.add_input('image_path', image_name) publish_step.add_input('tag_path', tag_name) input_step_names = set() for image_descriptor in self.trait.dockerimages(): # todo: image-specific prepare steps input_step_names.update(image_descriptor.input_steps()) for input_step_name in input_step_names: input_step = pipeline_args.step(input_step_name) input_name = input_step.output_dir() prepare_step.add_input(input_name, input_name) # prepare-step depdends on every other step, except publish and release # TODO: do not hard-code knowledge about 'release' step for step in pipeline_args.steps(): if step.name in ['publish', 'release']: continue prepare_step._add_dependency(step)
def _create_repos(self, pipeline_def: JobVariant, raw_dict): pipeline_def._repos_dict = {} if 'repo' in raw_dict: # special case: repo singleton (will vanish once we mv definitions into component-repos) repo_dict = raw_dict['repo'] name = 'source' if 'name' not in repo_dict else repo_dict['name'] pipeline_def._repos_dict[name] = RepositoryConfig( raw_dict=repo_dict, logical_name=name, qualifier=None, is_main_repo=True) pipeline_def._main_repository_name = name if 'repos' in raw_dict: for repo_dict in raw_dict['repos']: if not 'cfg_name' in repo_dict: github_cfg = self.cfg_set.github() else: github_cfg = self.cfg_set.github(repo_dict['cfg_name']) hostname = github_cfg.hostname() repo_dict['hostname'] = hostname pipeline_def._repos_dict.update({ repo_dict['name']: RepositoryConfig(logical_name=repo_dict['name'], raw_dict=repo_dict, is_main_repo=False) })
def examinee(self, name='Dont care'):
    '''Return a minimal JobVariant test fixture.

    Bug fix: the ``name`` parameter was previously ignored - the variant was
    always created with the hard-coded string 'Dont care'. It is now passed
    through, keeping the same default for existing callers.
    '''
    variant = JobVariant(
        name=name,
        raw_dict={},
        resource_registry={},
    )
    # set steps dict, usually done by factory.
    variant._steps_dict = {}
    return variant
def process_pipeline_args(self, pipeline_args: JobVariant):
    '''Wire the enabled scan steps into the job variant's dependency graph.

    Each enabled scan step (image-scan if protecode is configured,
    malware-scan if clam-av is configured) depends on the
    component-descriptor step and on every step injected by traits this
    trait declares a dependency on. The previous implementation duplicated
    the add-dependency / break-cycle logic for both scan steps; it is now
    factored into local helpers.

    Raises ModelValidationError if a depended-on trait is absent.
    '''
    def enabled_scan_steps():
        # scan steps that are active according to trait configuration
        if self.trait.protecode():
            yield self.image_scan_step
        if self.trait.clam_av():
            yield self.malware_scan_step

    def add_dependency_breaking_cycles(scan_step, step):
        scan_step._add_dependency(step)
        # prevent cyclic dependencies (from auto-injected depends)
        if scan_step.name in step.depends():
            step._remove_dependency(scan_step)

    # our steps depend on dependency descriptor step
    component_descriptor_step = pipeline_args.step(
        concourse.model.traits.component_descriptor.DEFAULT_COMPONENT_DESCRIPTOR_STEP_NAME
    )
    for scan_step in enabled_scan_steps():
        scan_step._add_dependency(component_descriptor_step)

    for trait_name in self.trait.trait_depends():
        if not pipeline_args.has_trait(trait_name):
            raise ModelValidationError(f'dependency towards absent trait: {trait_name}')

        depended_on_trait = pipeline_args.trait(trait_name)
        # XXX refactor Trait/TraitTransformer
        transformer = depended_on_trait.transformer()
        # XXX step-injection may have (unintended) side-effects :-/
        depended_on_step_names = {step.name for step in transformer.inject_steps()}

        for step in pipeline_args.steps():
            if step.name not in depended_on_step_names:
                continue
            for scan_step in enabled_scan_steps():
                add_dependency_breaking_cycles(scan_step, step)
def process_pipeline_args(self, pipeline_args: JobVariant): # we depend on all other steps for step in pipeline_args.steps(): self.release_step._add_dependency(step) # a 'release job' should only be triggered automatically if explicitly configured main_repo = pipeline_args.main_repository() if main_repo: if 'trigger' not in pipeline_args.raw['repo']: main_repo._trigger = False
def process_pipeline_args(self, pipeline_args: JobVariant): # All steps depend on meta step and receive an input from it for step in pipeline_args.steps(): if step == self.meta_step: continue step._add_dependency(self.meta_step) step.add_input(name=DIR_NAME, variable_name=ENV_VAR_NAME) if pipeline_args.has_trait('version'): # All steps depend on version. Remove ourself to avoid circular dependency version_step = pipeline_args.step('version') self.meta_step._remove_dependency(version_step) self.meta_step.remove_input('version_path')
def process_pipeline_args(self, pipeline_args: JobVariant): # all steps depend from us and may consume our output for step in pipeline_args.steps(): if step == self.version_step: continue step._add_dependency(self.version_step) step.add_input(variable_name=ENV_VAR_NAME, name=DIR_NAME)
def _create_variant(self, raw_dict, variant_name, resource_registry) -> JobVariant:
    '''Assemble a JobVariant from its raw definition.

    Populates build steps, traits, repositories and publish-repos before
    returning the variant.
    '''
    variant = JobVariant(
        name=variant_name,
        raw_dict=raw_dict,
        resource_registry=resource_registry,
    )
    # build steps
    variant._steps_dict = self._create_build_steps(raw_dict)
    # traits
    variant._traits_dict = self._create_traits(raw_dict, variant_name)
    self._create_repos(variant, raw_dict)
    self._inject_publish_repos(variant)
    return variant
def process_pipeline_args(self, pipeline_args: JobVariant): # all steps depend from us and may consume our output for step in pipeline_args.steps(): if step == self.version_step: continue step._add_dependency(self.version_step) step.add_input(name='version_path', variable_name='managed-version')
def setUp(self):
    # Test fixture: builds a minimal job variant with one non-synthetic step,
    # a manually-wired main repository and mocked cfg-set/github/email
    # configs, then chdirs into a temp dir pre-populated with meta contents.
    # NOTE(review): the mkdir/populate/chdir order matters - keep it intact.
    self.tmp_dir = tempfile.TemporaryDirectory()
    self.meta_dir = os.path.join(self.tmp_dir.name, 'meta')
    os.mkdir(self.meta_dir)
    test_utils.populate_meta_dir(self.meta_dir)
    self.on_error_dir = os.path.join(self.tmp_dir.name, 'on_error_dir')
    os.mkdir(self.on_error_dir)
    # single job step under test; notifications cfg is normally injected by
    # the pipeline factory, so set it explicitly here
    self.job_step = PipelineStep(
        name='step1',
        is_synthetic=False,
        notification_policy=StepNotificationPolicy.NOTIFY_PULL_REQUESTS,
        script_type=ScriptType.BOURNE_SHELL,
        raw_dict={},
    )
    self.job_step._notifications_cfg = NotificationCfgSet('default', {})
    # register a 'meta' resource matching the job name
    resource_registry = ResourceRegistry()
    meta_resource_identifier = ResourceIdentifier(type_name='meta', base_name='a_job')
    meta_resource = Resource(resource_identifier=meta_resource_identifier, raw_dict={})
    resource_registry.add_resource(meta_resource)
    self.job_variant = JobVariant(
        name='a_job', raw_dict={}, resource_registry=resource_registry
    )
    # Set a main repository manually
    test_repo_logical_name = 'test-repository'
    self.job_variant._repos_dict = {}
    self.job_variant._repos_dict[test_repo_logical_name] = RepositoryConfig(
        raw_dict={
            'branch': 'master',
            'hostname': 'github.foo.bar',
            'path': 'test/repo'
        },
        logical_name=test_repo_logical_name,
        qualifier=None,
        is_main_repo=True
    )
    self.job_variant._main_repository_name = test_repo_logical_name
    self.job_variant._traits_dict = {}
    # mocked config set: github/email cfgs and ctx-repository lookup
    self.cfg_set = MagicMock()
    self.github_cfg = MagicMock()
    self.github_cfg.name = MagicMock(return_value='github_cfg')
    self.email_cfg = MagicMock()
    self.email_cfg.name = MagicMock(return_value='email_cfg')
    self.cfg_set.github = MagicMock(return_value=self.github_cfg)
    self.cfg_set.email = MagicMock(return_value=self.email_cfg)
    ctx_repo_mock = MagicMock(return_value='repo_url')
    ctx_repo_mock.base_url = MagicMock(return_value='repo_url')
    self.cfg_set.ctx_repository = MagicMock(return_value=ctx_repo_mock)
    # step definition under test
    self.render_step = step_def('notification')
    # remember cwd so tearDown can restore it before the temp dir is removed
    self.old_cwd = os.getcwd()
    os.chdir(self.tmp_dir.name)
def process_pipeline_args(self, pipeline_args: JobVariant): # our step depends on dependendency descriptor step component_descriptor_step = pipeline_args.step('component_descriptor') self.update_component_deps_step._add_dependency(component_descriptor_step) upstream_component_name = self.trait.upstream_component_name() if upstream_component_name: self.update_component_deps_step.variables()['UPSTREAM_COMPONENT_NAME'] = '"{cn}"'.format( cn=upstream_component_name, )
def process_pipeline_args(self, pipeline_args: JobVariant): repo_name = pipeline_args.main_repository().logical_name() # convert main-repo to PR pr_repo = pipeline_args.pr_repository(repo_name) pr_repo._trigger = True # patch-in the updated repository pipeline_args._repos_dict[repo_name] = pr_repo # patch the configured steps so that they do not report their status back to PRs for step_name in self.trait.disable_status_report(): if not pipeline_args.has_step(step_name): raise model.base.ModelValidationError( f"Reporting to pull requests was disabled for step '{step_name}', but no step " f"'{step_name}' was found in job '{pipeline_args.variant_name}'" ) step = pipeline_args.step(step_name) step._notification_policy = StepNotificationPolicy.NO_NOTIFICATION
def process_pipeline_args(self, pipeline_args: JobVariant):
    '''Wire the prepare step's outputs/inputs and dependencies.

    Improvements over the previous version: ``oci_builder()`` is evaluated
    once instead of twice; the redundant ``'build_oci_image'`` entry in the
    skip list (already covered by the ``startswith`` check) is removed; the
    "depdends" comment typo is fixed. Behavior is unchanged.
    '''
    main_repo = pipeline_args.main_repository()
    prepare_step = pipeline_args.step('prepare')

    uses_image_resource = (
        self.trait.oci_builder() is OciBuilder.CONCOURSE_IMAGE_RESOURCE
    )
    if uses_image_resource:
        publish_step = pipeline_args.step('publish')

    image_name = main_repo.branch() + '-image'
    tag_name = main_repo.branch() + '-tag'

    # configure prepare step's outputs (consumed by publish step)
    prepare_step.add_output(variable_name=IMAGE_ENV_VAR_NAME, name=image_name)
    prepare_step.add_output(variable_name=TAG_ENV_VAR_NAME, name=tag_name)

    if uses_image_resource:
        # configure publish step's inputs (produced by prepare step)
        publish_step.add_input(variable_name=IMAGE_ENV_VAR_NAME, name=image_name)
        publish_step.add_input(variable_name=TAG_ENV_VAR_NAME, name=tag_name)

    for build_step in self._build_steps:
        build_step.add_input(variable_name=IMAGE_ENV_VAR_NAME, name=image_name)

    input_step_names = set()
    for image_descriptor in self.trait.dockerimages():
        # todo: image-specific prepare steps
        input_step_names.update(image_descriptor.input_steps())

    for input_step_name in input_step_names:
        input_step = pipeline_args.step(input_step_name)
        input_name = input_step.output_dir()
        prepare_step.add_input(input_name, input_name)

    # prepare-step depends on every other step, except publish, release and
    # oci-image build steps
    # TODO: do not hard-code knowledge about 'release' step
    for step in pipeline_args.steps():
        if step.name in ('publish', 'release'):
            continue
        if step.name.startswith('build_oci_image'):
            continue
        prepare_step._add_dependency(step)
def setUp(self):
    '''Create a bare JobVariant whose main repository is mocked.'''
    self.pipeline_args = JobVariant(
        name='a_job',
        raw_dict={},
        resource_registry=object(),
    )
    # steps dict is usually populated by the factory
    self.pipeline_args._steps_dict = {}

    repo_mock = MagicMock()
    repo_mock.repo_hostname = MagicMock(return_value='github.com')
    repo_mock.repo_path = MagicMock(return_value='org/repo')
    self.repo_mock = repo_mock
    self.pipeline_args.main_repository = MagicMock(return_value=repo_mock)
def process_pipeline_args(self, pipeline_args: JobVariant): cd_trait = pipeline_args.trait('component_descriptor') cd_step = pipeline_args.step(cd_trait.step_name()) self.release_step._add_dependency(cd_step)
def process_pipeline_args(self, pipeline_args: JobVariant): # our step depends on dependency descriptor step component_descriptor_step = pipeline_args.step('component_descriptor') self.source_scan_step._add_dependency(component_descriptor_step)
def process_pipeline_args(self, pipeline_args: JobVariant): # all steps depend from us and may consume our output for step in pipeline_args.steps(): step._notifications_cfg = self.trait.notifications_cfg( step.notifications_cfg_name())
def process_pipeline_args(self, pipeline_args: JobVariant):
    '''Make the source-scan step depend on the component-descriptor step.'''
    descriptor_step_name = (
        concourse.model.traits.component_descriptor.DEFAULT_COMPONENT_DESCRIPTOR_STEP_NAME
    )
    # our step depends on the component-descriptor step
    self.source_scan_step._add_dependency(pipeline_args.step(descriptor_step_name))
def process_pipeline_args(self, pipeline_args: JobVariant): main_repo = pipeline_args.main_repository() if main_repo: if 'trigger' not in pipeline_args.raw['repo']: main_repo._trigger = False