def current_product_descriptor():
    component_descriptor = os.path.join(
        util.check_env('COMPONENT_DESCRIPTOR_DIR'),
        'component_descriptor',
    )
    return product.model.ComponentDescriptor.from_dict(
        util.parse_yaml_file(component_descriptor),
    )
def parse_component_descriptor():
    component_descriptor_file = os.path.join(
        util.check_env('COMPONENT_DESCRIPTOR_DIR'),
        'component_descriptor',
    )
    component_descriptor = product.model.ComponentDescriptor.from_dict(
        raw_dict=util.parse_yaml_file(component_descriptor_file),
    )
    return component_descriptor
def enumerate_definition_descriptors(self):
    info('enumerating explicitly specified definition file')
    try:
        definitions = parse_yaml_file(self.definition_file)
        yield from self._wrap_into_descriptors(
            repo_path=self.repo_path,
            repo_hostname=self.repo_host,
            branch=self.repo_branch,
            raw_definitions=definitions,
        )
    except BaseException as e:
        yield DefinitionDescriptor(
            pipeline_name='<invalid YAML>',
            pipeline_definition={},
            main_repo={
                'path': self.repo_path,
                'branch': self.repo_branch,
                'hostname': self.repo_host,
            },
            concourse_target_cfg=self.cfg_set.concourse(),
            concourse_target_team=self.job_mapping.team_name(),
            override_definitions=(),
            exception=e,
        )
def current_product_descriptor():
    component_descriptor_dir = pathlib.Path(
        util.check_env('COMPONENT_DESCRIPTOR_DIR')
    ).absolute()
    component_descriptor = component_descriptor_dir.joinpath('component_descriptor')
    raw = util.parse_yaml_file(component_descriptor)
    return product.model.Product.from_dict(raw)
def from_cfg_dir(cfg_dir: str, cfg_types_file='config_types.yaml'):
    cfg_dir = existing_dir(os.path.abspath(cfg_dir))
    cfg_types_dict = parse_yaml_file(os.path.join(cfg_dir, cfg_types_file))
    raw = {}
    raw[ConfigFactory.CFG_TYPES] = cfg_types_dict

    def parse_cfg(cfg_type):
        # assume for now that there is exactly one cfg source (file)
        cfg_sources = list(cfg_type.sources())
        if not len(cfg_sources) == 1:
            raise ValueError(
                'currently, only exactly one cfg file is supported per type'
            )
        cfg_file = cfg_sources[0].file()
        parsed_cfg = parse_yaml_file(os.path.join(cfg_dir, cfg_file))
        return parsed_cfg

    # parse all configurations
    for cfg_type in map(ConfigType, cfg_types_dict.values()):
        cfg_name = cfg_type.cfg_type_name()
        raw[cfg_name] = parse_cfg(cfg_type)

    return ConfigFactory(raw_dict=raw)
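# Hedged usage sketch (not part of the original sources): how from_cfg_dir might be
# called. The directory path and cfg-set name below are placeholders, and the layout
# assumption (a config_types.yaml plus one YAML file per config type inside cfg_dir)
# is derived from the function above. The import path of ConfigFactory is not shown
# in these snippets, so it is omitted here.
#
# factory = ConfigFactory.from_cfg_dir('/path/to/cfg_dir')  # expects config_types.yaml inside
# cfg_set = factory.cfg_set('example_cfg_set')               # hypothetical cfg-set name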
def upload_product_images(
    protecode_cfg_name: str,
    product_cfg_file: CliHints.existing_file(),
    processing_mode: CliHint(
        choices=list(ProcessingMode),
        type=ProcessingMode,
    )=ProcessingMode.UPLOAD_IF_CHANGED,
    protecode_group_id: int=5,
    parallel_jobs: int=4,
    cve_threshold: int=7,
    ignore_if_triaged: bool=True,
    reference_group_ids: [int]=[],
):
    cfg_factory = ctx().cfg_factory()
    protecode_cfg = cfg_factory.protecode(protecode_cfg_name)

    product_descriptor = ComponentDescriptor.from_dict(
        raw_dict=parse_yaml_file(product_cfg_file)
    )

    upload_results, license_report = upload_images(
        protecode_cfg=protecode_cfg,
        product_descriptor=product_descriptor,
        protecode_group_id=protecode_group_id,
        parallel_jobs=parallel_jobs,
        cve_threshold=cve_threshold,
        ignore_if_triaged=ignore_if_triaged,
        processing_mode=processing_mode,
        reference_group_ids=reference_group_ids,
    )
def download_dependencies(
    component_descriptor: CliHints.existing_file(),
    out_dir: str,
):
    if not os.path.isdir(out_dir):
        os.mkdir(out_dir)

    component_descriptor = ComponentDescriptor.from_dict(parse_yaml_file(component_descriptor))
    image_references = [
        container_image.image_reference()
        for _, container_image
        in _enumerate_effective_images(component_descriptor=component_descriptor)
    ]

    def mangled_outfile_name(image_reference):
        mangled_fname = image_reference.replace(':', '_').replace('/', '_')
        return os.path.join(out_dir, mangled_fname + '.tar')

    for image_ref in image_references:
        fname = mangled_outfile_name(image_ref)
        with open(fname, 'wb') as f:
            container.registry.retrieve_container_image(
                image_reference=image_ref,
                outfileobj=f,
            )
        print(fname)
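# Hedged usage sketch (not part of the original sources): invoking download_dependencies
# directly. Both argument values are placeholders; per the function above, each container
# image referenced by the descriptor is written to <out_dir>/<mangled-image-reference>.tar.
#
# download_dependencies(
#     component_descriptor='component_descriptor.yaml',  # placeholder file name
#     out_dir='./downloaded-images',                      # placeholder output directory
# )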
def components_with_version_changes(component_diff_path: str):
    if not os.path.isfile(component_diff_path):
        util.info('no component_diff found at: ' + str(component_diff_path))
        return set()
    else:
        component_diff = util.parse_yaml_file(component_diff_path)
        comp_names = component_diff.get('component_names_with_version_changes', set())
        return set(comp_names)
def parse_cfg(cfg_type):
    # assume for now that there is exactly one cfg source (file)
    cfg_sources = list(cfg_type.sources())
    if not len(cfg_sources) == 1:
        raise ValueError('currently, only exactly one cfg file is supported per type')
    cfg_file = cfg_sources[0].file()
    parsed_cfg = parse_yaml_file(os.path.join(cfg_dir, cfg_file), as_snd=False)
    return parsed_cfg
def enumerate_pipeline_definitions(directories):
    for directory in directories:
        # for now, hard-code mandatory .repository_mapping
        repo_mapping = parse_yaml_file(os.path.join(directory, '.repository_mapping'))
        repo_definition_mapping = {repo_path: list() for repo_path in repo_mapping.keys()}

        for repo_path, definition_files in repo_mapping.items():
            for definition_file_path in definition_files:
                abs_file = os.path.abspath(os.path.join(directory, definition_file_path))
                pipeline_raw_definition = parse_yaml_file(abs_file, as_snd=False)
                repo_definition_mapping[repo_path].append(pipeline_raw_definition)

        for repo_path, definitions in repo_definition_mapping.items():
            yield (repo_path, definitions)
def replicate_pipeline_definitions(
    definition_dir: str,
    cfg_dir: str,
    cfg_name: str,
):
    '''
    replicates pipeline definitions from cc-pipelines to component repositories.
    will only be required until definitions are moved to component repositories.
    '''
    util.ensure_directory_exists(definition_dir)
    util.ensure_directory_exists(cfg_dir)

    cfg_factory = ConfigFactory.from_cfg_dir(cfg_dir)
    cfg_set = cfg_factory.cfg_set(cfg_name)
    github_cfg = cfg_set.github()
    github = _create_github_api_object(github_cfg=github_cfg)

    repo_mappings = util.parse_yaml_file(os.path.join(definition_dir, '.repository_mapping'))

    for repo_path, definition_file in repo_mappings.items():
        # hack: definition_file is a list with always exactly one entry
        definition_file = util.ensure_file_exists(os.path.join(definition_dir, definition_file[0]))
        with open(definition_file) as f:
            definition_contents = f.read()

        repo_owner, repo_name = repo_path.split('/')

        helper = GitHubHelper(
            github=github,
            repository_owner=repo_owner,
            repository_name=repo_name,
        )
        # only do this for branch 'master' to avoid merge conflicts
        for branch_name in ['master']:  # branches(github_cfg, repo_owner, repo_name):
            util.info('Replicating pipeline-definition: {r}:{b}'.format(
                r=repo_path,
                b=branch_name,
            ))
            # create pipeline definition file in .ci/pipeline_definitions
            try:
                helper.create_or_update_file(
                    repository_branch=branch_name,
                    repository_version_file_path='.ci/pipeline_definitions',
                    file_contents=definition_contents,
                    commit_message='Import cc-pipeline definition',
                )
            except Exception:
                pass  # keep going with the remaining repositories
def resolve_component_descriptor(
    component_descriptor: CliHints.existing_file(),
):
    cfg_factory = ctx().cfg_factory()
    resolver = ComponentDescriptorResolver(
        cfg_factory=cfg_factory,
    )
    component_descriptor = ComponentDescriptor.from_dict(parse_yaml_file(component_descriptor))
    resolved_descriptor = resolver.resolve_component_references(product=component_descriptor)
    print(yaml.dump(resolved_descriptor.raw))
def component_descriptor_to_xml(
    component_descriptor: CliHints.existing_file(),
    out_file: str,
):
    component_descriptor = ComponentDescriptor.from_dict(parse_yaml_file(component_descriptor))

    def images(component_descriptor):
        for component in component_descriptor.components():
            yield from component.dependencies().container_images()

    result_xml = product.xml.container_image_refs_to_xml(
        container_images=images(component_descriptor),
    )
    result_xml.write(out_file)
def add_dependencies(
    descriptor_src_file: CliHints.existing_file(),
    component_name: str,
    component_version: str,
    descriptor_out_file: str=None,
    component_dependencies: CliHint(typehint=_parse_component_deps, action='append')=[],
    container_image_dependencies: CliHint(typehint=_parse_container_image_deps, action='append')=[],
    web_dependencies: CliHint(typehint=_parse_web_deps, action='append')=[],
    generic_dependencies: CliHint(typehint=_parse_generic_deps, action='append')=[],
):
    product = ComponentDescriptor.from_dict(parse_yaml_file(descriptor_src_file))

    component = product.component(
        ComponentReference.create(name=component_name, version=component_version)
    )
    if not component:
        fail('component {c}:{v} was not found in {f}'.format(
            c=component_name,
            v=component_version,
            f=descriptor_src_file,
        ))

    component_deps = component.dependencies()

    for component_ref in component_dependencies:
        component_deps.add_component_dependency(component_ref)
    for image_dep in container_image_dependencies:
        component_deps.add_container_image_dependency(image_dep)
    for web_dep in web_dependencies:
        component_deps.add_web_dependency(web_dep)
    for generic_dep in generic_dependencies:
        component_deps.add_generic_dependency(generic_dep)

    product_dict = json.loads(json.dumps({'components': [component.raw]}))
    if not descriptor_out_file:
        print(yaml.dump(product_dict, indent=2))
    else:
        with open(descriptor_out_file, 'w') as f:
            yaml.dump(product_dict, f, indent=2)
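# Hedged usage sketch (not part of the original sources): calling add_dependencies
# directly rather than through the CLI wrapper. All values are placeholders; per the
# function above, the resulting descriptor is printed to stdout when
# descriptor_out_file is omitted.
#
# add_dependencies(
#     descriptor_src_file='component_descriptor.yaml',        # placeholder file name
#     component_name='example.org/example/component',          # placeholder component
#     component_version='1.0.0',                                # placeholder version
# )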
def cfg_from_callback(
    repo_root,
    callback_path,
    effective_cfg_file,
):
    import subprocess
    import os
    import tempfile

    import util

    tmp_file = tempfile.NamedTemporaryFile()

    cb_env = os.environ.copy()
    cb_env['REPO_ROOT'] = repo_root
    cb_env['NOTIFY_CFG_OUT'] = tmp_file.name
    cb_env['EFFECTIVE_CFG'] = effective_cfg_file

    subprocess.run(
        [callback_path],
        check=True,
        env=cb_env,
    )

    return util.parse_yaml_file(tmp_file.name)
def load_config_from_user_home():
    config_file = Path.home() / '.cc-utils.cfg'
    if config_file.is_file():
        return util.parse_yaml_file(config_file)
    return {}
def deploy_and_run_smoketest_pipeline(
    config_dir: str,
    config_name: str,
    concourse_team_name: str,
    cc_pipelines_repo_dir: str,
    wait_for_job_execution: bool = False,
):
    config_factory = ConfigFactory.from_cfg_dir(cfg_dir=config_dir)
    config_set = config_factory.cfg_set(cfg_name=config_name)
    concourse_cfg = config_set.concourse()
    team_credentials = concourse_cfg.team_credentials(concourse_team_name)

    # as this is an integration test, hard-code assumptions about the layout of
    # our pipelines repository
    calcdir = lambda path: os.path.join(cc_pipelines_repo_dir, path)

    pipeline_definition_file = calcdir('definitions/test/cc-smoketest.yaml')
    template_path = calcdir('templates')
    template_include_dir = cc_pipelines_repo_dir
    pipeline_name = 'cc-smoketest'
    job_name = 'cc-smoketest-master-head-update-job'

    pipeline_definition = parse_yaml_file(pipeline_definition_file, as_snd=False)

    pipeline_descriptor = RawPipelineDefinitionDescriptor(
        name=pipeline_name,
        base_definition=pipeline_definition[pipeline_name]['base_definition'],
        variants=pipeline_definition[pipeline_name]['variants'],
        template=pipeline_definition[pipeline_name]['template'],
    )

    rendered_pipelines = list(
        render_pipelines(
            pipeline_definition=pipeline_descriptor,
            config_set=config_set,
            template_path=[template_path],
            template_include_dir=template_include_dir,
        )
    )
    if len(rendered_pipelines) == 0:
        fail('smoke-test pipeline definition not found')
    if len(rendered_pipelines) > 1:
        fail('expected exactly one smoketest pipeline-definition, got {n}'.format(
            n=len(rendered_pipelines)
        ))
    pipeline_definition, _, _ = rendered_pipelines[0]

    deploy_pipeline(
        pipeline_definition=pipeline_definition,
        pipeline_name=pipeline_name,
        concourse_cfg=concourse_cfg,
        team_credentials=team_credentials,
    )

    api = ConcourseApi(base_url=concourse_cfg.external_url(), team_name=concourse_team_name)
    api.login(
        team=team_credentials.teamname(),
        username=team_credentials.username(),
        passwd=team_credentials.passwd(),
    )

    # trigger an execution and wait for it to finish
    info('triggering smoketest job {jn}'.format(jn=job_name))
    api.trigger_build(pipeline_name, job_name)

    if not wait_for_job_execution:
        info('will not wait for job-execution to finish (--wait-for-job-execution not set)')
        return

    # wait for the job to finish (currently we expect it to succeed)
    # todo: evaluate whether its structure meets our spec
    builds = api.job_builds(pipeline_name, job_name)
    if not builds or len(builds) < 1:
        fail('no builds were found (expected at least one!)')

    last_build = builds[-1]  # please let this be ours

    # now wait for it to finish
    build_event_handler = api.build_events(last_build.id())
    build_event_handler.process_events()

    info('it seems as if the job finished successfully; life is good :-)')
def parse_product_file(f):
    return Product.from_dict(parse_yaml_file(f))
def parse_product_file(f):
    return ComponentDescriptor.from_dict(parse_yaml_file(f))