Example #1
def serialise_cfg(
        cfg_dir: CliHints.existing_dir(), cfg_sets: [str], out_file: str):
    factory = ConfigFactory.from_cfg_dir(cfg_dir=cfg_dir)
    # resolve each named cfg_set via the factory, then serialise them together
    cfg_sets = [factory.cfg_set(cfg_set) for cfg_set in cfg_sets]
    serialiser = CSS(cfg_sets=cfg_sets, cfg_factory=factory)
    with open(out_file, 'w') as f:
        f.write(serialiser.serialise())
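All of these examples center on ConfigFactory.from_cfg_dir(), which builds a configuration factory from a directory of config files. A minimal sketch of how the serialise_cfg command above might be invoked (directly, or via whatever CLI layer interprets the CliHints annotations) follows; the directory path, cfg-set names and output file are placeholders, not values from the original code:

# hypothetical invocation of serialise_cfg; all argument values are placeholders
serialise_cfg(
    cfg_dir='/path/to/cfg_dir',            # must already exist (CliHints.existing_dir())
    cfg_sets=['first_set', 'second_set'],
    out_file='/tmp/serialised_cfg.yaml',
)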
Example #2
    def setUp(self):
        self.tmpdir = TemporaryDirectory()

        # type definition
        self.types_file = self._file(
            'types', '''
        a_type:
          src:
          - file: a_type_values.xxx
          model:
            cfg_type_name: a_type
            type: NamedModelElement
        defined_but_unused_type:
          src:
          - file: defined_but_unused_type_values.xxx
          model:
            cfg_type_name: defined_but_unused_type
            type: NamedModelElement
        cfg_set:
          src:
          - file: configs
          model:
            cfg_type_name: cfg_set
            type: ConfigurationSet
        ''')

        # cfg_set definitions
        self.configs_file = self._file(
            'configs', '''
        singleton_set:
            a_type: first_value_of_a
        second_set:
            a_type: second_value_of_a
        set_with_multiple_values:
            a_type:
              config_names:
              - first_value_of_a
              - second_value_of_a
              default: second_value_of_a
        ''')

        # value definitions
        self.a_type_values_file = self._file(
            'a_type_values.xxx', '''
        first_value_of_a:
            some_value: 123
        second_value_of_a:
            some_value: 42
        ignored_value_of_a:
            some_value: xxx
        ''')

        self.defined_but_unused_type_values_file = self._file(
            'defined_but_unused_type_values.xxx', '''
            unused:
                some_value: 7
        ''')

        self.examinee = ConfigFactory.from_cfg_dir(
            cfg_dir=self.tmpdir.name, cfg_types_file=self.types_file)
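The setUp above only builds the fixture; a test method exercising the resulting factory could look like the following sketch. The method name and assertions are assumptions based on the fixture data, and it only uses calls that appear in the other examples (cfg_set() and _cfg_element_names()):

    def test_cfg_set_resolution(self):
        # hypothetical test method, not part of the original test class
        cfg_set_names = list(self.examinee._cfg_element_names('cfg_set'))
        self.assertIn('singleton_set', cfg_set_names)
        self.assertIn('set_with_multiple_values', cfg_set_names)

        # resolving a set defined in the 'configs' fixture should not raise
        self.examinee.cfg_set('singleton_set')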
Example #3
def serialise_cfg(
        cfg_dir: CliHints.existing_dir(), out_file: str, cfg_sets: [str] = []):
    factory = ConfigFactory.from_cfg_dir(cfg_dir=cfg_dir)
    if not cfg_sets:
        # no cfg_sets specified: fall back to all cfg_sets defined in the directory
        cfg_sets = factory._cfg_element_names('cfg_set')
    cfg_sets = [factory.cfg_set(cfg_set) for cfg_set in cfg_sets]
    serialiser = CSS(cfg_sets=cfg_sets, cfg_factory=factory)
    with open(out_file, 'w') as f:
        f.write(serialiser.serialise())
Example #4
def _cfg_factory_from_dir():
    if Config.CONTEXT.value.config_dir() is None:
        return None

    from util import existing_dir
    cfg_dir = existing_dir(Config.CONTEXT.value.config_dir())

    from model import ConfigFactory
    factory = ConfigFactory.from_cfg_dir(cfg_dir=cfg_dir)
    return factory
Example #5
def _cfg_factory_from_dir():
    if not args or not args.cfg_dir:
        return None

    from util import ensure_directory_exists
    cfg_dir = ensure_directory_exists(args.cfg_dir)

    from model import ConfigFactory
    factory = ConfigFactory.from_cfg_dir(cfg_dir=cfg_dir)
    return factory
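Examples #4 and #5 are variants of the same helper: lazily build a factory from a configured directory and return None when no directory is configured. A hypothetical caller might look like this sketch (the cfg-set name and the error handling are assumptions, not part of the original snippets):

factory = _cfg_factory_from_dir()
if factory is None:
    raise RuntimeError('no config directory configured')
# resolve a named cfg_set as in the other examples; 'my_cfg_set' is a placeholder
cfg_set = factory.cfg_set('my_cfg_set')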
Example #6
def replicate_pipeline_definitions(
    definition_dir: str,
    cfg_dir: str,
    cfg_name: str,
):
    '''
    replicates pipeline definitions from cc-pipelines to component repositories.
    will only be required until definitions are moved to component repositories.
    '''
    util.ensure_directory_exists(definition_dir)
    util.ensure_directory_exists(cfg_dir)

    cfg_factory = ConfigFactory.from_cfg_dir(cfg_dir)
    cfg_set = cfg_factory.cfg_set(cfg_name)
    github_cfg = cfg_set.github()

    github = _create_github_api_object(github_cfg=github_cfg)

    repo_mappings = util.parse_yaml_file(os.path.join(definition_dir, '.repository_mapping'))

    for repo_path, definition_file in repo_mappings.items():
        # hack: definition_file is a list that always contains exactly one entry
        definition_file = util.ensure_file_exists(os.path.join(definition_dir, definition_file[0]))
        with open(definition_file) as f:
            definition_contents = f.read()

        repo_owner, repo_name = repo_path.split('/')

        helper = GitHubHelper(
            github=github,
            repository_owner=repo_owner,
            repository_name=repo_name,
        )
        # only do this for branch 'master' to avoid merge conflicts
        for branch_name in ['master']: #branches(github_cfg, repo_owner, repo_name):
            util.info('Replicating pipeline-definition: {r}:{b}'.format(
                    r=repo_path,
                    b=branch_name,
                )
            )
            # create pipeline definition file in .ci/pipeline_definitions
            try:
                helper.create_or_update_file(
                    repository_branch=branch_name,
                    repository_version_file_path='.ci/pipeline_definitions',
                    file_contents=definition_contents,
                    commit_message="Import cc-pipeline definition"
                )
            except Exception:
                pass  # keep going
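From the way repo_mappings is consumed above (repo_path.split('/') and definition_file[0]), the .repository_mapping file apparently maps 'owner/repository' keys to single-element lists of definition-file paths relative to definition_dir. A hypothetical example of the parsed structure, with placeholder names:

# what util.parse_yaml_file(...) is expected to return for .repository_mapping;
# the owner/repository keys and file names are placeholders
repo_mappings = {
    'some-org/some-component': ['definitions/some-component.yaml'],
    'some-org/another-component': ['definitions/another-component.yaml'],
}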
Example #7
    def setUp(self):
        self.tmpdir = TemporaryDirectory()

        # type definition
        self.types_file = self._file(
            'types', '''
        a_type:
          src:
          - file: a_type_values.xxx
          model:
            cfg_type_name: a_type
            type: NamedModelElement
        cfg_set:
          src:
          - file: configs
          model:
            cfg_type_name: cfg_set
            type: ConfigurationSet
        ''')

        # cfg_set definitions
        self.configs_file = self._file(
            'configs', '''
        first_set:
            a_type: first_value_of_a
        second_set:
            a_type: second_value_of_a
        ''')

        # value definitions
        self.values_file = self._file(
            'a_type_values.xxx', '''
        first_value_of_a:
            some_value: 123
        second_value_of_a:
            some_value: 42
        ignored_value_of_a:
            some_value: xxx
        ''')

        self.examinee = ConfigFactory.from_cfg_dir(
            cfg_dir=self.tmpdir.name, cfg_types_file=self.types_file)
Example #8
def deploy_pipeline(
        pipeline_file: CliHint('generated pipeline definition to deploy'),
        pipeline_name: CliHint('the name under which the pipeline shall be deployed'),
        team_name: CliHint('name of the target team'),
        config_dir: CliHints.existing_dir('directory containing Concourse configuration'),
        config_name: CliHint('identifier of the configuration in the config directory to use')
):
    cfg_factory = ConfigFactory.from_cfg_dir(cfg_dir=config_dir)
    concourse_cfg = cfg_factory.concourse(config_name)
    team_credentials = concourse_cfg.team_credentials(team_name)

    with open(pipeline_file) as f:
        pipeline_definition = f.read()

    pipelines.deploy_pipeline(
        pipeline_definition=pipeline_definition,
        pipeline_name=pipeline_name,
        concourse_cfg=concourse_cfg,
        team_credentials=team_credentials,
    )
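A sketch of how deploy_pipeline from Example #8 might be called; every argument value below is a placeholder chosen to match the parameter hints, not a value from the original code:

deploy_pipeline(
    pipeline_file='rendered-pipeline.yaml',
    pipeline_name='my-pipeline',
    team_name='my-team',
    config_dir='/path/to/cfg_dir',
    config_name='my_concourse_cfg',
)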
Example #9
    def test_absent_cfg_types_file_causes_failure(self):
        with self.assertRaises(FileNotFoundError):
            ConfigFactory.from_cfg_dir(
                cfg_dir=self.tmpdir.name,
                cfg_types_file='another absent file'
            )
Example #10
    def test_absent_directory_causes_failure(self):
        with self.assertRaises(Failure):
            ConfigFactory.from_cfg_dir(cfg_dir='should not exist')
Example #11
def deploy_and_run_smoketest_pipeline(
    config_dir: str,
    config_name: str,
    concourse_team_name: str,
    cc_pipelines_repo_dir: str,
    cc_utils_repo_dir: str,
    wait_for_job_execution: bool = False,
):
    config_factory = ConfigFactory.from_cfg_dir(cfg_dir=config_dir)
    config_set = config_factory.cfg_set(cfg_name=config_name)
    concourse_cfg = config_set.concourse()

    # as this is an integration test, hard-code assumptions about the layout of
    # our pipelines repository
    template_path = os.path.join(cc_utils_repo_dir, 'concourse', 'templates')
    template_include_dir = os.path.join(cc_utils_repo_dir, 'concourse')
    pipeline_name = 'cc-smoketest'

    # retrieve pipeline-definition from github at hardcoded location
    github_cfg = config_set.github()

    githubrepobranch = GitHubRepoBranch(
        github_config=github_cfg,
        repo_owner='kubernetes',
        repo_name='cc-smoketest',
        branch='master',
    )

    helper = GitHubRepositoryHelper.from_githubrepobranch(
        githubrepobranch=githubrepobranch,
    )
    pipeline_definition = yaml.load(
        helper.retrieve_text_file_contents(
            file_path='.ci/smoketest-pipeline.yaml',
        ),
        Loader=yaml.SafeLoader,
    )

    definition_descriptor = DefinitionDescriptor(
        pipeline_name=pipeline_name,
        pipeline_definition=pipeline_definition[pipeline_name],
        main_repo={'path': 'kubernetes/cc-smoketest', 'branch': 'master'},
        concourse_target_cfg=concourse_cfg,
        concourse_target_team=concourse_team_name,
    )

    preprocessor = DefinitionDescriptorPreprocessor()
    template_retriever = TemplateRetriever(template_path=template_path)
    renderer = Renderer(
        template_retriever=template_retriever,
        template_include_dir=template_include_dir,
        cfg_set=config_set,
    )
    deployer = ConcourseDeployer(
        unpause_pipelines=True,
        expose_pipelines=True
    )

    definition_descriptor = preprocessor.process_definition_descriptor(definition_descriptor)
    rendering_result = renderer.render(definition_descriptor)

    info('deploying pipeline')
    deployment_result = deployer.deploy(rendering_result.definition_descriptor)

    if not deployment_result.deploy_status & DeployStatus.SUCCEEDED:
        fail('deployment failed')
Example #12
def deploy_and_run_smoketest_pipeline(
    config_dir: str,
    config_name: str,
    concourse_team_name: str,
    cc_pipelines_repo_dir: str,
    wait_for_job_execution: bool = False,
):
    config_factory = ConfigFactory.from_cfg_dir(cfg_dir=config_dir)
    config_set = config_factory.cfg_set(cfg_name=config_name)
    concourse_cfg = config_set.concourse()
    team_credentials = concourse_cfg.team_credentials(concourse_team_name)

    # as this is an integration test, hard-code assumptions about the layout of
    # our pipelines repository
    calcdir = lambda path: os.path.join(cc_pipelines_repo_dir, path)

    pipeline_definition_file = calcdir('definitions/test/cc-smoketest.yaml')
    template_path = calcdir('templates')
    template_include_dir = cc_pipelines_repo_dir
    pipeline_name = 'cc-smoketest'
    job_name = 'cc-smoketest-master-head-update-job'

    pipeline_definition = parse_yaml_file(pipeline_definition_file,
                                          as_snd=False)

    pipeline_descriptor = RawPipelineDefinitionDescriptor(
        name=pipeline_name,
        base_definition=pipeline_definition[pipeline_name]['base_definition'],
        variants=pipeline_definition[pipeline_name]['variants'],
        template=pipeline_definition[pipeline_name]['template'],
    )

    rendered_pipelines = list(
        render_pipelines(
            pipeline_definition=pipeline_descriptor,
            config_set=config_set,
            template_path=[template_path],
            template_include_dir=template_include_dir,
        ))
    if len(rendered_pipelines) == 0:
        fail("smoke-test pipeline definition not found")
    if len(rendered_pipelines) > 1:
        fail("expected exactly one smoketest pipeline-definition, got {n}".
             format(n=len(rendered_pipelines)))
    pipeline_definition, _, _ = rendered_pipelines[0]

    deploy_pipeline(
        pipeline_definition=pipeline_definition,
        pipeline_name=pipeline_name,
        concourse_cfg=concourse_cfg,
        team_credentials=team_credentials,
    )

    api = ConcourseApi(base_url=concourse_cfg.external_url(),
                       team_name=concourse_team_name)
    api.login(team=team_credentials.teamname(),
              username=team_credentials.username(),
              passwd=team_credentials.passwd())

    # trigger an execution and wait for it to finish
    info('triggering smoketest job {jn}'.format(jn=job_name))
    api.trigger_build(pipeline_name, job_name)

    if not wait_for_job_execution:
        info(
            'will not wait for job-execution to finish (--wait-for-job-execution not set)'
        )
        return

    # wait for the job to finish (currently we expect it to succeed)
    # todo: evaluate whether its structure meets our spec

    builds = api.job_builds(pipeline_name, job_name)
    if not builds or len(builds) < 1:
        fail('no builds were found (expected at least one!)')

    last_build = builds[-1]  # please let this be ours

    # now wait for it to finish
    build_event_handler = api.build_events(last_build.id())
    build_event_handler.process_events()

    info('it seems as if the job finished successfully; life is good :-)')
Example #13
        cfg = merge_global_cfg(cfg, additional_cfg)


load_config()


def _cfg_factory_from_dir():
    if not cfg or not cfg.ctx or not (cfg_dir := cfg.ctx.config_dir):
        return None

    from ci.util import existing_dir
    cfg_dir = existing_dir(cfg_dir)

    from model import ConfigFactory
    factory = ConfigFactory.from_cfg_dir(cfg_dir=cfg_dir)
    return factory


def _secrets_server_client():
    import ccc.secrets_server
    try:
        if bool(args.server_endpoint) ^ bool(args.concourse_cfg_name):
            raise ValueError(
                'either all or none of server-endpoint and concourse-cfg-name must be set'
            )
        if args.server_endpoint or args.cache_file:
            return ccc.secrets_server.SecretsServerClient(
                endpoint_url=args.server_endpoint,
                concourse_secret_name=args.concourse_cfg_name,
                cache_file=args.cache_file)