Example #1
    def test_get_pipelines_providers_mixed(self) -> None:
        data = Fixtures('queries').get_json('pipelines_providers_mixed.json')
        self.fixture_data = deepcopy(data)
        pps = queries.get_pipelines_providers()

        # the fixture has some keys overridden from the defaults
        for k in ['taskTemplates', 'pipelineTemplates']:
            assert data['pipelines_providers'][0]['defaults'][k] == pps[0][k]

        for k in ['retention', 'deployResources']:
            assert data['pipelines_providers'][0][k] == pps[0][k]
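
For reference, the mixed fixture presumably has a shape like the sketch below, inferred from the assertions above: taskTemplates and pipelineTemplates are inherited from defaults, while retention and deployResources are overridden at the provider level. All names and values here are hypothetical, not the actual fixture file.

# hypothetical sketch of pipelines_providers_mixed.json, inferred from
# the assertions above; all values are illustrative
fixture = {
    'pipelines_providers': [{
        'name': 'example-provider',  # hypothetical
        'provider': 'tekton',
        'defaults': {
            'taskTemplates': [],      # inherited by the provider
            'pipelineTemplates': [],  # inherited by the provider
            'retention': {'days': 7},
            'deployResources': {},
        },
        # keys overridden at the provider level
        'retention': {'days': 30, 'minimum': 10},
        'deployResources': {'requests': {'cpu': '100m'}},
    }],
}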
Example #2
    def test_get_pipelines_providers_all_defaults(self) -> None:
        data = Fixtures('queries').get_json(
            'pipelines_providers_all_defaults.json')
        self.fixture_data = deepcopy(data)
        pps = queries.get_pipelines_providers()

        for k in [
                'retention', 'taskTemplates', 'pipelineTemplates',
                'deployResources'
        ]:
            assert data['pipelines_providers'][0]['defaults'][k] == pps[0][k]
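
Taken together, these two tests suggest that get_pipelines_providers flattens each provider's defaults into the provider itself, with explicitly set keys winning over inherited ones. A minimal sketch of that merge, as a hypothetical helper rather than the actual implementation:

def _apply_defaults(provider):
    # hypothetical sketch: any key missing on the provider falls back
    # to the value declared under its 'defaults' section
    defaults = provider.get('defaults') or {}
    for key, value in defaults.items():
        provider.setdefault(key, value)
    return provider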
Example #5
def run(dry_run,
        thread_pool_size=10,
        internal=None,
        use_jump_host=True,
        defer=None):
    settings = queries.get_app_interface_settings()
    pipelines_providers = queries.get_pipelines_providers()
    tkn_namespaces = [
        pp['namespace'] for pp in pipelines_providers
        if pp['provider'] == Providers.TEKTON
    ]

    oc_map = OC_Map(namespaces=tkn_namespaces,
                    integration=QONTRACT_INTEGRATION,
                    settings=settings,
                    internal=internal,
                    use_jump_host=use_jump_host,
                    thread_pool_size=thread_pool_size)
    defer(lambda: oc_map.cleanup())

    for pp in pipelines_providers:
        retention = pp.get('retention')
        if not retention:
            continue

        if pp['provider'] == Providers.TEKTON:
            ns_info = pp['namespace']
            namespace = ns_info['name']
            cluster = ns_info['cluster']['name']
            oc = oc_map.get(cluster)
            pipeline_runs = sorted(
                oc.get(namespace, 'PipelineRun')['items'],
                key=lambda k: k['metadata']['creationTimestamp'])

            retention_min = retention.get('minimum')
            if retention_min:
                pipeline_runs = pipeline_runs[retention_min:]

            retention_days = retention.get('days')
            for pr in pipeline_runs:
                name = pr['metadata']['name']
                if retention_days and \
                        within_retention_days(pr, retention_days):
                    continue

                logging.info([
                    'delete_trigger', cluster, namespace, 'PipelineRun', name
                ])
                if not dry_run:
                    oc.delete(namespace, 'PipelineRun', name)
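
The within_retention_days helper is referenced above but not shown. A minimal sketch of what it might do, assuming Kubernetes-style RFC 3339 creationTimestamp values; this is an assumption, not the integration's actual code:

from datetime import datetime, timezone

def within_retention_days(pr, days):
    # hypothetical sketch: True if the PipelineRun is younger than the
    # retention window; assumes timestamps like '2021-01-01T00:00:00Z'
    created = datetime.strptime(
        pr['metadata']['creationTimestamp'], '%Y-%m-%dT%H:%M:%SZ'
    ).replace(tzinfo=timezone.utc)
    age = datetime.now(timezone.utc) - created
    return age.days < days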
Example #6
def fetch_tkn_providers(saas_file_name: Optional[str]) -> dict[str, Any]:
    '''Fetch Tekton provider data for the saas files handled here'''
    saas_files = fetch_saas_files(saas_file_name)
    if not saas_files:
        return {}

    duplicates: set[str] = set()
    all_tkn_providers = {}
    for pipeline_provider in queries.get_pipelines_providers():
        if pipeline_provider['provider'] != Providers.TEKTON:
            continue

        if pipeline_provider['name'] in all_tkn_providers:
            duplicates.add(pipeline_provider['name'])
        else:
            all_tkn_providers[pipeline_provider['name']] = pipeline_provider

    if duplicates:
        raise OpenshiftTektonResourcesBadConfigError(
            'There are duplicate Tekton provider names: '
            f'{", ".join(duplicates)}')

    # Keep only the providers that are used by the saas files and attach
    # to each provider the saas files that belong to it
    tkn_providers = {}
    for sf in saas_files:
        provider_name = sf['pipelinesProvider']['name']
        if provider_name not in tkn_providers:
            tkn_providers[provider_name] = all_tkn_providers[provider_name]

        if 'saas_files' not in tkn_providers[provider_name]:
            tkn_providers[provider_name]['saas_files'] = []

        tkn_providers[provider_name]['saas_files'].append(sf)

    return tkn_providers
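
A hypothetical caller might consume the returned mapping as follows; passing saas_file_name=None is assumed here to mean "all saas files":

tkn_providers = fetch_tkn_providers(saas_file_name=None)
for provider_name, provider in tkn_providers.items():
    # each provider carries the saas files that reference it
    for sf in provider['saas_files']:
        print(provider_name, sf['name'])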
Example #7
def setup(thread_pool_size, internal, use_jump_host, integration,
          integration_version, v1, v2):
    """Setup required resources for triggering integrations

    Args:
        thread_pool_size (int): Thread pool size to use
        internal (bool): Should run for internal/external/all clusters
        use_jump_host (bool): Should use jump host to reach clusters
        integration (string): Name of calling integration
        integration_version (string): Version of calling integration
        v1 (bool): Should trigger for v1 SaaS files
        v2 (bool): Should trigger for v2 SaaS files

    Returns:
        saasherder (SaasHerder): a SaasHerder instance
        jenkins_map (dict): Instance names with JenkinsApi instances
        oc_map (OC_Map): a dictionary of OC clients per cluster
        settings (dict): App-interface settings
        error (bool): True if an error occurred, False otherwise
    """

    saas_files = queries.get_saas_files(v1=v1, v2=v2)
    if not saas_files:
        logging.error("no saas files found")
        return None, None, None, None, True
    saas_files = [sf for sf in saas_files if is_in_shard(sf["name"])]

    # Remove saas-file targets that are disabled
    for saas_file in saas_files[:]:
        resource_templates = saas_file["resourceTemplates"]
        for rt in resource_templates[:]:
            targets = rt["targets"]
            for target in targets[:]:
                if target["disable"]:
                    targets.remove(target)

    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    accounts = queries.get_state_aws_accounts()
    gl = GitLabApi(instance, settings=settings)
    jenkins_map = jenkins_base.get_jenkins_map()
    pipelines_providers = queries.get_pipelines_providers()
    tkn_provider_namespaces = [
        pp["namespace"] for pp in pipelines_providers
        if pp["provider"] == "tekton"
    ]

    oc_map = OC_Map(
        namespaces=tkn_provider_namespaces,
        integration=integration,
        settings=settings,
        internal=internal,
        use_jump_host=use_jump_host,
        thread_pool_size=thread_pool_size,
    )

    saasherder = SaasHerder(
        saas_files,
        thread_pool_size=thread_pool_size,
        gitlab=gl,
        integration=integration,
        integration_version=integration_version,
        settings=settings,
        jenkins_map=jenkins_map,
        accounts=accounts,
    )

    return saasherder, jenkins_map, oc_map, settings, False
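
A hypothetical caller would unpack the returned tuple and bail out if setup reported an error; the integration name and version below are placeholders, not real values:

import sys

saasherder, jenkins_map, oc_map, settings, error = setup(
    thread_pool_size=10,
    internal=None,
    use_jump_host=True,
    integration='example-integration',  # placeholder
    integration_version='0.1.0',        # placeholder
    v1=True,
    v2=False,
)
if error:
    sys.exit(1)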