def fetch_provider_resource(path):
    """Fetch a resource from app-interface and validate it as a k8s object.

    :param path: resource path in app-interface
    :return: an OR (OpenshiftResource) wrapping the parsed resource body
    :raises FetchResourceError: when the resource cannot be fetched,
        parsed, or is not a valid k8s object
    """
    gqlapi = gql.get_api()

    # get resource data
    try:
        resource = gqlapi.get_resource(path)
    except gql.GqlApiError as e:
        raise FetchResourceError(e.message)

    # parse the raw content into a structured body
    try:
        resource['body'] = anymarkup.parse(
            resource['content'], force_types=None)
    except anymarkup.AnyMarkupError:
        raise FetchResourceError(
            "Could not parse data. Skipping resource: {}".format(path))

    openshift_resource = OR(resource['body'])

    # make sure the parsed body looks like a valid k8s object
    try:
        openshift_resource.verify_valid_k8s_object()
    except (KeyError, TypeError) as e:
        raise FetchResourceError(
            "Invalid data ({}). Skipping resource: {}".format(
                e.__class__.__name__, path))

    return openshift_resource
def run(dry_run):
    """Accept GitHub repo invitations for repos known to app-interface."""
    config = get_config()
    token = vault_client.read(
        config['github-repo-invites']['secret_path'],
        config['github-repo-invites']['secret_field'])
    g = utils.raw_github_api.RawGithubApi(token)

    # collect every code component url declared in app-interface
    gqlapi = gql.get_api()
    result = gqlapi.query(REPOS_QUERY)
    urls = []
    for app in result['apps_v1']:
        code_components = app['codeComponents']
        if code_components is None:
            continue
        urls.extend(component['url'] for component in code_components)

    # accept pending invitations whose repo is a known code component
    for invitation in g.repo_invitations():
        url = os.path.dirname(invitation['html_url'])
        if url in urls:
            logging.info(['accept', url])
            if not dry_run:
                g.accept_repo_invitation(invitation['id'])
        else:
            logging.debug(['skipping', url])
def fetch_desired_state():
    """Build the desired GitHub org / org-team membership state from roles."""
    gqlapi = gql.get_api()
    result = gqlapi.query(QUERY)
    state = AggregatedList()

    for role in result['roles']:
        permissions = [
            p for p in role['permissions']
            if p.get('service') in ['github-org', 'github-org-team']
        ]
        if not permissions:
            continue

        # members: all role users, plus bots that have a github identity
        members = [user['github_username'] for user in role['users']]
        members += [
            bot['github_username'] for bot in role['bots']
            if 'github_username' in bot
        ]

        for permission in permissions:
            if permission['service'] == 'github-org':
                state.add(permission, members)
            elif permission['service'] == 'github-org-team':
                state.add(permission, members)
                # team members must also belong to the org itself
                state.add(
                    {
                        'service': 'github-org',
                        'org': permission['org'],
                    },
                    members)

    return state
def fetch_desired_state():
    """Build the desired Quay org membership state from roles."""
    gqlapi = gql.get_api()
    result = gqlapi.query(QUAY_ORG_QUERY)
    state = AggregatedList()

    for role in result['roles']:
        permissions = [
            p for p in role['permissions']
            if p.get('service') == 'quay-membership'
        ]
        if not permissions:
            continue

        # members are users followed by bots, keeping only those
        # that have a quay username
        members = [
            member.get('quay_username')
            for member in role['users'] + role['bots']
            if member.get('quay_username')
        ]

        for permission in permissions:
            state.add(permission, members)

    return state
def run(dry_run=False, thread_pool_size=10, internal=None,
        use_jump_host=True, defer=None):
    """Reconcile NetworkPolicy objects in namespaces that allow them."""
    try:
        gqlapi = gql.get_api()
        # only namespaces flagged with networkPoliciesAllow are managed
        namespaces = [
            ns for ns in gqlapi.query(NAMESPACES_QUERY)['namespaces']
            if ns.get('networkPoliciesAllow')
        ]
        ri, oc_map = ob.fetch_current_state(
            namespaces=namespaces,
            thread_pool_size=thread_pool_size,
            integration=QONTRACT_INTEGRATION,
            integration_version=QONTRACT_INTEGRATION_VERSION,
            override_managed_types=['NetworkPolicy'],
            internal=internal,
            use_jump_host=use_jump_host)
        defer(lambda: oc_map.cleanup())
        fetch_desired_state(namespaces, ri, oc_map)
        ob.realize_data(dry_run, oc_map, ri)
    except Exception as e:
        # boundary handler: report and exit non-zero
        msg = ('There was problem running openshift network policies '
               'reconcile.' + ' Exception: {}').format(str(e))
        logging.error(msg)
        sys.exit(1)
def get_tf_resource_info(namespace, identifier):
    """
    Extracting the terraformResources information from the namespace
    for a given identifier

    :param namespace: the namespace dictionary
    :param identifier: the identifier we are looking for
    :return: the terraform resource information dictionary, or None
        (implicitly) when no matching rds resource is found
    """
    tf_resources = namespace['terraformResources']
    for tf_resource in tf_resources:
        # only the rds resource with the requested identifier is relevant
        if tf_resource['identifier'] != identifier:
            continue
        if tf_resource['provider'] != 'rds':
            continue
        # NOTE(review): get_resource returns the raw app-interface
        # resource dict; 'engine' / 'engine_version' are read from it
        # directly below via .get() -- confirm the defaults file fields
        # are exposed at this level rather than under a parsed body.
        defaults = gql.get_api().get_resource(tf_resource['defaults'])
        output_resource_name = tf_resource['output_resource_name']
        if output_resource_name is None:
            # default output name: "<identifier>-<provider>"
            output_resource_name = (f'{tf_resource["identifier"]}-'
                                    f'{tf_resource["provider"]}')
        return {
            'cluster': namespace['cluster']['name'],
            'output_resource_name': output_resource_name,
            'engine': defaults.get('engine', 'postgres'),
            'engine_version': defaults.get('engine_version', 'latest'),
        }
def run(dry_run, thread_pool_size=10, internal=None, use_jump_host=True,
        providers=None, cluster_name=None, namespace_name=None,
        init_api_resources=False, defer=None):
    """Reconcile openshift resources for namespaces in this shard.

    :param dry_run: when True, only report what would change
    :param thread_pool_size: number of worker threads
    :param internal: filter for internal/external clusters
    :param use_jump_host: connect to clusters through jump hosts
    :param providers: optional list of resource providers to restrict to
    :param cluster_name: optional cluster filter
    :param namespace_name: optional namespace filter
    :param init_api_resources: initialize cluster API resources on the map
    :param defer: cleanup registration callback (supplied by the runner)
    :return: the populated ResourceInventory
    """
    # bug fix: default was `providers=[]` -- a mutable default argument
    # is shared between calls; use None as the sentinel instead
    if providers is None:
        providers = []
    gqlapi = gql.get_api()
    # keep only namespaces that belong to this shard
    namespaces = [
        namespace_info for namespace_info
        in gqlapi.query(NAMESPACES_QUERY)['namespaces']
        if is_in_shard(f"{namespace_info['cluster']['name']}/" +
                       f"{namespace_info['name']}")
    ]
    namespaces = filter_namespaces_by_cluster_and_namespace(
        namespaces, cluster_name, namespace_name)
    namespaces = canonicalize_namespaces(namespaces, providers)
    oc_map, ri = fetch_data(namespaces, thread_pool_size, internal,
                            use_jump_host,
                            init_api_resources=init_api_resources)
    defer(lambda: oc_map.cleanup())
    ob.realize_data(dry_run, oc_map, ri)

    if ri.has_error_registered():
        sys.exit(1)

    return ri
def get_desired_state():
    """Build the desired jenkins instance/role/user assignments."""
    gqlapi = gql.get_api()
    roles = gqlapi.query(ROLES_QUERY)['roles']
    desired_state = []
    for role in roles:
        for permission in role['permissions']:
            if permission['service'] != 'jenkins-role':
                continue
            # users always carry an org_username; bots may not,
            # so bots without one are skipped
            subjects = role['users'] + [
                bot for bot in role['bots']
                if bot['org_username'] is not None
            ]
            for subject in subjects:
                desired_state.append({
                    "instance": permission['instance']['name'],
                    "role": permission['role'],
                    "user": subject['org_username']
                })
    return desired_state
def get_saas_files(saas_file_name=None, env_name=None, app_name=None):
    """
    Returns SaasFile resources defined in app-interface,
    optionally filtered by saas file name, environment name and/or
    app name. Passing an empty string for any filter yields an empty
    result. Filtering by env_name prunes non-matching targets (and then
    empty resource templates / saas files) in place.
    """
    gqlapi = gql.get_api()
    saas_files = gqlapi.query(SAAS_FILES_QUERY)['saas_files']
    # no filters at all: return everything
    if saas_file_name is None and env_name is None and app_name is None:
        return saas_files
    # an explicit empty-string filter can never match anything
    if saas_file_name == '' or env_name == '' or app_name == '':
        return []

    # iterate over copies ([:]) because items are removed from the
    # underlying lists while filtering
    for saas_file in saas_files[:]:
        if saas_file_name:
            if saas_file['name'] != saas_file_name:
                saas_files.remove(saas_file)
                continue
        if env_name:
            resource_templates = saas_file['resourceTemplates']
            for rt in resource_templates[:]:
                targets = rt['targets']
                for target in targets[:]:
                    namespace = target['namespace']
                    environment = namespace['environment']
                    # drop targets belonging to other environments
                    if environment['name'] != env_name:
                        targets.remove(target)
                # drop resource templates left with no targets
                if not targets:
                    resource_templates.remove(rt)
            # drop saas files left with no resource templates
            if not resource_templates:
                saas_files.remove(saas_file)
                continue
        if app_name:
            if saas_file['app']['name'] != app_name:
                saas_files.remove(saas_file)
                continue

    return saas_files
def fetch_desired_state():
    """Build the desired state of quay repositories from app-interface."""
    gqlapi = gql.get_api()
    result = gqlapi.query(QUAY_REPOS_QUERY)
    state = AggregatedList()
    for app in result['apps']:
        quay_repos = app.get('quayRepos')
        if quay_repos is None:
            continue
        for quay_repo in quay_repos:
            org_name = quay_repo['org']['name']
            for repo in quay_repo['items']:
                state.add(
                    {'org': org_name, 'repo': repo['name']},
                    {
                        'public': repo['public'],
                        'description': repo['description'].strip()
                    })
    return state
def fetch_desired_state():
    """Build the desired state of quay repositories from app-interface.

    Exits with ExitCodes.ERROR if the same org/repo pair is defined
    more than once.
    """
    gqlapi = gql.get_api()
    result = gqlapi.query(QUAY_REPOS_QUERY)
    state = AggregatedList()
    for app in result['apps']:
        quay_repos = app.get('quayRepos')
        if quay_repos is None:
            continue
        for quay_repo in quay_repos:
            name = quay_repo['org']['name']
            for repo in quay_repo['items']:
                params = {'org': name, 'repo': repo['name']}
                # Avoiding duplicates
                try:
                    state.get(params)
                    # bug fix: the message was wrapped in a one-element
                    # list instead of being a plain format string, so
                    # the log line rendered with stray brackets
                    logging.error(
                        'Repository %s/%s defined more than once',
                        params['org'], params['repo'])
                    sys.exit(ExitCodes.ERROR)
                except KeyError:
                    pass
                item = {
                    'public': repo['public'],
                    'description': repo['description'].strip()
                }
                state.add(params, item)
    return state
def run(dry_run):
    """Report apps whose declared dependencies disagree with reality."""
    settings = queries.get_app_interface_settings()
    dependency_map = settings.get('dependencies')
    if not dependency_map:
        sys.exit()

    gqlapi = gql.get_api()
    apps = gqlapi.query(APPS_QUERY)['apps']
    error = False
    for app in apps:
        app_name = app['name']
        app_deps = app.get('dependencies')
        current_deps = [a['name'] for a in app_deps] if app_deps else []
        desired_deps = get_desired_dependency_names(app, dependency_map)

        # missing dependencies are an error
        missing_deps = list(desired_deps.difference(current_deps))
        if missing_deps:
            error = True
            logging.error(
                f"App '{app_name}' has missing dependencies: {missing_deps}")

        # redundant dependencies are only reported at debug level
        redundant_deps = list(set(current_deps).difference(desired_deps))
        if redundant_deps:
            logging.debug(
                f"App '{app_name}' has redundant dependencies: " +
                f"{redundant_deps}")

    if error:
        sys.exit(1)
def get_prometheus_rules_paths():
    """Return the set of paths of all prometheus rules resources."""
    gqlapi = gql.get_api()
    resources = gqlapi.query(PROMETHEUS_RULES_PATHS_QUERY)['resources']
    return {resource['path'] for resource in resources}
def setup(dry_run, print_only, thread_pool_size, internal,
          use_jump_host, account_name):
    """Prepare terraform state for the run.

    :return: (resource inventory, oc map, Terraform client) tuple
    """
    gqlapi = gql.get_api()

    # optionally narrow down to a single aws account
    accounts = queries.get_aws_accounts()
    if account_name:
        accounts = [a for a in accounts if a['name'] == account_name]
        if not accounts:
            raise ValueError(f"aws account {account_name} is not found")

    settings = queries.get_app_interface_settings()

    # only namespaces that manage terraform resources are relevant
    all_namespaces = gqlapi.query(TF_NAMESPACES_QUERY)['namespaces']
    tf_namespaces = [ns for ns in all_namespaces
                     if ns.get('managedTerraformResources')]

    ri, oc_map = fetch_current_state(dry_run, tf_namespaces,
                                     thread_pool_size, internal,
                                     use_jump_host)
    ts, working_dirs = init_working_dirs(accounts, thread_pool_size,
                                         print_only=print_only,
                                         oc_map=oc_map,
                                         settings=settings)
    tf = Terraform(QONTRACT_INTEGRATION,
                   QONTRACT_INTEGRATION_VERSION,
                   QONTRACT_TF_PREFIX,
                   working_dirs,
                   thread_pool_size)
    existing_secrets = tf.get_terraform_output_secrets()
    ts.populate_resources(tf_namespaces, existing_secrets, account_name)
    ts.dump(print_only, existing_dirs=working_dirs)

    return ri, oc_map, tf
def collect_configs():
    """Return all jenkins job configs (raw plus saas-file generated)."""
    gqlapi = gql.get_api()
    raw_jjb_configs = gqlapi.query(QUERY)['jenkins_configs']
    saas_file_configs, settings = collect_saas_file_configs()
    return raw_jjb_configs + saas_file_configs, settings
def __init__(self, dry_run=False):
    """Initialize clients and push credentials.

    :param dry_run: when True, downstream operations only report
    """
    self.dry_run = dry_run
    self.gqlapi = gql.get_api()
    app_interface_settings = queries.get_app_interface_settings()
    self.secret_reader = SecretReader(settings=app_interface_settings)
    self.skopeo_cli = Skopeo(dry_run)
    self.push_creds = self._get_push_creds()
def get_github_instance(url="https://github.com/app-sre"):
    """
    Returns a single Github instance.

    :param url: instance url to look for; defaults to the app-sre
        instance so existing callers keep their behavior
    :return: the matching instance dict, or None when not found
    """
    gqlapi = gql.get_api()
    instances = gqlapi.query(GITHUB_INSTANCE_QUERY)['instances']
    for instance in instances:
        if instance['url'] == url:
            return instance
    # generalization: the url used to be hard-coded inside the loop
    return None
def validate_diffs(diffs):
    """Validate that every cluster/group combination in diffs is managed.

    Logs each invalid combination and exits the process with status 1
    when any is found.

    :param diffs: iterable of dicts with 'cluster' and 'group' keys
    """
    gqlapi = gql.get_api()
    clusters_query = gqlapi.query(GROUPS_QUERY)['clusters']

    # deduplicate desired combos while preserving order
    # (was a side-effect list comprehension, an anti-pattern)
    desired_combos_unique = []
    for diff in diffs:
        combo = {
            "cluster": diff['cluster'],
            "group": diff['group']
        }
        if combo not in desired_combos_unique:
            desired_combos_unique.append(combo)

    # every cluster's managedGroups defines the valid combinations
    valid_combos = [{
        "cluster": cluster['name'],
        "group": group
    } for cluster in clusters_query
        for group in cluster['managedGroups'] or []]

    invalid_combos = [
        item for item in desired_combos_unique
        if item not in valid_combos
    ]

    if len(invalid_combos) != 0:
        for combo in invalid_combos:
            msg = ('invalid cluster/group combination: {}/{}'
                   ' (hint: should be added to managedGroups)').format(
                       combo['cluster'], combo['group'])
            logging.error(msg)
        sys.exit(1)
def get_desired_state(internal, use_jump_host, thread_pool_size):
    """Return an OC_Map and the desired cluster/namespace pairs in shard."""
    gqlapi = gql.get_api()
    all_namespaces = gqlapi.query(QUERY)['namespaces']

    # keep only namespaces that belong to this shard
    namespaces = [
        ns for ns in all_namespaces
        if is_in_shard(f'{ns["cluster"]["name"]}/{ns["name"]}')
    ]

    ri = ResourceInventory()
    settings = queries.get_app_interface_settings()
    oc_map = OC_Map(namespaces=namespaces,
                    integration=QONTRACT_INTEGRATION,
                    settings=settings, internal=internal,
                    use_jump_host=use_jump_host,
                    thread_pool_size=thread_pool_size,
                    init_projects=True)
    ob.init_specs_to_fetch(ri, oc_map, namespaces=namespaces,
                           override_managed_types=['Namespace'])

    # only namespaces on reachable clusters make it into the state
    desired_state = [
        {"cluster": cluster, "namespace": namespace}
        for cluster, namespace, _, _ in ri
        if cluster in oc_map.clusters()
    ]
    return oc_map, desired_state
def get_app_interface_settings():
    """ Returns App Interface settings """
    gqlapi = gql.get_api()
    settings = gqlapi.query(APP_INTERFACE_SETTINGS_QUERY)['settings']
    # assuming a single settings file for now
    return settings[0] if settings else None
def fetch_keys_to_delete():
    """Map each aws account name to its non-empty list of keys to delete."""
    gqlapi = gql.get_api()
    accounts = gqlapi.query(QUERY)['accounts']
    keys_to_delete = {}
    for account in accounts:
        delete_keys = account['deleteKeys']
        # skip accounts with nothing to delete
        if delete_keys not in (None, []):
            keys_to_delete[account['name']] = delete_keys
    return keys_to_delete
def get_slack_workspace():
    """ Returns a single Slack workspace

    Logs a warning when more than one workspace is defined and returns
    the first one.
    """
    gqlapi = gql.get_api()
    slack_workspaces = \
        gqlapi.query(SLACK_WORKSPACES_QUERY)['slack_workspaces']
    if len(slack_workspaces) != 1:
        logging.warning('multiple Slack workspaces found.')
    # bug fix: reuse the already-fetched result instead of issuing a
    # second, identical GraphQL query just to index it
    return slack_workspaces[0]
def get_known_hosts(self, jh):
    """Return the known-hosts file content for the given jump host.

    :param jh: jump host dict containing a 'knownHosts' resource path
    :raises FetchResourceError: when the resource cannot be fetched
    """
    gqlapi = gql.get_api()
    try:
        resource = gqlapi.get_resource(jh['knownHosts'])
    except gql.GqlApiError as e:
        raise FetchResourceError(e.message)
    return resource['content']
def run(dry_run=False, io_dir='throughput/'):
    """Watch jira boards and act on issue diffs since the previous run."""
    gqlapi = gql.get_api()
    for jira_board in gqlapi.query(QUERY)['jira_boards']:
        jira, current_state = fetch_current_state(jira_board)
        previous_state = fetch_previous_state(io_dir, jira.project)
        # nothing to diff against on the very first run
        if previous_state:
            diffs = calculate_diff(jira.server, current_state,
                                   previous_state)
            act(dry_run, jira_board, diffs)
        write_state(io_dir, jira.project, current_state)
def get_desired_state(slack_map):
    """Build the desired Slack usergroup state from app-interface roles.

    :param slack_map: mapping of workspace name -> {'slack': client}
        used to resolve usergroup, user and channel ids
    :return: list of dicts with workspace, usergroup, usergroup_id,
        users and channels
    """
    gqlapi = gql.get_api()
    roles = gqlapi.query(ROLES_QUERY)['roles']
    all_users = gqlapi.query(USERS_QUERY)['users']
    desired_state = []
    for r in roles:
        for p in r['permissions']:
            if p['service'] != 'slack-usergroup':
                continue
            workspace = p['workspace']
            managed_usergroups = workspace['managedUsergroups']
            # workspaces without managed usergroups are ignored
            if managed_usergroups is None:
                continue
            workspace_name = workspace['name']
            usergroup = p['handle']
            # only usergroups explicitly listed as managed may be touched
            if usergroup not in managed_usergroups:
                logging.warning(
                    '[{}] usergroup {} not in managed usergroups {}'.format(
                        workspace_name,
                        usergroup,
                        managed_usergroups
                    ))
                continue
            slack = slack_map[workspace_name]['slack']
            ugid = slack.get_usergroup_id(usergroup)
            # members come from role users, plus slack usernames derived
            # from pagerduty schedules and github repo owners
            user_names = [get_slack_username(u) for u in r['users']]
            slack_usernames_pagerduty = \
                get_slack_usernames_from_pagerduty(p['pagerduty'],
                                                   all_users)
            user_names.extend(slack_usernames_pagerduty)
            slack_usernames_github = \
                get_slack_usernames_from_github_owners(p['github_owners'],
                                                       all_users)
            user_names.extend(slack_usernames_github)
            users = slack.get_users_by_names(user_names)
            channel_names = [] if p['channels'] is None else p['channels']
            channels = slack.get_channels_by_names(channel_names)
            desired_state.append({
                "workspace": workspace_name,
                "usergroup": usergroup,
                "usergroup_id": ugid,
                "users": users,
                "channels": channels,
            })
    return desired_state
def get_gitlab_repos(server):
    """Return all code component urls hosted on the given GitLab server."""
    gqlapi = gql.get_api()
    apps = gqlapi.query(APPS_QUERY)['apps']
    # flatten all code components across apps, keeping only urls
    # that live on the requested server
    return [
        component['url']
        for app in apps
        if app['codeComponents'] is not None
        for component in app['codeComponents']
        if component['url'].startswith(server)
    ]
def init_users():
    """Group user data file paths by redhat username."""
    gqlapi = gql.get_api()
    users = defaultdict(list)
    for user in gqlapi.query(QUERY)['users']:
        # paths are stored relative to the 'data' root
        users[user['redhat_username']].append('data' + user['path'])
    return [{'username': username, 'paths': paths}
            for username, paths in users.items()]
def fetch_current_state(thread_pool_size, internal, use_jump_host):
    """Fetch current cluster users for all non-OCM clusters."""
    gqlapi = gql.get_api()
    # OCM-managed clusters are handled elsewhere
    clusters = [c for c in gqlapi.query(CLUSTERS_QUERY)['clusters']
                if c.get('ocm') is None]
    settings = queries.get_app_interface_settings()
    oc_map = OC_Map(clusters=clusters,
                    integration=QONTRACT_INTEGRATION,
                    settings=settings, internal=internal,
                    use_jump_host=use_jump_host)
    results = threaded.run(get_cluster_users, oc_map.clusters(),
                           thread_pool_size, oc_map=oc_map)
    # flatten the per-cluster user lists
    current_state = [user for cluster_users in results
                     for user in cluster_users]
    return oc_map, current_state
def run(dry_run):
    """Reconcile quay repo team permissions against app-interface.

    Exits with ExitCodes.ERROR if any permission update fails.
    """
    gqlapi = gql.get_api()
    apps = gqlapi.query(QUAY_REPOS_QUERY)['apps']
    quay_api_store = get_quay_api_store()
    error = False
    for app in apps:
        quay_repo_configs = app.get('quayRepos')
        if not quay_repo_configs:
            continue
        for quay_repo_config in quay_repo_configs:
            org_name = quay_repo_config['org']['name']
            quay_api = quay_api_store[org_name]['api']
            teams = quay_repo_config.get('teams')
            if not teams:
                continue
            repos = quay_repo_config['items']
            for repo in repos:
                repo_name = repo['name']
                for team in teams:
                    permissions = team['permissions']
                    role = team['role']
                    for permission in permissions:
                        # only quay-membership permissions for this org
                        # are applicable; anything else is a data error
                        if permission['service'] != 'quay-membership':
                            logging.warning('wrong service kind, ' +
                                            'should be quay-membership')
                            continue
                        if permission['org'] != org_name:
                            logging.warning('wrong org, ' +
                                            f'should be {org_name}')
                            continue
                        team_name = permission['team']
                        # only update when the current role differs
                        current_role = \
                            quay_api.get_repo_team_permissions(
                                repo_name, team_name)
                        if current_role != role:
                            logging.info([
                                'update_role', org_name,
                                repo_name, team_name, role
                            ])
                            if not dry_run:
                                try:
                                    quay_api.set_repo_team_permissions(
                                        repo_name, team_name, role)
                                except Exception as e:
                                    # best-effort: record the failure
                                    # and keep processing other repos
                                    error = True
                                    logging.error(
                                        'could not set repo permissions: ' +
                                        f'repo name: {repo_name}, ' +
                                        f'team name: {team_name}. ' +
                                        f'details: {str(e)}')
    if error:
        sys.exit(ExitCodes.ERROR)
def get_values(self, path):
    """Fetch a resource from app-interface and parse its content.

    :param path: resource path in app-interface
    :return: the parsed values
    :raises FetchResourceError: when fetching or parsing fails
    """
    gqlapi = gql.get_api()
    try:
        raw_values = gqlapi.get_resource(path)
    except gql.GqlApiError as e:
        raise FetchResourceError(e.message)
    try:
        return anymarkup.parse(raw_values['content'], force_types=None)
    except anymarkup.AnyMarkupError:
        raise FetchResourceError(
            "Could not parse data. Skipping resource: {}".format(path))