def init_oc_client(self, cluster_info):
    """Initialize an OC client for the given cluster and register it.

    Skips clusters that are already initialized, disabled for this
    integration, or filtered out by the --internal/--external flag.
    Registers ``False`` when the cluster has no automation token.

    :param cluster_info: Graphql cluster query result (dict)
    """
    name = cluster_info['name']
    if self.oc_map.get(name):
        return
    if self.cluster_disabled(cluster_info):
        return
    if self.internal is not None:
        # integration is executed with `--internal` or `--external`
        # filter out non matching clusters
        if bool(self.internal) != bool(cluster_info['internal']):
            return

    token_secret = cluster_info.get('automationToken')
    if token_secret is None:
        # no token: mark the cluster as not usable
        self.set_oc(name, False)
        return

    server_url = cluster_info['serverUrl']
    token = secret_reader.read(token_secret, self.settings)
    jump_host = cluster_info.get('jumpHost') if self.use_jump_host else None
    self.set_oc(
        name,
        OC(server_url, token, jump_host,
           settings=self.settings,
           init_projects=self.init_projects,
           init_api_resources=self.init_api_resources))
def run(dry_run):
    """Accept pending GitHub repo invitations for known code components.

    Collects every code-component URL from app-interface, then walks the
    bot account's repo invitations and accepts those whose repo URL is a
    known code component (unless dry_run).
    """
    gqlapi = gql.get_api()
    result = gqlapi.query(REPOS_QUERY)
    config = get_config()['github-repo-invites']
    settings = queries.get_app_interface_settings()
    secret = {'path': config['secret_path'],
              'field': config['secret_field']}
    token = secret_reader.read(secret, settings=settings)
    g = utils.raw_github_api.RawGithubApi(token)

    urls = []
    for app in result['apps_v1']:
        code_components = app['codeComponents']
        if code_components is None:
            continue
        urls.extend(cc['url'] for cc in code_components)

    for invitation in g.repo_invitations():
        invitation_id = invitation['id']
        # the invitation html_url points at the invitation page;
        # its dirname is the repo URL itself
        url = os.path.dirname(invitation['html_url'])
        if url in urls:
            logging.info(['accept', url])
            if not dry_run:
                g.accept_repo_invitation(invitation_id)
        else:
            logging.debug(['skipping', url])
def init_ocm_client(self, cluster_info):
    """
    Initiate OCM client.
    Gets the OCM information and initiates an OCM client.
    Skip initiating OCM if it has already been initialized or if
    the current integration is disabled on it.

    :param cluster_info: Graphql cluster query result
    :type cluster_info: dict
    """
    if self.cluster_disabled(cluster_info):
        return

    cluster_name = cluster_info['name']
    ocm_info = cluster_info['ocm']
    ocm_name = ocm_info['name']
    # pointer from each cluster to its referenced OCM instance
    self.clusters_map[cluster_name] = ocm_name
    if self.ocm_map.get(ocm_name):
        return

    offline_token = ocm_info.get('offlineToken')
    if offline_token is None:
        # no token: mark the OCM instance as not usable
        self.ocm_map[ocm_name] = False
        return

    access_token_client_id = ocm_info.get('accessTokenClientId')
    access_token_url = ocm_info.get('accessTokenUrl')
    url = ocm_info['url']
    token = secret_reader.read(offline_token, self.settings)
    self.ocm_map[ocm_name] = OCM(
        url, access_token_client_id, access_token_url, token)
def __init__(self, instance, project_id=None, ssl_verify=True,
             settings=None, project_url=None):
    """Create a gitlab client for the given instance definition.

    :param instance: instance dict with 'url', 'token' (secret ref)
        and 'sslVerify' keys
    :param project_id: gitlab project id to scope the client to
    :param ssl_verify: TLS verification fallback, used only when the
        instance does not specify 'sslVerify'
    :param settings: app-interface settings passed to secret_reader
    :param project_url: alternative way to locate the project when
        project_id is not given
    """
    self.server = instance['url']
    token = secret_reader.read(instance['token'], settings=settings)
    # the instance definition wins; fall back to the ssl_verify
    # argument when unset (previously the argument was always
    # clobbered by the instance value, making it dead)
    instance_ssl_verify = instance['sslVerify']
    if instance_ssl_verify is not None:
        ssl_verify = instance_ssl_verify
    self.gl = gitlab.Gitlab(self.server, private_token=token,
                            ssl_verify=ssl_verify)
    self.gl.auth()
    self.user = self.gl.user
    if project_id is None:
        # When project_id is not provided, we try to get the project
        # using the project_url
        if project_url is not None:
            parsed_project_url = urlparse(project_url)
            name_with_namespace = parsed_project_url.path.strip('/')
            self.project = self.gl.projects.get(name_with_namespace)
        else:
            # neither project_id nor project_url given: client is not
            # scoped to a project (previously the attribute was left
            # undefined, causing AttributeError on later access)
            self.project = None
    else:
        self.project = self.gl.projects.get(project_id)
def __init__(self, token, ssl_verify=True, settings=None):
    """Initialize the jenkins client from a TOML config secret.

    :param token: secret reference resolving to a TOML document with a
        [jenkins] section containing url, user and password
    :param ssl_verify: whether to verify TLS certificates
    :param settings: app-interface settings passed to secret_reader
    """
    raw_config = secret_reader.read(token, settings=settings)
    jenkins_config = toml.loads(raw_config)['jenkins']
    self.url = jenkins_config['url']
    self.user = jenkins_config['user']
    self.password = jenkins_config['password']
    self.ssl_verify = ssl_verify
    self.should_restart = False
def run(dry_run=False):
    """Reconcile current vs desired state on every sentry instance."""
    settings = queries.get_app_interface_settings()
    gqlapi = gql.get_api()
    github = init_github()

    # Reconcile against all sentry instances
    for instance in gqlapi.query(SENTRY_INSTANCES_QUERY)['instances']:
        token = secret_reader.read(instance['automationToken'],
                                   settings=settings)
        sentry_client = SentryClient(instance['consoleUrl'], token)

        # the admin user is excluded from reconciliation
        skip_user = secret_reader.read(instance['adminUser'],
                                       settings=settings)
        current_state = fetch_current_state(sentry_client, [skip_user])
        desired_state = fetch_desired_state(gqlapi, instance, github)

        reconciler = SentryReconciler(sentry_client, dry_run)
        reconciler.reconcile(current_state, desired_state)
def _initiate_github(self, saas_file):
    """Return a Github client for the saas file.

    Uses the saas file's authentication code secret when present,
    otherwise falls back to the app-sre token from the global config.
    """
    auth_code = (saas_file.get('authentication') or {}).get('code') or {}
    if auth_code:
        token = secret_reader.read(auth_code, settings=self.settings)
    else:
        token = get_config()['github']['app-sre']['token']
    base_url = os.environ.get('GITHUB_API', 'https://api.github.com')
    return Github(token, base_url=base_url)
def bot_login(ctx, cluster_name):
    """Print an `oc login` command for the named cluster's bot token.

    Exits with status 1 when the cluster is unknown.
    """
    matches = [c for c in queries.get_clusters()
               if c['name'] == cluster_name]
    if not matches:
        print(f"{cluster_name} not found.")
        sys.exit(1)

    cluster = matches[0]
    settings = queries.get_app_interface_settings()
    server = cluster['serverUrl']
    token = secret_reader.read(cluster['automationToken'],
                               settings=settings)
    print(f"oc login --server {server} --token {token}")
def _initiate_github(self, saas_file):
    """Return a Github client for the saas file.

    Uses the saas file's authentication code secret when present,
    otherwise the default app-sre org token from the global config.
    """
    auth_code = (saas_file.get('authentication') or {}).get('code') or {}
    if auth_code:
        token = secret_reader.read(auth_code, settings=self.settings)
    else:
        # use the app-sre token by default
        org = 'app-sre'
        token = get_config(desired_org_name=org)['github'][org]['token']
    base_url = os.environ.get('GITHUB_API', 'https://api.github.com')
    return Github(token, base_url=base_url)
def __init__(self, workspace_name, token, settings=None,
             init_usergroups=True, **chat_kwargs):
    """Initialize the slack client for a workspace.

    :param workspace_name: name of the slack workspace
    :param token: secret reference resolving to the slack token
    :param settings: app-interface settings passed to secret_reader
    :param init_usergroups: whether to eagerly load usergroups
    :param chat_kwargs: extra keyword args stored for chat calls
    """
    self.workspace_name = workspace_name
    self.sc = SlackClient(secret_reader.read(token, settings=settings))
    self.results = {}
    self.chat_kwargs = chat_kwargs
    if init_usergroups:
        self._initiate_usergroups()
def get_config():
    """Build a github config mapping org name -> token/managed teams."""
    gqlapi = gql.get_api()
    orgs = gqlapi.query(ORGS_QUERY)['orgs']
    settings = queries.get_app_interface_settings()

    github = {}
    for org in orgs:
        token = secret_reader.read(org['token'], settings=settings)
        github[org['name']] = {
            'token': token,
            'managed_teams': org['managedTeams'],
        }
    return {'github': github}
def collect_configs(self, configs):
    """Materialize per-instance working directories from the configs.

    For every referenced instance, writes its credentials ini file
    into a fresh temp dir, then appends each config (inline yaml or a
    gql resource) to that instance's config.yaml.  Stores the
    resulting maps on self (instances, instance_urls, working_dirs).

    :param configs: list of config dicts, each with an 'instance' key
        and either 'config' (inline) or 'config_path' (gql resource)
    :raises FetchResourceError: when a config_path cannot be fetched
    """
    gqlapi = gql.get_api()
    # dedupe instances by name; later entries with the same name win
    instances = \
        {c['instance']['name']: {
            'serverUrl': c['instance']['serverUrl'],
            'token': c['instance']['token'],
            'delete_method': c['instance']['deleteMethod']}
         for c in configs}

    working_dirs = {}
    instance_urls = {}
    for name, data in instances.items():
        token = data['token']
        server_url = data['serverUrl']
        wd = tempfile.mkdtemp()
        ini = secret_reader.read(token, settings=self.settings)
        # normalize the ini payload: strip quotes, python-ify booleans
        ini = ini.replace('"', '')
        ini = ini.replace('false', 'False')
        ini_file_path = '{}/{}.ini'.format(wd, name)
        with open(ini_file_path, 'w') as f:
            f.write(ini)
            f.write('\n')
        working_dirs[name] = wd
        instance_urls[name] = server_url

    # NOTE(review): self.sort presumably orders configs so that
    # config.yaml fragments are appended deterministically — confirm
    self.sort(configs)
    for c in configs:
        instance_name = c['instance']['name']
        config = c['config']
        config_file_path = \
            '{}/config.yaml'.format(working_dirs[instance_name])
        if config:
            # inline config: re-serialize through yaml for normalization
            with open(config_file_path, 'a') as f:
                yaml.dump(yaml.load(config, Loader=yaml.FullLoader), f)
                f.write('\n')
        else:
            config_path = c['config_path']
            # get config data
            try:
                config_resource = gqlapi.get_resource(config_path)
                config = config_resource['content']
            except gql.GqlGetResourceError as e:
                raise FetchResourceError(str(e))
            with open(config_file_path, 'a') as f:
                f.write(config)
                f.write('\n')

    self.instances = instances
    self.instance_urls = instance_urls
    self.working_dirs = working_dirs
def get_ecrypted_credentials(credentials_name, user, settings):
    """Return the named credentials encrypted with the user's gpg key.

    Returns None unless exactly one credentials entry matches
    credentials_name in the settings credentials map.
    """
    matches = [c for c in settings['credentials']
               if c['name'] == credentials_name]
    if len(matches) != 1:
        return None

    credentials = secret_reader.read(matches[0]['secret'],
                                     settings=settings)
    recepient = smtp_client.get_recepient(user['org_username'], settings)
    return gpg_encrypt(credentials, recepient, user['public_gpg_key'])
def get_quay_api_store():
    """Return a map of quay org name -> QuayApi client."""
    gqlapi = gql.get_api()
    result = gqlapi.query(QUAY_ORG_CATALOG_QUERY)
    settings = queries.get_app_interface_settings()

    store = {}
    for org_data in result['quay_orgs']:
        name = org_data['name']
        token = secret_reader.read(org_data['automationToken'],
                                   settings=settings)
        store[name] = QuayApi(token, name)
    return store
def calculate_performance_latency(self, component, ns, metric):
    """Compute the 30d latency percentile for a component's metric.

    Queries the namespace's cluster prometheus and compares the
    observed latency against metric['threshold'].

    :param component: component identifier, echoed into the result
    :param ns: namespace dict with 'name' and 'cluster.prometheus'
    :param metric: dict with 'selectors' (json), 'metric',
        'percentile' (0-100) and 'threshold'
    :return: result dict, or None on an unexpected promql result
    """
    metric_selectors = json.loads(metric['selectors'])
    metric_name = metric['metric']
    selectors = metric_selectors.copy()
    selectors['namespace'] = ns['name']
    settings = queries.get_app_interface_settings()
    prom_info = ns['cluster']['prometheus']
    # basic-auth credentials are stored as a single "user:password" string
    prom_auth_creds = secret_reader.read(prom_info['auth'], settings)
    prom_auth = requests.auth.HTTPBasicAuth(*prom_auth_creds.split(':'))
    # percentile comes in as a number out of 100
    percentile = float(metric['percentile']) / 100
    prom_selectors = self.promqlify(selectors)
    # 30d latency quantile, excluding 5xx responses
    promql_query = f"""
        histogram_quantile({percentile},
        sum by (le) (increase(
            {metric_name}{{
                {prom_selectors}, code!~"5.."
            }}[30d]))
        )
        """
    result = promql(
        prom_info['url'],
        promql_query,
        auth=prom_auth,
    )
    if len(result) != 1:
        logging.error(("unexpected promql result:\n"
                       f"url: {prom_info['url']}\n"
                       f"query: {promql_query}"))
        return None
    latency = float(result[0]['value'][1])
    latency_slo_met = latency <= float(metric['threshold'])
    return {
        'component': component,
        'type': 'latency',
        'selectors': self.promqlify(metric_selectors),
        'latency': round(latency, 2),
        'latency_slo_met': latency_slo_met,
    }
def promquery(cluster, query):
    """Run a PromQL query"""
    config_data = config.get_config()
    auth_secret = {
        'path': config_data['promql-auth']['secret_path'],
        'field': 'token'
    }
    settings = queries.get_app_interface_settings()
    # credentials are stored as a single "user:password" string
    creds = secret_reader.read(auth_secret, settings)
    basic_auth = requests.auth.HTTPBasicAuth(*creds.split(':'))

    url = f"https://prometheus.{cluster}.devshift.net/api/v1/query"
    response = requests.get(url, params={'query': query}, auth=basic_auth)
    response.raise_for_status()
    print(json.dumps(response.json(), indent=4))
def run(dry_run):
    """Check that each GitHub org's 2FA requirement matches app-interface.

    Logs an error for every mismatching org and exits non-zero if any
    mismatch was found.
    """
    base_url = os.environ.get('GITHUB_API', 'https://api.github.com')
    orgs = queries.get_github_orgs()
    settings = queries.get_app_interface_settings()

    error = False
    for org in orgs:
        org_name = org['name']
        token = secret_reader.read(org['token'], settings=settings)
        gh_org = Github(token, base_url=base_url).get_organization(org_name)
        desired_2fa = org['two_factor_authentication'] or False
        if gh_org.two_factor_requirement_enabled != desired_2fa:
            logging.error(f"2FA mismatch for {org_name}")
            error = True

    if error:
        sys.exit(1)
def get_quay_api_store():
    """Return a map of quay org name -> {team: QuayApi client}.

    Each managed team of an org gets its own QuayApi instance bound to
    the org's automation token and (optional) server url.
    """
    store = {}
    gqlapi = gql.get_api()
    result = gqlapi.query(QUAY_ORG_CATALOG_QUERY)
    settings = queries.get_app_interface_settings()
    for org_data in result['quay_orgs']:
        name = org_data['name']
        server_url = org_data.get('serverUrl')
        token = secret_reader.read(org_data['automationToken'],
                                   settings=settings)
        # managedTeams is optional (fetched with .get()); guard against
        # None so an org without managed teams doesn't raise TypeError
        managed_teams = org_data.get('managedTeams') or []
        store[name] = {}
        for team in managed_teams:
            store[name][team] = QuayApi(token, name, team,
                                        base_url=server_url)
    return store
def fetch_current_state(unleash_instance):
    """Fetch the feature toggles currently set on the unleash instance."""
    settings = queries.get_app_interface_settings()
    admin_access_token = secret_reader.read(unleash_instance['token'],
                                            settings=settings)
    api_url = f"{unleash_instance['url']}/api"
    return get_feature_toggles(api_url, admin_access_token)
def __init__(self, jh, settings=None):
    """Initialize the jump host connection details.

    :param jh: jump host dict with 'hostname', 'user', 'port'
        (may be None, defaults to 22) and 'identity' (secret ref)
    :param settings: app-interface settings passed to secret_reader
    """
    self.hostname = jh['hostname']
    self.user = jh['user']
    port = jh['port']
    # default to the standard ssh port when unset
    self.port = port if port is not None else 22
    self.identity = secret_reader.read(jh['identity'], settings=settings)
    self.init_identity_file()
def __init__(self, token, settings=None):
    """Resolve the token secret and set it as the global pypd API key."""
    pypd.api_key = secret_reader.read(token, settings=settings)
def calculate_performance_availability(self, component, ns, metric):
    """Compute 30d request volume and availability for a component.

    Runs two promql queries against the namespace's cluster
    prometheus: total request volume, and the share of non-5xx
    responses, compared against the error budget.

    :param component: component identifier, echoed into the result
    :param ns: namespace dict with 'name' and 'cluster.prometheus'
    :param metric: dict with 'selectors' (json), 'metric' and
        'errorBudget'
    :return: result dict, or None on an unexpected promql result
    """
    metric_selectors = json.loads(metric['selectors'])
    metric_name = metric['metric']
    settings = queries.get_app_interface_settings()
    prom_info = ns['cluster']['prometheus']
    # basic-auth credentials are stored as a single "user:password" string
    prom_auth_creds = secret_reader.read(prom_info['auth'], settings)
    prom_auth = requests.auth.HTTPBasicAuth(*prom_auth_creds.split(':'))
    # volume
    vol_selectors = metric_selectors.copy()
    vol_selectors['namespace'] = ns['name']
    prom_vol_selectors = self.promqlify(vol_selectors)
    vol_promql_query = (f"sum(increase({metric_name}"
                        f"{{{prom_vol_selectors}}}[30d]))")
    vol_promql_query_result = promql(
        prom_info['url'],
        vol_promql_query,
        auth=prom_auth,
    )
    if len(vol_promql_query_result) != 1:
        logging.error(("unexpected promql result:\n"
                       f"url: {prom_info['url']}\n"
                       f"query: {vol_promql_query}"))
        return None
    volume = int(float(vol_promql_query_result[0]['value'][1]))
    # availability
    avail_selectors = metric_selectors.copy()
    avail_selectors['namespace'] = ns['name']
    prom_avail_selectors = self.promqlify(avail_selectors)
    # ratio of non-5xx requests over all requests, as a percentage
    avail_promql_query = f"""
        sum(increase(
            {metric_name}{{{prom_avail_selectors}, code!~"5.."}}[30d]
        ))
        /
        sum(increase(
            {metric_name}{{{prom_avail_selectors}}}[30d]
        )) * 100
        """
    avail_promql_query_result = promql(
        prom_info['url'],
        avail_promql_query,
        auth=prom_auth,
    )
    if len(avail_promql_query_result) != 1:
        logging.error(("unexpected promql result:\n"
                       f"url: {prom_info['url']}\n"
                       f"query: {avail_promql_query}"))
        return None
    availability = float(avail_promql_query_result[0]['value'][1])
    # SLO target is 100% minus the allowed error budget
    target_slo = 100 - float(metric['errorBudget'])
    availability_slo_met = availability >= target_slo
    return {
        'component': component,
        'type': 'availability',
        'selectors': self.promqlify(metric_selectors),
        'total_requests': volume,
        'availability': round(availability, 2),
        'availability_slo_met': availability_slo_met,
    }
def __init__(self, instance, repo_url, settings):
    """Bind this object to the GitHub repo identified by repo_url.

    :param instance: instance dict with a 'token' secret reference
    :param repo_url: full URL of the repository
    :param settings: app-interface settings passed to secret_reader
    """
    # the repo's full name ("owner/repo") is the URL path
    repo_name = urlparse(repo_url).path.strip('/')
    token = secret_reader.read(instance['token'], settings=settings)
    git_cli = github.Github(token, base_url=GH_BASE_URL)
    self.repo = git_cli.get_repo(repo_name)