def run(dry_run=False):
    """Reconcile GitLab state: log every diff and apply it unless dry_run."""
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    gl = GitLabApi(instance, settings=settings)
    current = get_current_state(instance, gl)
    desired = get_desired_state(instance, gl)
    # One diff entry per required change; act() performs the change.
    for diff in calculate_diff(current, desired):
        logging.info(list(diff.values()))
        if not dry_run:
            act(diff, gl)
def run(dry_run, gitlab_project_id=None, gitlab_merge_request_id=None):
    """Guess labels for a merge request from its changed paths and add
    any guessed label the MR does not already carry."""
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    gl = GitLabApi(instance, project_id=gitlab_project_id, settings=settings)
    project_labels = gl.get_project_labels()
    current_labels = gl.get_merge_request_labels(gitlab_merge_request_id)
    changed_paths = \
        gl.get_merge_request_changed_paths(gitlab_merge_request_id)
    guessed = guess_labels(project_labels, changed_paths)
    labels_to_add = [label for label in guessed
                     if label not in current_labels]
    if labels_to_add:
        logging.info(['add_labels', labels_to_add])
        # NOTE(review): dry_run is accepted but never checked here — labels
        # are added unconditionally. Confirm this is intentional.
        gl.add_labels_to_merge_request(gitlab_merge_request_id, labels_to_add)
def run(dry_run, thread_pool_size=10):
    """Trigger Jenkins deploy jobs for saas-file config diffs.

    Keeps retrying the whole spec list (with a pause) until a full pass
    completes without trigger errors.
    """
    saas_files = queries.get_saas_files()
    if not saas_files:
        logging.error('no saas files found')
        sys.exit(1)
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    accounts = queries.get_aws_accounts()
    gl = GitLabApi(instance, settings=settings)
    jenkins_map = jenkins_base.get_jenkins_map()
    saasherder = SaasHerder(
        saas_files,
        thread_pool_size=thread_pool_size,
        gitlab=gl,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION,
        settings=settings,
        accounts=accounts)
    trigger_specs = saasherder.get_configs_diff()
    already_triggered = []
    error = True  # force entry into the retry loop
    while error:
        error = False
        for job_spec in trigger_specs:
            saas_file_name = job_spec['saas_file_name']
            env_name = job_spec['env_name']
            instance_name = job_spec['instance_name']
            job_name = get_openshift_saas_deploy_job_name(
                saas_file_name, env_name, settings)
            if job_name not in already_triggered:
                logging.info(['trigger_job', instance_name, job_name])
                if dry_run:
                    # pretend we triggered it so it is logged only once
                    already_triggered.append(job_name)
            if not dry_run:
                jenkins = jenkins_map[instance_name]
                try:
                    if job_name not in already_triggered:
                        jenkins.trigger_job(job_name)
                        already_triggered.append(job_name)
                    saasherder.update_config(job_spec)
                except Exception as e:
                    error = True
                    logging.error(f"could not trigger job {job_name} " +
                                  f"in {instance_name}. details: {str(e)}")
        if error:
            time.sleep(10)  # add to constants module once created
def run(dry_run=False, thread_pool_size=10):
    """Trigger Jenkins deploy jobs for saas-file moving-commit changes.

    Single pass over the trigger specs; exits non-zero if any trigger
    attempt failed.
    """
    saas_files = queries.get_saas_files()
    if not saas_files:
        logging.error('no saas files found')
        sys.exit(1)
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    accounts = queries.get_aws_accounts()
    gl = GitLabApi(instance, settings=settings)
    jenkins_map = jenkins_base.get_jenkins_map()
    saasherder = SaasHerder(
        saas_files,
        thread_pool_size=thread_pool_size,
        gitlab=gl,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION,
        settings=settings,
        accounts=accounts)
    if not saasherder.valid:
        sys.exit(1)
    trigger_specs = saasherder.get_moving_commits_diff(dry_run)
    already_triggered = []
    error = False
    for job_spec in trigger_specs:
        saas_file_name = job_spec['saas_file_name']
        env_name = job_spec['env_name']
        instance_name = job_spec['instance_name']
        job_name = get_openshift_saas_deploy_job_name(saas_file_name,
                                                      env_name, settings)
        if job_name not in already_triggered:
            logging.info(['trigger_job', instance_name, job_name])
            if dry_run:
                # pretend we triggered it so it is logged only once
                already_triggered.append(job_name)
        if not dry_run:
            jenkins = jenkins_map[instance_name]
            try:
                if job_name not in already_triggered:
                    jenkins.trigger_job(job_name)
                    already_triggered.append(job_name)
                saasherder.update_moving_commit(job_spec)
            except Exception:
                error = True
                logging.error(
                    f"could not trigger job {job_name} in {instance_name}.")
    if error:
        sys.exit(1)
def run(dry_run=False, thread_pool_size=10):
    """Ensure app-sre group members are maintainers on every GitLab repo."""
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    gl = GitLabApi(instance, settings=settings)
    repos = queries.get_repos(server=gl.server)
    app_sre = gl.get_app_sre_group_users()
    # One result list per repo; flatten into a single worklist.
    results = threaded.run(get_members_to_add, repos, thread_pool_size,
                           gl=gl, app_sre=app_sre)
    members_to_add = [member for batch in results for member in batch]
    for member in members_to_add:
        logging.info(['add_maintainer', member["repo"],
                      member["user"].username])
        if not dry_run:
            gl.add_project_member(member["repo"], member["user"])
def run(dry_run=False, days_interval=15, enable_closing=False, limit=1):
    """Run housekeeping (stale items, merges, optional rebases) on each
    GitLab repo configured for housekeeping."""
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    repos = queries.get_repos_gitlab_housekeeping(server=instance['url'])
    for repo in repos:
        gl = GitLabApi(instance, project_url=repo['url'], settings=settings)
        for item_type in ('issue', 'merge-request'):
            handle_stale_items(dry_run, gl, days_interval, enable_closing,
                               item_type)
        merge_merge_requests(dry_run, gl, limit)
        if repo['enable_rebase']:
            rebase_merge_requests(dry_run, gl, limit)
def run(dry_run):
    """Delete project webhooks that still point at retired Jenkins URLs."""
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    gl = GitLabApi(instance, settings=settings)
    previous_urls = queries.get_jenkins_instances_previous_urls()
    repos = queries.get_repos(server=gl.server)
    for repo in repos:
        for hook in gl.get_project_hooks(repo):
            hook_url = hook.url
            for old_url in previous_urls:
                if hook_url.startswith(old_url):
                    logging.info(['delete_hook', repo, hook_url])
                    if not dry_run:
                        hook.delete()
def run(dry_run, gitlab_project_id=None, thread_pool_size=10):
    """Open a GitLab MR deleting each user flagged for removal.

    Fix: deletions are now logged in dry-run mode as well. Previously the
    whole loop (including the logging.info call) sat inside the
    ``if not dry_run`` guard, so a dry run produced no output at all;
    every other integration in this file logs unconditionally and gates
    only the mutating call on ``dry_run``.
    """
    users = init_users()
    user_specs = threaded.run(init_user_spec, users, thread_pool_size)
    users_to_delete = [(username, paths)
                       for username, delete, paths in user_specs if delete]
    if not dry_run:
        # only build the GitLab client when we will actually submit MRs
        instance = queries.get_gitlab_instance()
        settings = queries.get_app_interface_settings()
        gl = GitLabApi(instance, project_id=gitlab_project_id,
                       settings=settings)
    for username, paths in users_to_delete:
        logging.info(['delete_user', username])
        if not dry_run:
            gl.create_delete_user_mr(username, paths)
def init(gitlab_project_id=None, override_pr_gateway_type=None):
    """Return a pull-request gateway client for the configured type.

    Raises PullRequestGatewayError for an unknown type or a missing
    gitlab project id.
    """
    pr_gateway_type = override_pr_gateway_type or get_pr_gateway_type()
    if pr_gateway_type == 'gitlab':
        instance = queries.get_gitlab_instance()
        settings = queries.get_app_interface_settings()
        if gitlab_project_id is None:
            raise PullRequestGatewayError('missing gitlab project id')
        return GitLabApi(instance, project_id=gitlab_project_id,
                         settings=settings)
    if pr_gateway_type == 'sqs':
        accounts = queries.get_aws_accounts()
        settings = queries.get_app_interface_settings()
        return SQSGateway(accounts, settings=settings)
    raise PullRequestGatewayError(
        'invalid pull request gateway: {}'.format(pr_gateway_type))
def run(dry_run):
    """Keep each repo's GitLab Jira service in sync with app-interface."""
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    gl = GitLabApi(instance, settings=settings)
    secret_reader = SecretReader(settings=settings)
    # Jira
    repos = queries.get_repos_gitlab_jira(server=gl.server)
    for repo in repos:
        repo_url = repo['url']
        project = gl.get_project(repo_url=repo_url)
        services = project.services
        current_jira = services.get('jira')
        desired_jira = repo['jira']
        desired_jira_url = desired_jira['serverUrl']
        desired_jira_credentials = \
            secret_reader.read_all(desired_jira['token'])
        if current_jira.active:
            props = current_jira.properties
            if props['url'] == desired_jira_url \
                    and props['username'] == \
                    desired_jira_credentials['username']:
                # already configured as desired; nothing to do
                continue
        logging.info(['update_jira', repo_url, desired_jira_url])
        if not dry_run:
            new_data = {
                'active': True,
                'url': desired_jira_url,
                'username': desired_jira_credentials['username'],
                'password': desired_jira_credentials['password'],
                'commit_events': True,
                'merge_requests_events': True,
                'comment_on_event_enabled': False,
            }
            services.update('jira', new_data=new_data)
def run(dry_run=False, thread_pool_size=10, saas_file_name=None,
        env_name=None, defer=None):
    """Deploy saas-file desired state to the target namespaces."""
    saas_files = queries.get_saas_files(saas_file_name, env_name)
    if not saas_files:
        logging.error('no saas files found')
        sys.exit(1)
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    try:
        gl = GitLabApi(instance, settings=settings)
    except Exception:
        # allow execution without access to gitlab
        # as long as there are no access attempts.
        gl = None
    saasherder = SaasHerder(
        saas_files,
        thread_pool_size=thread_pool_size,
        gitlab=gl,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION,
        settings=settings)
    if not saasherder.valid:
        sys.exit(1)
    ri, oc_map = ob.fetch_current_state(
        namespaces=saasherder.namespaces,
        thread_pool_size=thread_pool_size,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION)
    defer(lambda: oc_map.cleanup())
    saasherder.populate_desired_state(ri)
    # if saas_file_name is defined, the integration
    # is being called from multiple running instances
    ob.realize_data(dry_run, oc_map, ri, caller=saas_file_name)
    if ri.has_error_registered():
        sys.exit(1)
def run(dry_run=False, thread_pool_size=10, defer=None):
    """Deploy saas-file desired state and notify Slack with the outcome.

    Deletion of resources is only enabled when no errors were registered
    while fetching/realizing state.

    Fix: replaces the anti-idiom ``False if ri.has_error_registered()
    else True`` with the equivalent ``not ri.has_error_registered()``.
    """
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    aws_accounts = queries.get_aws_accounts()
    gl = GitLabApi(instance, settings=settings)
    saas_files = queries.get_saas_files()
    saasherder = SaasHerder(
        saas_files,
        gitlab=gl,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION,
        settings=settings)
    ri, oc_map = ob.fetch_current_state(
        namespaces=saasherder.namespaces,
        thread_pool_size=thread_pool_size,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION)
    defer(lambda: oc_map.cleanup())
    saasherder.populate_desired_state(ri)
    # do not delete anything if errors were registered
    enable_deletion = not ri.has_error_registered()
    ob.realize_data(dry_run, oc_map, ri, enable_deletion=enable_deletion)
    saasherder.slack_notify(dry_run, aws_accounts, ri)
def run(dry_run, wait_for_pipeline):
    """Per-repo housekeeping driven by each repo's 'housekeeping' config
    (stale-item handling, merging with fallback, optional rebasing)."""
    default_days_interval = 15
    default_limit = 8
    default_enable_closing = False
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    repos = queries.get_repos_gitlab_housekeeping(server=instance['url'])
    for repo in repos:
        hk = repo['housekeeping']
        project_url = repo['url']
        days_interval = hk.get('days_interval') or default_days_interval
        enable_closing = hk.get('enable_closing') or default_enable_closing
        limit = hk.get('limit') or default_limit
        gl = GitLabApi(instance, project_url=project_url, settings=settings)
        for item_type in ('issue', 'merge-request'):
            handle_stale_items(dry_run, gl, days_interval, enable_closing,
                               item_type)
        rebase = hk.get('rebase')
        try:
            merge_merge_requests(dry_run, gl, limit, rebase, insist=True,
                                 wait_for_pipeline=wait_for_pipeline)
        except Exception:
            # NOTE(review): best-effort fallback — retry merging without
            # insist=True when the insistent attempt raises. Confirm the
            # broad Exception catch is intentional.
            merge_merge_requests(dry_run, gl, limit, rebase,
                                 wait_for_pipeline=wait_for_pipeline)
        if rebase:
            rebase_merge_requests(dry_run, gl, limit,
                                  wait_for_pipeline=wait_for_pipeline)
def run(dry_run, gitlab_project_id):
    """Drain the SQS queue of MR requests and submit each one to GitLab."""
    settings = queries.get_app_interface_settings()
    accounts = queries.get_aws_accounts()
    sqs_cli = SQSGateway(accounts, settings=settings)
    instance = queries.get_gitlab_instance()
    saas_files = queries.get_saas_files_minimal()
    gitlab_cli = GitLabApi(instance, project_id=gitlab_project_id,
                           settings=settings, saas_files=saas_files)
    while True:
        messages = sqs_cli.receive_messages()
        logging.info('received %s messages', len(messages))
        if not messages:
            break
        # Delete everything we received up front, otherwise the messages
        # re-appear on the queue after 30s.
        for message in messages:
            receipt_handle = message[0]
            sqs_cli.delete_message(str(receipt_handle))
        # Now process. Failures here are not critical, even though the
        # messages are already deleted, since the producers keep re-sending
        # the message until the MR gets merged to app-interface.
        for message in messages:
            receipt_handle, body = message[0], message[1]
            logging.info('received message %s with body %s',
                         receipt_handle[:6], json.dumps(body))
            if not dry_run:
                merge_request = mr.init_from_sqs_message(body)
                merge_request.submit_to_gitlab(gitlab_cli=gitlab_cli)
def get_gitlab_api():
    """Build a GitLabApi client from the [gitlab] section of the config.

    NOTE(review): ssl_verify=False disables TLS verification — confirm
    this is intentional for this environment.
    """
    gitlab_config = get_config()['gitlab']
    return GitLabApi(gitlab_config['server'], gitlab_config['token'],
                     ssl_verify=False)
def get_gitlab_api():
    """Build a GitLabApi client from app-interface queries."""
    return GitLabApi(queries.get_gitlab_instance(),
                     settings=queries.get_app_interface_settings())
def run(dry_run, thread_pool_size=10, saas_file_name=None,
        env_name=None, defer=None):
    """Deploy saas-file desired state to OpenShift and, for a single saas
    file with slack output 'events', post one message per realized action."""
    saas_files = queries.get_saas_files(saas_file_name, env_name)
    if not saas_files:
        logging.error('no saas files found')
        sys.exit(1)
    instance = queries.get_gitlab_instance()
    desired_jenkins_instances = [s['instance']['name'] for s in saas_files]
    jenkins_map = jenkins_base.get_jenkins_map(
        desired_instances=desired_jenkins_instances)
    settings = queries.get_app_interface_settings()
    try:
        gl = GitLabApi(instance, settings=settings)
    except Exception:
        # allow execution without access to gitlab
        # as long as there are no access attempts.
        gl = None
    saasherder = SaasHerder(
        saas_files,
        thread_pool_size=thread_pool_size,
        gitlab=gl,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION,
        settings=settings,
        jenkins_map=jenkins_map)
    if not saasherder.valid:
        sys.exit(1)
    if len(saasherder.namespaces) == 0:
        logging.warning('no targets found')
        sys.exit(0)
    ri, oc_map = ob.fetch_current_state(
        namespaces=saasherder.namespaces,
        thread_pool_size=thread_pool_size,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION,
        init_api_resources=True)
    defer(lambda: oc_map.cleanup())
    saasherder.populate_desired_state(ri)
    # if saas_file_name is defined, the integration
    # is being called from multiple running instances
    actions = ob.realize_data(
        dry_run, oc_map, ri,
        caller=saas_file_name,
        wait_for_namespace=True,
        no_dry_run_skip_compare=(not saasherder.compare),
        take_over=saasherder.take_over)
    if ri.has_error_registered():
        sys.exit(1)
    # send human readable notifications to slack
    # we only do this if:
    # - this is not a dry run
    # - there is a single saas file deployed
    # - output is 'events'
    # - no errors were registered
    if not dry_run and len(saasherder.saas_files) == 1:
        saas_file = saasherder.saas_files[0]
        slack_info = saas_file.get('slack')
        if slack_info and actions and slack_info.get('output') == 'events':
            slack = init_slack(slack_info, QONTRACT_INTEGRATION)
            for action in actions:
                message = (
                    f"[{action['cluster']}] "
                    f"{action['kind']} {action['name']} {action['action']}")
                slack.chat_post_message(message)
def run(dry_run):
    """Approve, un-approve, or report on open MRs based on repo owners."""
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    repos = queries.get_repos_gitlab_owner(server=instance['url'])
    for repo in repos:
        gitlab_cli = GitLabApi(instance, project_url=repo, settings=settings)
        project_owners = RepoOwners(git_cli=gitlab_cli)
        for mr in gitlab_cli.get_merge_requests(state=MRState.OPENED):
            mr_approval = MRApproval(gitlab_client=gitlab_cli,
                                     merge_request=mr,
                                     owners=project_owners,
                                     dry_run=dry_run)
            # common log prefix for this project / merge request
            prefix = (f'Project:{gitlab_cli.project.id} '
                      f'Merge Request:{mr.iid} ')
            approval_status = mr_approval.get_approval_status()
            if approval_status['approved']:
                if mr_approval.has_approval_label():
                    _LOG.info([prefix + '- already approved'])
                    continue
                _LOG.info([prefix + '- approving now'])
                if not dry_run:
                    gitlab_cli.add_label_to_merge_request(
                        mr.iid, APPROVAL_LABEL)
                continue
            # not approved: drop any stale approval label
            if not dry_run:
                if mr_approval.has_approval_label():
                    _LOG.info([prefix + '- removing approval'])
                    gitlab_cli.remove_label_from_merge_request(
                        mr.iid, APPROVAL_LABEL)
            if approval_status['report'] is not None:
                _LOG.info([prefix + '- publishing approval report'])
                if not dry_run:
                    gitlab_cli.remove_label_from_merge_request(
                        mr.iid, APPROVAL_LABEL)
                    mr.notes.create({'body': approval_status['report']})
                continue
            _LOG.info([prefix + '- not fully approved'])
def init_gitlab(gitlab_project_id):
    """Build a project-scoped GitLabApi client from app-interface queries."""
    return GitLabApi(queries.get_gitlab_instance(),
                     project_id=gitlab_project_id,
                     settings=queries.get_app_interface_settings())
def act(repo, dry_run, instance, settings):
    """Approve, un-approve, or report on one repo's open MRs based on
    repo owners; MRs without a top-commit timestamp are skipped."""
    gitlab_cli = GitLabApi(instance, project_url=repo, settings=settings)
    project_owners = RepoOwners(git_cli=gitlab_cli)
    for mr in gitlab_cli.get_merge_requests(state=MRState.OPENED):
        mr_approval = MRApproval(gitlab_client=gitlab_cli,
                                 merge_request=mr,
                                 owners=project_owners,
                                 dry_run=dry_run)
        # common log prefix for this project / merge request
        prefix = (f'Project:{gitlab_cli.project.id} '
                  f'Merge Request:{mr.iid} ')
        if mr_approval.top_commit_created_at is None:
            _LOG.info([prefix + '- skipping'])
            continue
        approval_status = mr_approval.get_approval_status()
        if approval_status['approved']:
            if mr_approval.has_approval_label():
                _LOG.info([prefix + '- already approved'])
                continue
            _LOG.info([prefix + '- approving now'])
            if not dry_run:
                gitlab_cli.add_label_to_merge_request(mr.iid, APPROVAL_LABEL)
            continue
        # not approved: drop any stale approval label
        if not dry_run:
            if mr_approval.has_approval_label():
                _LOG.info([prefix + '- removing approval'])
                gitlab_cli.remove_label_from_merge_request(
                    mr.iid, APPROVAL_LABEL)
        if approval_status['report'] is not None:
            _LOG.info([prefix + '- publishing approval report'])
            if not dry_run:
                gitlab_cli.remove_label_from_merge_request(
                    mr.iid, APPROVAL_LABEL)
                mr.notes.create({'body': approval_status['report']})
            continue
        _LOG.info([prefix + '- not fully approved'])