def run(dry_run, gitlab_project_id=None, gitlab_merge_request_id=None):
    """Print 'yes' when the merge request carries the 'skip-ci' label, else 'no'."""
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    gl = GitLabApi(instance, project_id=gitlab_project_id, settings=settings)
    mr_labels = gl.get_merge_request_labels(gitlab_merge_request_id)
    print('yes' if 'skip-ci' in mr_labels else 'no')
def run(dry_run=False, thread_pool_size=10):
    """Add missing app-sre members as maintainers on all managed GitLab repos."""
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    gl = GitLabApi(instance, settings=settings)
    repos = queries.get_repos(server=gl.server)
    app_sre = gl.get_app_sre_group_users()
    # one result list per repo; flatten them into a single work list
    results = threaded.run(get_members_to_add, repos, thread_pool_size,
                           gl=gl, app_sre=app_sre)
    members_to_add = []
    for sublist in results:
        members_to_add.extend(sublist)
    for member in members_to_add:
        logging.info(['add_maintainer', member["repo"], member["user"].username])
        if not dry_run:
            gl.add_project_member(member["repo"], member["user"])
def run(dry_run):
    """Create requested GitLab projects that are declared in app-interface.

    Exits non-zero if any requested project has no matching codeComponent.
    """
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    code_components = queries.get_code_components()
    app_int_repos = [c['url'] for c in code_components]
    saas_bundle_repos = [c['url'] for c in code_components
                         if c['resource'] == 'bundle']
    gl = GitLabApi(instance, settings=settings)
    error = False
    for request in (instance['projectRequests'] or []):
        group = request['group']
        group_id, existing_projects = gl.get_group_id_and_projects(group)
        for project in request['projects']:
            if project in existing_projects:
                continue
            project_url = gl.get_project_url(group, project)
            # every created project must be tracked by a codeComponent
            if project_url not in app_int_repos:
                logging.error(f'{project_url} missing from all codeComponents')
                error = True
                continue
            logging.info(['create_project', group, project])
            if not dry_run:
                gl.create_project(group_id, project)
            if project_url in saas_bundle_repos:
                logging.info(['initiate_saas_bundle_repo', group, project])
                if not dry_run:
                    gl.initiate_saas_bundle_repo(project_url)
    sys.exit(error)
def __init__(self, project_id, mr_id, maintainers_group):
    """Set up GitLab clients for the target project and the MR's source fork.

    :param project_id: id of the project the merge request targets
    :param mr_id: iid of the merge request to inspect
    :param maintainers_group: group whose members must maintain the fork
    """
    # start with a clean status; error bits are OR-ed in later
    self.exit_code = self.OK
    self.maintainers_group = maintainers_group
    self.instance = queries.get_gitlab_instance()
    self.settings = queries.get_app_interface_settings()
    # client bound to the upstream (target) project
    self.gl_cli = GitLabApi(self.instance, project_id=project_id,
                            settings=self.settings)
    self.mr = self.gl_cli.get_merge_request(mr_id)
    # client bound to the fork the MR originates from
    self.src = GitLabApi(self.instance,
                         project_id=self.mr.source_project_id,
                         settings=self.settings)
def run(dry_run):
    """Run GitLab housekeeping (stale items, merges, rebases) per repo.

    Each repo's 'housekeeping' config may override the interval, the merge
    limit and the closing behavior; falsy values fall back to the defaults.
    """
    default_days_interval = 15
    default_limit = 8
    default_enable_closing = False
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    repos = queries.get_repos_gitlab_housekeeping(server=instance['url'])
    for repo in repos:
        hk = repo['housekeeping']
        project_url = repo['url']
        days_interval = hk.get('days_interval') or default_days_interval
        enable_closing = hk.get('enable_closing') or default_enable_closing
        limit = hk.get('limit') or default_limit
        gl = GitLabApi(instance, project_url=project_url, settings=settings)
        handle_stale_items(dry_run, gl, days_interval, enable_closing,
                           'issue')
        handle_stale_items(dry_run, gl, days_interval, enable_closing,
                           'merge-request')
        rebase = hk.get('rebase')
        try:
            merge_merge_requests(dry_run, gl, limit, rebase, insist=True)
        except Exception:
            # do not silently swallow the first failure: record it with a
            # traceback before falling back to the best-effort retry
            logging.exception(
                f'insisting merge failed for {project_url}; retrying once')
            merge_merge_requests(dry_run, gl, limit, rebase)
        if rebase:
            rebase_merge_requests(dry_run, gl, limit)
def run(dry_run):
    """Delete project webhooks still pointing at previous Jenkins URLs."""
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    gl = GitLabApi(instance, settings=settings)
    previous_urls = queries.get_jenkins_instances_previous_urls()
    for repo in queries.get_repos(server=gl.server):
        for hook in gl.get_project_hooks(repo):
            hook_url = hook.url
            for stale_url in previous_urls:
                if not hook_url.startswith(stale_url):
                    continue
                logging.info(['delete_hook', repo, hook_url])
                if not dry_run:
                    hook.delete()
def run(dry_run, gitlab_project_id=None, thread_pool_size=10):
    """Open merge requests that delete users flagged for removal.

    Fixes: the original only logged inside ``if not dry_run``, so a dry run
    produced no output at all, and it re-checked ``dry_run`` redundantly
    inside that branch. Now the planned deletions are always logged and the
    GitLab client is only created when MRs will actually be opened.
    """
    users = init_users()
    user_specs = threaded.run(init_user_spec, users, thread_pool_size)
    users_to_delete = [(username, paths)
                       for username, delete, paths in user_specs if delete]
    # always show what would be (or is being) deleted
    for username, _ in users_to_delete:
        logging.info(['delete_user', username])
    if not dry_run:
        instance = queries.get_gitlab_instance()
        settings = queries.get_app_interface_settings()
        gl = GitLabApi(instance, project_id=gitlab_project_id,
                       settings=settings)
        for username, paths in users_to_delete:
            gl.create_delete_user_mr(username, paths)
def get_gitlab_api():
    """Build a GitLabApi client from the [gitlab] section of the config."""
    gitlab_config = get_config()['gitlab']
    return GitLabApi(gitlab_config['server'], gitlab_config['token'],
                     ssl_verify=False)
def get_housekeeping_gitlab_api():
    """Build a GitLabApi client bound to the configured housekeeping project."""
    gitlab_config = get_config()['gitlab']
    return GitLabApi(gitlab_config['server'],
                     gitlab_config['token'],
                     project_id=gitlab_config['housekeeping']['project_id'],
                     ssl_verify=False)
def get_app_interface_gitlab_api():
    """Build a GitLabApi client bound to the configured app-interface project."""
    gitlab_config = get_config()['gitlab']
    return GitLabApi(gitlab_config['server'],
                     gitlab_config['token'],
                     project_id=gitlab_config['app-interface']['project_id'],
                     ssl_verify=False)
def run(dry_run, thread_pool_size=10):
    # Trigger Jenkins deploy jobs for saas-file targets whose tracked
    # commits have moved since the last reconciliation.
    saas_files = queries.get_saas_files()
    if not saas_files:
        logging.error('no saas files found')
        sys.exit(1)
    # Remove saas-file targets that are disabled
    # (iterate over shallow copies so in-place removal is safe)
    for saas_file in saas_files[:]:
        resource_templates = saas_file['resourceTemplates']
        for rt in resource_templates[:]:
            targets = rt['targets']
            for target in targets[:]:
                if target['disable']:
                    targets.remove(target)
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    accounts = queries.get_aws_accounts()
    gl = GitLabApi(instance, settings=settings)
    jenkins_map = jenkins_base.get_jenkins_map()
    saasherder = SaasHerder(saas_files,
                            thread_pool_size=thread_pool_size,
                            gitlab=gl,
                            integration=QONTRACT_INTEGRATION,
                            integration_version=QONTRACT_INTEGRATION_VERSION,
                            settings=settings,
                            accounts=accounts)
    trigger_specs = saasherder.get_moving_commits_diff(dry_run)
    # each job is triggered at most once, even if several specs map to it
    already_triggered = []
    error = False
    for job_spec in trigger_specs:
        saas_file_name = job_spec['saas_file_name']
        env_name = job_spec['env_name']
        instance_name = job_spec['instance_name']
        job_name = get_openshift_saas_deploy_job_name(saas_file_name,
                                                      env_name, settings)
        if job_name not in already_triggered:
            logging.info(['trigger_job', instance_name, job_name])
            if dry_run:
                # in dry-run we only record the decision, never trigger
                already_triggered.append(job_name)
        if not dry_run:
            jenkins = jenkins_map[instance_name]
            try:
                if job_name not in already_triggered:
                    jenkins.trigger_job(job_name)
                    already_triggered.append(job_name)
                # the moving commit is only updated after a successful trigger
                saasherder.update_moving_commit(job_spec)
            except Exception:
                error = True
                logging.error(
                    f"could not trigger job {job_name} in {instance_name}.")
    if error:
        sys.exit(1)
def run(dry_run):
    """Keep the Jira service integration of managed repos in sync."""
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    gl = GitLabApi(instance, settings=settings)
    secret_reader = SecretReader(settings=settings)
    # Jira
    for repo in queries.get_repos_gitlab_jira(server=gl.server):
        repo_url = repo['url']
        project = gl.get_project(repo_url=repo_url)
        services = project.services
        current_jira = services.get('jira')
        desired_jira = repo['jira']
        desired_jira_url = desired_jira['serverUrl']
        desired_jira_credentials = \
            secret_reader.read_all(desired_jira['token'])
        if current_jira.active:
            properties = current_jira.properties
            # already configured as desired: nothing to do
            if (properties['url'] == desired_jira_url
                    and properties['username'] ==
                    desired_jira_credentials['username']):
                continue
        logging.info(['update_jira', repo_url, desired_jira_url])
        if not dry_run:
            new_data = {
                'active': True,
                'url': desired_jira_url,
                'username': desired_jira_credentials['username'],
                'password': desired_jira_credentials['password'],
                'commit_events': True,
                'merge_requests_events': True,
                'comment_on_event_enabled': False,
            }
            services.update('jira', new_data=new_data)
def __new__(cls, url, *args, **kwargs):
    """Instantiate the git API client that matches the host of ``url``.

    Fixes: ``parsed_url.hostname`` is ``None`` for URLs without a network
    location, so ``'github' in parsed_url.hostname`` raised ``TypeError``;
    and an unmatched host silently made ``__new__`` return ``None``, which
    callers would only notice later as an ``AttributeError``. Unsupported
    URLs now raise a descriptive ``ValueError`` instead.
    """
    parsed_url = urlparse(url)
    settings = queries.get_app_interface_settings()
    hostname = parsed_url.hostname or ''
    if 'github' in hostname:
        instance = queries.get_github_instance()
        return GithubApi(instance, repo_url=url, settings=settings)
    if 'gitlab' in hostname:
        instance = queries.get_gitlab_instance()
        return GitLabApi(instance, project_url=url, settings=settings)
    raise ValueError(f'no git API implementation for {url}')
def run(dry_run, thread_pool_size=10, saas_file_name=None, env_name=None,
        defer=None):
    """Realize saas-file desired state in OpenShift via SaasHerder."""
    saas_files = queries.get_saas_files(saas_file_name, env_name)
    if not saas_files:
        logging.error('no saas files found')
        sys.exit(1)
    instance = queries.get_gitlab_instance()
    # only the Jenkins instances referenced by the selected saas files
    desired_jenkins_instances = [s['instance']['name'] for s in saas_files]
    jenkins_map = jenkins_base.get_jenkins_map(
        desired_instances=desired_jenkins_instances)
    settings = queries.get_app_interface_settings()
    try:
        gl = GitLabApi(instance, settings=settings)
    except Exception:
        # allow execution without access to gitlab
        # as long as there are no access attempts.
        gl = None
    saasherder = SaasHerder(saas_files,
                            thread_pool_size=thread_pool_size,
                            gitlab=gl,
                            integration=QONTRACT_INTEGRATION,
                            integration_version=QONTRACT_INTEGRATION_VERSION,
                            settings=settings,
                            jenkins_map=jenkins_map)
    if not saasherder.valid:
        sys.exit(1)
    if len(saasherder.namespaces) == 0:
        logging.warning('no targets found')
        sys.exit(0)
    ri, oc_map = ob.fetch_current_state(
        namespaces=saasherder.namespaces,
        thread_pool_size=thread_pool_size,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION)
    # make sure cluster clients are cleaned up on exit
    defer(lambda: oc_map.cleanup())
    saasherder.populate_desired_state(ri)
    # if saas_file_name is defined, the integration
    # is being called from multiple running instances
    ob.realize_data(dry_run, oc_map, ri, caller=saas_file_name,
                    wait_for_namespace=True,
                    no_dry_run_skip_compare=True,
                    take_over=saasherder.take_over)
    if ri.has_error_registered():
        sys.exit(1)
def run(dry_run, thread_pool_size=10):
    """Trigger Jenkins deploy jobs for saas-file configuration changes,
    retrying failed triggers until every job has been started."""
    saas_files = queries.get_saas_files()
    if not saas_files:
        logging.error('no saas files found')
        sys.exit(1)
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    accounts = queries.get_aws_accounts()
    gl = GitLabApi(instance, settings=settings)
    jenkins_map = jenkins_base.get_jenkins_map()
    saasherder = SaasHerder(saas_files,
                            thread_pool_size=thread_pool_size,
                            gitlab=gl,
                            integration=QONTRACT_INTEGRATION,
                            integration_version=QONTRACT_INTEGRATION_VERSION,
                            settings=settings,
                            accounts=accounts)
    if not saasherder.valid:
        sys.exit(1)
    trigger_specs = saasherder.get_configs_diff()
    # each job is triggered at most once across all retry rounds
    already_triggered = []
    error = True  # enter loop
    while error:
        error = False
        for job_spec in trigger_specs:
            saas_file_name = job_spec['saas_file_name']
            env_name = job_spec['env_name']
            instance_name = job_spec['instance_name']
            job_name = get_openshift_saas_deploy_job_name(
                saas_file_name, env_name, settings)
            if job_name not in already_triggered:
                logging.info(['trigger_job', instance_name, job_name])
                if dry_run:
                    # in dry-run we only record the decision, never trigger
                    already_triggered.append(job_name)
            if not dry_run:
                jenkins = jenkins_map[instance_name]
                try:
                    if job_name not in already_triggered:
                        jenkins.trigger_job(job_name)
                        already_triggered.append(job_name)
                    # config is only updated once the trigger succeeded
                    saasherder.update_config(job_spec)
                except Exception as e:
                    error = True
                    logging.error(f"could not trigger job {job_name} " +
                                  f"in {instance_name}. details: {str(e)}")
        if error:
            time.sleep(10)  # add to constants module once created
def run(dry_run):
    """Reconcile GitLab state: diff current against desired and act on it."""
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    gl = GitLabApi(instance, settings=settings)
    current_state = get_current_state(instance, gl)
    desired_state = get_desired_state(instance, gl)
    for diff in calculate_diff(current_state, desired_state):
        logging.info(list(diff.values()))
        if not dry_run:
            act(diff, gl)
def run(dry_run=False, days_interval=15, enable_closing=False, limit=1):
    """Run housekeeping (stale items, merges, optional rebases) on each repo."""
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    for repo in queries.get_repos_gitlab_housekeeping(server=instance['url']):
        gl = GitLabApi(instance, project_url=repo['url'], settings=settings)
        for item_type in ('issue', 'merge-request'):
            handle_stale_items(dry_run, gl, days_interval, enable_closing,
                               item_type)
        merge_merge_requests(dry_run, gl, limit)
        if repo['enable_rebase']:
            rebase_merge_requests(dry_run, gl, limit)
def run(dry_run, gitlab_project_id=None, gitlab_merge_request_id=None):
    """Guess labels from the MR's changed paths and add the missing ones."""
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    gl = GitLabApi(instance, project_id=gitlab_project_id, settings=settings)
    project_labels = gl.get_project_labels()
    current_labels = gl.get_merge_request_labels(gitlab_merge_request_id)
    changed_paths = \
        gl.get_merge_request_changed_paths(gitlab_merge_request_id)
    guessed_labels = guess_labels(project_labels, changed_paths)
    labels_to_add = [label for label in guessed_labels
                     if label not in current_labels]
    if labels_to_add:
        logging.info(['add_labels', labels_to_add])
        gl.add_labels_to_merge_request(gitlab_merge_request_id, labels_to_add)
def init(gitlab_project_id=None, override_pr_gateway_type=None):
    """Return the configured pull-request gateway (GitLab or SQS)."""
    pr_gateway_type = override_pr_gateway_type or get_pr_gateway_type()
    if pr_gateway_type == 'gitlab':
        instance = queries.get_gitlab_instance()
        settings = queries.get_app_interface_settings()
        if gitlab_project_id is None:
            raise PullRequestGatewayError('missing gitlab project id')
        return GitLabApi(instance, project_id=gitlab_project_id,
                         settings=settings)
    if pr_gateway_type == 'sqs':
        accounts = queries.get_aws_accounts()
        settings = queries.get_app_interface_settings()
        return SQSGateway(accounts, settings=settings)
    raise PullRequestGatewayError(
        'invalid pull request gateway: {}'.format(pr_gateway_type))
def run(dry_run=False, thread_pool_size=10, saas_file_name=None,
        env_name=None, defer=None):
    """Realize saas-file desired state in OpenShift."""
    saas_files = queries.get_saas_files(saas_file_name, env_name)
    if not saas_files:
        logging.error('no saas files found')
        sys.exit(1)
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    try:
        gl = GitLabApi(instance, settings=settings)
    except Exception:
        # allow execution without access to gitlab
        # as long as there are no access attempts.
        gl = None
    saasherder = SaasHerder(saas_files,
                            thread_pool_size=thread_pool_size,
                            gitlab=gl,
                            integration=QONTRACT_INTEGRATION,
                            integration_version=QONTRACT_INTEGRATION_VERSION,
                            settings=settings)
    if not saasherder.valid:
        sys.exit(1)
    ri, oc_map = ob.fetch_current_state(
        namespaces=saasherder.namespaces,
        thread_pool_size=thread_pool_size,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION)
    # make sure cluster clients are cleaned up on exit
    defer(lambda: oc_map.cleanup())
    saasherder.populate_desired_state(ri)
    # if saas_file_name is defined, the integration
    # is being called from multiple running instances
    ob.realize_data(dry_run, oc_map, ri, caller=saas_file_name)
    if ri.has_error_registered():
        sys.exit(1)
def run(dry_run=False, thread_pool_size=10, defer=None):
    """Deploy saas-file desired state and notify Slack about the outcome."""
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    aws_accounts = queries.get_aws_accounts()
    gl = GitLabApi(instance, settings=settings)
    saas_files = queries.get_saas_files()
    saasherder = SaasHerder(saas_files,
                            gitlab=gl,
                            integration=QONTRACT_INTEGRATION,
                            integration_version=QONTRACT_INTEGRATION_VERSION,
                            settings=settings)
    ri, oc_map = ob.fetch_current_state(
        namespaces=saasherder.namespaces,
        thread_pool_size=thread_pool_size,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION)
    defer(lambda: oc_map.cleanup())
    saasherder.populate_desired_state(ri)
    # deletion is only enabled when no errors were registered
    enable_deletion = not ri.has_error_registered()
    ob.realize_data(dry_run, oc_map, ri, enable_deletion=enable_deletion)
    saasherder.slack_notify(dry_run, aws_accounts, ri)
def run(dry_run, gitlab_project_id):
    """Drain the SQS queue and submit each message as a GitLab MR."""
    settings = queries.get_app_interface_settings()
    accounts = queries.get_aws_accounts()
    sqs_cli = SQSGateway(accounts, settings=settings)
    instance = queries.get_gitlab_instance()
    saas_files = queries.get_saas_files_minimal()
    gitlab_cli = GitLabApi(instance, project_id=gitlab_project_id,
                           settings=settings, saas_files=saas_files)
    while True:
        messages = sqs_cli.receive_messages()
        logging.info('received %s messages', len(messages))
        if not messages:
            break
        # Let's first delete all the message we received,
        # otherwise they will come back in 30s.
        for message in messages:
            sqs_cli.delete_message(str(message[0]))
        # Time to process the messages. Any failure here is not
        # critical, even though we already deleted the messaged,
        # since the producers will keep re-sending the message
        # until the MR gets merged to app-interface
        for message in messages:
            receipt_handle, body = message[0], message[1]
            logging.info('received message %s with body %s',
                         receipt_handle[:6], json.dumps(body))
            if not dry_run:
                merge_request = mr.init_from_sqs_message(body)
                merge_request.submit_to_gitlab(gitlab_cli=gitlab_cli)
def get_gitlab_api():
    """Return a GitLabApi bound to the app-interface GitLab instance."""
    settings = queries.get_app_interface_settings()
    return GitLabApi(queries.get_gitlab_instance(), settings=settings)
def run(dry_run, thread_pool_size=10, saas_file_name=None, env_name=None,
        defer=None):
    """Deploy saas-file targets and optionally publish events to Slack."""
    saas_files = queries.get_saas_files(saas_file_name, env_name)
    if not saas_files:
        logging.error('no saas files found')
        sys.exit(1)
    instance = queries.get_gitlab_instance()
    # only the Jenkins instances referenced by the selected saas files
    desired_jenkins_instances = [s['instance']['name'] for s in saas_files]
    jenkins_map = jenkins_base.get_jenkins_map(
        desired_instances=desired_jenkins_instances)
    settings = queries.get_app_interface_settings()
    try:
        gl = GitLabApi(instance, settings=settings)
    except Exception:
        # allow execution without access to gitlab
        # as long as there are no access attempts.
        gl = None
    saasherder = SaasHerder(saas_files,
                            thread_pool_size=thread_pool_size,
                            gitlab=gl,
                            integration=QONTRACT_INTEGRATION,
                            integration_version=QONTRACT_INTEGRATION_VERSION,
                            settings=settings,
                            jenkins_map=jenkins_map)
    if not saasherder.valid:
        sys.exit(1)
    if len(saasherder.namespaces) == 0:
        logging.warning('no targets found')
        sys.exit(0)
    ri, oc_map = ob.fetch_current_state(
        namespaces=saasherder.namespaces,
        thread_pool_size=thread_pool_size,
        integration=QONTRACT_INTEGRATION,
        integration_version=QONTRACT_INTEGRATION_VERSION,
        init_api_resources=True)
    # make sure cluster clients are cleaned up on exit
    defer(lambda: oc_map.cleanup())
    saasherder.populate_desired_state(ri)
    # if saas_file_name is defined, the integration
    # is being called from multiple running instances
    actions = ob.realize_data(dry_run, oc_map, ri, caller=saas_file_name,
                              wait_for_namespace=True,
                              no_dry_run_skip_compare=(
                                  not saasherder.compare),
                              take_over=saasherder.take_over)
    if ri.has_error_registered():
        sys.exit(1)
    # send human readable notifications to slack
    # we only do this if:
    # - this is not a dry run
    # - there is a single saas file deployed
    # - output is 'events'
    # - no errors were registered
    if not dry_run and len(saasherder.saas_files) == 1:
        saas_file = saasherder.saas_files[0]
        slack_info = saas_file.get('slack')
        if slack_info and actions and slack_info.get('output') == 'events':
            slack = init_slack(slack_info, QONTRACT_INTEGRATION)
            for action in actions:
                message = \
                    f"[{action['cluster']}] " + \
                    f"{action['kind']} {action['name']} {action['action']}"
                slack.chat_post_message(message)
def act(repo, dry_run, instance, settings):
    """Review the repo's open MRs against its OWNERS: add the approval
    label when fully approved, otherwise remove it and publish a report."""
    gitlab_cli = GitLabApi(instance, project_url=repo, settings=settings)
    project_owners = RepoOwners(git_cli=gitlab_cli)
    for mr in gitlab_cli.get_merge_requests(state=MRState.OPENED):
        mr_approval = MRApproval(gitlab_client=gitlab_cli,
                                 merge_request=mr,
                                 owners=project_owners,
                                 dry_run=dry_run)
        # MRs without a resolvable top commit timestamp are skipped
        if mr_approval.top_commit_created_at is None:
            _LOG.info([
                f'Project:{gitlab_cli.project.id} '
                f'Merge Request:{mr.iid} '
                f'- skipping'
            ])
            continue
        approval_status = mr_approval.get_approval_status()
        if approval_status['approved']:
            if mr_approval.has_approval_label():
                # nothing to do, label already present
                _LOG.info([
                    f'Project:{gitlab_cli.project.id} '
                    f'Merge Request:{mr.iid} '
                    f'- already approved'
                ])
                continue
            _LOG.info([
                f'Project:{gitlab_cli.project.id} '
                f'Merge Request:{mr.iid} '
                f'- approving now'
            ])
            if not dry_run:
                gitlab_cli.add_label_to_merge_request(mr.iid, APPROVAL_LABEL)
            continue
        # from here on the MR is NOT approved: drop any stale label
        if not dry_run:
            if mr_approval.has_approval_label():
                _LOG.info([
                    f'Project:{gitlab_cli.project.id} '
                    f'Merge Request:{mr.iid} '
                    f'- removing approval'
                ])
                gitlab_cli.remove_label_from_merge_request(
                    mr.iid, APPROVAL_LABEL)
        if approval_status['report'] is not None:
            _LOG.info([
                f'Project:{gitlab_cli.project.id} '
                f'Merge Request:{mr.iid} '
                f'- publishing approval report'
            ])
            if not dry_run:
                gitlab_cli.remove_label_from_merge_request(
                    mr.iid, APPROVAL_LABEL)
                mr.notes.create({'body': approval_status['report']})
            continue
        _LOG.info([
            f'Project:{gitlab_cli.project.id} '
            f'Merge Request:{mr.iid} '
            f'- not fully approved'
        ])
class GitlabForkCompliance:
    """Check that a fork-based MR complies with bot access and branch rules.

    Error conditions are encoded as bit flags OR-ed into ``exit_code``,
    which is used as the process exit status.
    """

    OK = 0x0000
    ERR_MASTER_BRANCH = 0x0001
    ERR_NOT_A_MEMBER = 0x0002
    ERR_NOT_A_MAINTAINER = 0x0004

    def __init__(self, project_id, mr_id, maintainers_group):
        self.exit_code = self.OK
        self.maintainers_group = maintainers_group
        self.instance = queries.get_gitlab_instance()
        self.settings = queries.get_app_interface_settings()
        # client bound to the upstream (target) project
        self.gl_cli = GitLabApi(self.instance, project_id=project_id,
                                settings=self.settings)
        self.mr = self.gl_cli.get_merge_request(mr_id)
        # client bound to the fork the MR originates from
        self.src = GitLabApi(self.instance,
                             project_id=self.mr.source_project_id,
                             settings=self.settings)

    def run(self):
        """Run all compliance checks and exit with the accumulated status."""
        self.exit_code |= self.check_branch()
        self.exit_code |= self.check_bot_access()
        if self.exit_code:
            sys.exit(self.exit_code)
        # At this point, we know that the bot is a maintainer, so
        # we check if all the maintainers are in the fork, adding those
        # who are not
        group = self.gl_cli.gl.groups.get(self.maintainers_group)
        maintainers = group.members.list()
        project_maintainers = self.src.get_project_maintainers()
        for member in maintainers:
            if member.username in project_maintainers:
                continue
            LOG.info([f'adding {member.username} as maintainer'])
            user_payload = {'user_id': member.id,
                            'access_level': MAINTAINER_ACCESS}
            member = self.src.project.members.create(user_payload)
            member.save()
        # Last but not least, we remove the blocked label, in case
        # it is set
        mr_labels = self.gl_cli.get_merge_request_labels(self.mr.iid)
        if BLOCKED_LABEL in mr_labels:
            self.gl_cli.remove_label_from_merge_request(self.mr.iid,
                                                        BLOCKED_LABEL)
        sys.exit(self.exit_code)

    def check_branch(self):
        # The Merge Request must NOT use 'master' as the source branch
        if self.mr.source_branch == 'master':
            self.handle_error('source branch can not be master', MSG_BRANCH)
            return self.ERR_MASTER_BRANCH
        return self.OK

    def check_bot_access(self):
        # The bot needs access to the fork project
        try:
            project_bot = self.src.project.members.get(self.gl_cli.user.id)
        except GitlabGetError:
            self.handle_error('access denied for user {bot}', MSG_ACCESS)
            return self.ERR_NOT_A_MEMBER
        # The bot has to be a maintainer of the fork project
        if not project_bot or project_bot.access_level != MAINTAINER_ACCESS:
            self.handle_error('{bot} is not a maintainer in the fork project',
                              MSG_ACCESS)
            return self.ERR_NOT_A_MAINTAINER
        return self.OK

    def handle_error(self, log_msg, mr_msg):
        # flag the MR as blocked and leave an explanatory comment
        LOG.error([log_msg.format(bot=self.gl_cli.user.username)])
        self.gl_cli.add_label_to_merge_request(self.mr.iid, BLOCKED_LABEL)
        comment = mr_msg.format(user=self.mr.author['username'],
                                bot=self.gl_cli.user.username,
                                project_name=self.gl_cli.project.name)
        self.mr.notes.create({'body': comment})
def init_gitlab(gitlab_project_id):
    """Create a GitLabApi client for the given project id."""
    settings = queries.get_app_interface_settings()
    instance = queries.get_gitlab_instance()
    return GitLabApi(instance, project_id=gitlab_project_id,
                     settings=settings)
def run(dry_run):
    """Approve or un-approve open MRs on all owned repos based on OWNERS."""
    instance = queries.get_gitlab_instance()
    settings = queries.get_app_interface_settings()
    repos = queries.get_repos_gitlab_owner(server=instance['url'])
    for repo in repos:
        gitlab_cli = GitLabApi(instance, project_url=repo, settings=settings)
        project_owners = RepoOwners(git_cli=gitlab_cli)
        for mr in gitlab_cli.get_merge_requests(state=MRState.OPENED):
            mr_approval = MRApproval(gitlab_client=gitlab_cli,
                                     merge_request=mr,
                                     owners=project_owners,
                                     dry_run=dry_run)
            approval_status = mr_approval.get_approval_status()
            if approval_status['approved']:
                if mr_approval.has_approval_label():
                    # nothing to do, label already present
                    _LOG.info([
                        f'Project:{gitlab_cli.project.id} '
                        f'Merge Request:{mr.iid} '
                        f'- already approved'
                    ])
                    continue
                _LOG.info([
                    f'Project:{gitlab_cli.project.id} '
                    f'Merge Request:{mr.iid} '
                    f'- approving now'
                ])
                if not dry_run:
                    gitlab_cli.add_label_to_merge_request(
                        mr.iid, APPROVAL_LABEL)
                continue
            # from here on the MR is NOT approved: drop any stale label
            if not dry_run:
                if mr_approval.has_approval_label():
                    _LOG.info([
                        f'Project:{gitlab_cli.project.id} '
                        f'Merge Request:{mr.iid} '
                        f'- removing approval'
                    ])
                    gitlab_cli.remove_label_from_merge_request(
                        mr.iid, APPROVAL_LABEL)
            if approval_status['report'] is not None:
                _LOG.info([
                    f'Project:{gitlab_cli.project.id} '
                    f'Merge Request:{mr.iid} '
                    f'- publishing approval report'
                ])
                if not dry_run:
                    gitlab_cli.remove_label_from_merge_request(
                        mr.iid, APPROVAL_LABEL)
                    mr.notes.create({'body': approval_status['report']})
                continue
            _LOG.info([
                f'Project:{gitlab_cli.project.id} '
                f'Merge Request:{mr.iid} '
                f'- not fully approved'
            ])