def update_issue_comments(self, issue: Issue, comments_data: List[Dict], dry_run: bool) -> None: issue_id = issue.number if dry_run: print(f"Would update issue {issue_id} comments") return num_comments = len(comments_data) existing_comments = list(issue.get_comments()) # Create or update comments for comment_num, comment_data in enumerate(comments_data): print( f"Set comment {comment_num + 1}/{num_comments} of github issue #{issue_id}..." ) comment_body = comment_data["body"] if comment_num < len(existing_comments): existing_comments[comment_num].edit(comment_body) else: issue.create_comment(comment_body) # Delete comments in excess comments_to_delete = existing_comments[num_comments:] for i, gcomment in enumerate(comments_to_delete): print( f"Delete extra GitHub comment {i + 1}/{len(comments_to_delete)} of issue #{issue_id}..." ) gcomment.delete()
def check_if_removed_from_bugblog(bbt: Match, b: Tag, issue: Issue) -> None:
    """Reconcile an issue with the bug blog table it was created from.

    ``bbt`` is the 'Bug Blog Text' match from the issue body; ``b`` is the
    parsed bug-blog HTML table. If the blog row still exists the issue is
    left open (updating its text when only autocard notation differs);
    if the row is gone the issue is commented on and closed.
    """
    if bbt is not None:
        text = strings.remove_smartquotes(bbt.group(1).strip())
        for row in b.find_all('tr'):
            data = row.find_all('td')
            rowtext = strings.remove_smartquotes(data[1].text.strip())
            if rowtext == text:
                # Exact match: the bug is still listed; nothing to do.
                break
            elif strip_squarebrackets(rowtext) == strip_squarebrackets(text):
                # Fix this
                # Same row modulo [autocard] brackets: sync the issue body
                # to the blog's current notation.
                print(
                    "Issue #{id}'s bug blog text has differing autocard notation."
                    .format(id=issue.number))
                body = re.sub(BBT_REGEX, 'Bug Blog Text: {0}'.format(rowtext),
                              issue.body, flags=re.MULTILINE)
                issue.edit(body=body)
                print('Updated to `{0}`'.format(rowtext))
                break
        else:
            # for/else: no row matched, so the bug left the blog — close it.
            print('{id} is fixed!'.format(id=issue.number))
            repo.create_comment(
                issue, 'This bug has been removed from the bug blog!')
            issue.edit(state='closed')
def get_merged_at(update_request: Issue) -> typing.Optional[int]:
    """Get merged_at time (unix seconds) if the issue is a pull request.

    Returns None when the issue is not a pull request or the pull request
    has not been merged yet.
    """
    if update_request.pull_request:
        # Fetch the pull-request view once: in PyGithub each
        # as_pull_request() call issues a separate API request, and the
        # original called it twice.
        pull = update_request.as_pull_request()
        if pull.merged_at:
            return int(pull.merged_at.timestamp())
    return None
def get_events_for_document(raw_document):
    """Attach the raw event payloads of a GitHub issue to ``raw_document``.

    Builds an Issue wrapper from the stored raw data, then collects every
    timeline event's raw payload under raw_document['events'].
    """
    gh = Github(settings.GH_TOKEN)
    issue = Issue(gh._Github__requester, {}, raw_document, completed=True)
    events_data = raw_document['events'] = []
    for event in issue.get_events():
        events_data.append(event.raw_data)
        # Throttle between events so we stay under the API rate limit.
        wait_for_rate(event)
    return raw_document
def find_bbt_in_issue_title(issue: Issue, known_issues: Tag) -> None:
    """If the issue title matches a bug-blog row, append that row's text to the body.

    Titles and rows are compared after stripping [brackets] and spaces.
    """
    normalized_title = strip_squarebrackets(issue.title).replace(' ', '')
    for row in known_issues.find_all('tr'):
        cells = row.find_all('td')
        cell_text = cells[1].text.strip()
        if strip_squarebrackets(cell_text).replace(' ', '') != normalized_title:
            continue
        updated = issue.body + '\nBug Blog Text: {0}'.format(cell_text)
        if updated != issue.body:
            issue.edit(body=updated)
        return
def _upload_issue_attachments( self, issue: Issue, attachments: List[Attachment], ) -> None: body = self._upload_attachments( issue=issue, body=issue.body, attachments=attachments, ) issue.edit(body=body, )
def check_for_invalid_card_names(issue: Issue, cards: List[str]) -> None:
    """Toggle the 'Invalid Card Name' label based on whether every card is known.

    Split cards (names containing '//') are skipped, matching the
    original behaviour.
    """
    labels = [lab.name for lab in issue.labels]
    # Hoist the card-name lookup out of the loop: the original called
    # cardnames() once per card.
    known = cardnames()
    fail = any('//' not in c and c not in known for c in cards)
    if fail and 'Invalid Card Name' not in labels:
        issue.add_to_labels('Invalid Card Name')
    elif not fail and 'Invalid Card Name' in labels:
        issue.remove_from_labels('Invalid Card Name')
def get_events_for_document(raw_document):
    """Populate raw_document['events'] with this issue's raw event data.

    If the issue has been deleted upstream, whatever was gathered so far
    is kept and the function returns normally.
    """
    gh = get_gh_client()
    issue = Issue(gh._Github__requester, {}, raw_document, completed=True)
    events_data = raw_document['events'] = []
    try:
        for event in issue.get_events():
            events_data.append(event.raw_data)
            # Respect the API rate limit between event fetches.
            wait_for_rate(event)
    except UnknownObjectException:
        # If this object no longer exists don't do anything.
        pass
    return raw_document
def close_github_issue(repo: Repository, issue: Issue) -> None:
    """Close a github issue

    Parameters
    ----------
    repo : Repository
        The repository
    issue : Issue
        The issue
    """
    try:
        # Plain string: the original used an f-string with no placeholders
        # (flake8 F541).
        issue.create_comment("Imagery seems to work again.")
        issue.edit(state=ISSUE_CLOSED)
    except Exception as e:
        # Best-effort close: report the failure instead of crashing the caller.
        print(f"Failed to close issue: {e}")
def find_bbt_in_body_or_comments(issue: Issue) -> Optional[str]:
    """Find the 'Bug Blog Text' field in the issue body, falling back to comments.

    When the text is only found in a comment it is copied into the issue
    body. Returns the stripped bug-blog text, or None if absent.
    """
    body = issue.body
    bbt = re.search(BBT_REGEX, issue.body, re.MULTILINE)
    if not bbt:
        for comment in issue.get_comments():
            bbt = re.search(BBT_REGEX, comment.body, re.MULTILINE)
            if bbt is not None:
                # Mirror the text found in a comment into the body itself.
                body += '\nBug Blog Text: {0}'.format(bbt.groups()[0].strip())
                # Stop scanning: the original kept iterating every remaining
                # comment (one API page per batch) even though the match
                # could no longer change.
                break
        if body != issue.body:
            issue.edit(body=body)
    if bbt is not None:
        return bbt.groups()[0].strip()
    return None
def test_metatable_checker_grade_integration_add_success(mocker):
    # Integration test: publishing an all-passing meta-table grade posts a
    # markdown summary comment on the issue.
    MetaResponse = namedtuple('MetaResponse', 'exists item_id item_name geometry_type')
    Grade = namedtuple('Grade', 'check grade issue')
    # Grade a fully-populated meta response (table exists, all fields set).
    grade = MetaTableChecker('table.name', {}).grade(add=True, report_value=MetaResponse(
        True, 'item id', 'item name', 'geometry type'))
    attributes = {}
    issue = Issue(REQUESTER, {}, attributes, True)
    # Spy on the comment call so we can assert on the posted markdown.
    issue.create_comment = spy = mocker.MagicMock()
    conductor.publish_grades(
        {'table.name': [Grade('meta table', grade, issue)]}, True)
    spy.assert_called_once_with('''## conductor results for table.name

| check | status |
| - | :-: |
| meta table | |
| - item id | :+1: |
| - item name | :+1: |
| - geometry type | :+1: |''')
def pr_review(issue: Issue, deadline: int):
    """Return the review deadline for a PR on which ``user`` was asked to review.

    Yields created_at + ``deadline`` days when the global ``user`` is among
    the requested reviewers; otherwise returns None.
    """
    if issue.pull_request is None:
        return None
    pr = issue.as_pull_request()
    requested_reviewers = list(pr.get_review_requests()[0])
    if user in requested_reviewers:
        return issue.created_at + dt.timedelta(days=deadline)
    return None
def format_issue_with_labels(issue: Issue):
    """Render an issue as a markdown digest fragment with its labels.

    Returns markdown containing the linked title, comment count, creation
    date, label links and a body summary (text before '---', else the
    first 150 characters).
    """
    labels = issue.get_labels()
    # Hoist the login lookup: the original called user.get_user() once per
    # label (each call may hit the API).
    login = user.get_user().login
    # Join the label links instead of building the string with += and
    # slicing off the trailing ', ' (quadratic concatenation).
    labels_str = ', '.join(
        '[%s](https://github.com/%s/ghiblog/labels/%s)' % (
            label.name, login, urllib.parse.quote(label.name))
        for label in labels)
    if '---' in issue.body:
        body_summary = issue.body[:issue.body.index('---')]
    else:
        body_summary = issue.body[:150]
    return '''
#### [{0}]({1}) {2} \t {3}
:label: : {4}

{5}

[更多>>>]({1})
---
'''.format(issue.title, issue.html_url,
           sup('%s :speech_balloon:' % issue.comments),
           issue.created_at, labels_str, body_summary)
def create(title, body=None, locked=False, number=1, **kwargs):
    """Build a patched Issue test double carrying the given attributes."""
    target = Issue(Mock(), Mock(), dict(), True)
    # Patch the read-only PyGithub properties onto the double.
    _patch_object(
        monkeypatch,
        target,
        number=number,
        title=title,
        body=body,
        raw_data={'locked': locked},
    )
    return target
def last_commits(g, issue_ob: Issue, fast_mod=False):
    """Collect the commits/PR patches associated with a (non-PR) issue.

    Walks the issue timeline looking for (a) same-repo cross-referenced
    PRs whose body contains a close trigger for this issue, and (b) direct
    commit references in the same repo.

    Returns:
        (repo_full_name, commits) normally, or just a bool when
        ``fast_mod`` is True (whether at least one association exists).
    """
    # Collect the commits associated with the current issue.
    is_pr = issue_ob.pull_request
    # logger.debug(is_pr)
    this_repo = issue_ob.repository.full_name
    commits = []
    flag = False
    if is_pr is None:
        events = issue_ob.get_timeline()
        for event_it in events:
            if event_it.event == 'cross-referenced':
                # A PR in the same repo that says it closes this issue.
                ref_name, _ = util.re_issue(event_it.source.issue.html_url)
                if ref_name == this_repo:
                    if have_close_trigger(event_it.source.issue.body, issue_ob.number):
                        commits.append(
                            event_it.source.issue.pull_request.patch_url)
                        if fast_mod:
                            flag = True
                            break
            if event_it.commit_url:
                # A commit referenced this issue; keep it if it belongs to
                # the same repo and actually touches files.
                repo_name = re.findall(
                    r'https://api\.github\.com/repos/(.*?/[^/]*)/commits/([0-9a-zA-Z]+)',
                    event_it.commit_url)
                tmp_repo, sha = repo_name[0]
                repo = g.get_repo(tmp_repo)
                commit = repo.get_commit(sha)
                if tmp_repo == this_repo and commit.files:
                    commits.append(sha)
                    if fast_mod:
                        flag = True
                        break
    if fast_mod:
        # NOTE(review): the return type differs between fast_mod modes —
        # confirm callers expect a bool here and a tuple below.
        return flag
    return this_repo, commits
def test_publish_sheets_integration_test_remove_all_pass(mocker):
    # Integration test: publishing an all-passing sheet grade (remove mode)
    # posts a markdown summary comment on the issue.
    SheetResponse = namedtuple('SheetResponse', 'valid messages')
    Grade = namedtuple('Grade', 'check grade issue')
    grades = {
        'Description': True,
        'Data Source': True,
        'Deprecated': True,
    }
    grade = GSheetChecker('fake.table', 'id', 'name', 'TESTING').grade(add=False, report_value=SheetResponse(
        True, grades))
    attributes = {}
    issue = Issue(REQUESTER, {}, attributes, True)
    # Spy on the comment call so we can assert on the posted markdown.
    issue.create_comment = spy = mocker.MagicMock()
    conductor.publish_grades(
        {'table.name': [Grade('sheetchecker', grade, issue)]}, True)
    spy.assert_called_once_with('''## conductor results for table.name

| check | status |
| - | :-: |
| sheetchecker | |
| - deprecation issue link | :+1: |''')
def get_issue_comments(self, task: Task, issue: Issue) -> bool:
    """Download and persist every comment of ``issue``.

    Iterates the issue's comments, storing each one via the database
    manager; when the GitHub rate limit is hit, sleeps until the limit
    resets (plus a 20-second safety margin) and resumes.

    Args:
        task (Task): The task that requires the issues data.
        issue (Issue): The issue whose comments are downloaded.

    Returns:
        bool: True once every comment has been stored.
    """
    comments = issue.get_comments()
    iter_comments: Iterator = iter(comments)
    while True:
        try:
            comment: GistComment = next(iter_comments)
            self.__db_manager.create_comment(
                task.repo_dir,
                issue.id,
                comment.id,
                comment.user.login,
                # comment.body may be None; store an empty string instead.
                self.__downloaded_data_cleaner(comment.body or "")
            )
        except StopIteration:
            # All comments consumed.
            return True
        except RateLimitExceededException:
            # Sleep until the rate limit resets (+20s margin), then retry
            # from the same iterator position.
            sleep_time: float = (self.__gh_conn.get_rate_limit_reset() -
                                 datetime.utcnow() + timedelta(0, 20)).total_seconds()
            time.sleep(sleep_time)
            continue
def update_issue_with_comments(self, issue: Issue, issue_data: Dict, dry_run: bool) -> None:
    """Apply stored metadata and comments from ``issue_data`` to ``issue``."""
    meta = issue_data["issue"]
    if dry_run:
        print(f"Would update issue {issue.number} with {meta}")
        return
    assignee = meta["assignee"]
    issue.edit(
        title=meta["title"],
        body=meta["body"],
        labels=meta["labels"],
        state="closed" if meta["closed"] else "open",
        assignees=[assignee] if assignee is not None else [],
    )
    # Comments are synchronised separately after the metadata is in place.
    self.update_issue_comments(issue, issue_data["comments"], dry_run=dry_run)
def update_issue_body(issue: Issue, cards: List[str], see_also: Optional[Match]) -> None:
    """Keep the card-image section of an issue body in sync with its cards.

    Builds the expected '<!-- Images -->' block (rows of 4 affected cards,
    plus rows of 5 'see also' cards at a smaller size) and appends or
    replaces it in the body as needed.
    """
    expected = '<!-- Images --> '
    images = re.search(IMAGES_REGEX, issue.body, re.MULTILINE)
    # Affected cards: rows of 4, 300px tall.
    for row in strings.grouper(4, cards):
        expected = expected + '<img src="https://pennydreadfulmagic.com/image/{0}/" height="300px">'.format('|'.join([urllib.parse.quote(c) for c in row if c is not None]))
    if see_also is not None:
        # 'See also' cards: rows of 5, 250px tall.
        for row in strings.grouper(5, re.findall(REGEX_CARDREF, see_also.group(1))):
            expected = expected + '<img src="https://pennydreadfulmagic.com/image/{0}/" height="250px">'.format('|'.join([urllib.parse.quote(c) for c in row if c is not None]))
    if not images:
        # No image block yet: append one.
        print('Adding Images...')
        body = issue.body + '\n' + expected
        issue.edit(body=body)
    elif images.group(0) != expected:
        # Image block exists but is stale: swap it in place.
        print('Updating images...')
        body = issue.body.replace(images.group(0), expected)
        issue.edit(body=body)
def _add_duplicate_comment(issue: Issue, *, image: str, repo: str, run: str, stacktrace: str) -> IssueComment:
    """Comment on an existing error report."""
    report = _report_body(image=image, repo=repo, run=run, stacktrace=stacktrace)
    return issue.create_comment(f"Probably duplicate error:\n{report}")
def judge_status(self, issue: Issue) -> str:
    """Extend the base advice: flag issues the customer has ignored.

    If the base class produced no advice, the issue carries neither the
    'issue-addressed' nor the 'needs-author-feedback' label, and the most
    recent comment is older than 7 days, advise 'no reply > 7'.
    """
    bot_advice = super().judge_status(issue)
    # Prompt to add `issue-addressed` tag if customer has not replied > 7 days
    issue_labels = [label.name for label in issue.labels]
    if not bot_advice and 'issue-addressed' not in issue_labels and 'needs-author-feedback' not in issue_labels:
        comments = list(issue.get_comments())
        # Guard: an issue without any comments used to raise IndexError
        # on the `[-1]` access here.
        if comments and (datetime.today() - comments[-1].updated_at).days > 7:
            return 'no reply > 7'
    return bot_advice
def get_last_comment(issue: Issue) -> Optional[IssueComment]:
    """Return the newest comment on ``issue`` (by created_at), or None."""
    newest: Optional[IssueComment] = None
    for candidate in issue.get_comments():
        # First comment seen, or strictly newer than the best so far.
        if newest is None or candidate.created_at > newest.created_at:
            newest = candidate
    return newest
def get_issue_info(issue: Issue):
    """Print a one-line summary of the issue and its most recent activity time."""
    url = issue.html_url
    title = issue.title
    # Most recent activity: the newer of the issue's own update time and
    # its latest comment time.
    latest = issue.updated_at or issue.created_at
    comments_latest = get_comments_last_time(issue)
    if comments_latest:
        latest = max(latest, comments_latest)
    # NOTE(review): assumes the issue has at least one label —
    # get_labels()[0] raises otherwise; confirm with callers.
    print(title, url, latest, issue.get_labels()[0])
def get_disposition(msc: Issue) -> str:
    """Return the textual disposition of a MSC ('merge', 'close' or 'postpone').

    Labels look like 'disposition:merge'; the first label containing one
    of the known disposition words wins. Returns None when no label matches.
    """
    for label in msc.get_labels():
        for candidate in ('merge', 'close', 'postpone'):
            if candidate in label.name:
                return candidate
def test_extract_metadata_from_issue_body_returns_none_when_not_found():
    # A body with no embedded metadata should yield None.
    body = 'text\nmore text\n<!-- some comments -->'
    issue = Issue(REQUESTER, {}, {'body': body}, True)
    assert conductor.extract_metadata_from_issue_body(issue, notify=False) is None
def _already_commented(issue: Issue, *, repo: str) -> bool: """Check whether this repository has already commented on the issue.""" target = f"Repo: {repo}" if target in issue.body: return True for comment in issue.get_comments(): if target in comment.body: return True return False
def fix_user_errors(issue: Issue) -> None: body = issue.body # People sometimes put the affected cards on the following line. Account for that. body = re.sub(BAD_AFFECTS_REGEX, 'Affects: [', body) # People sometimes neglect Affects all-together, and only put cards in the title. affects = re.search(AFFECTS_REGEX, body, re.MULTILINE) if affects is None: cards = re.findall(REGEX_CARDREF, issue.title) cards = [c for c in cards] body = body + '\nAffects: ' + ''.join(['[' + c + ']' for c in cards]) if re.search(strings.REGEX_SEARCHREF, body): def do_search(m): search = m.group(1) n, cards, warnings = fetcher.search_scryfall(search) if n == 0 or warnings: return m.group(0) return ', '.join([f'[{c}]' for c in cards]) body = re.sub(strings.REGEX_SEARCHREF, do_search, body) # We had a bug where the above triggered infinitely. Clean it up. extra_affects = re.findall(AFFECTS_REGEX, body, re.MULTILINE) if len(extra_affects) > 1: lines = body.split('\n') if re.match(AFFECTS_REGEX, lines[-1]): body = '\n'.join(lines[:-1]) # People are missing the bullet points, and putting info on the following line instead. body = re.sub(r' - \r?\n', '', body) # Some people ignore the request for screenshots. body = body.replace('(Attach a screenshot or video here)', 'Currently Unconfirmed.') if repo.is_issue_from_bug_blog(issue): bbt = re.search(strings.BBT_REGEX, issue.body, re.MULTILINE) if not get_affects(issue) and bbt: cards = strings.get_cards_from_string(bbt.group(0)) if cards: cardlist = ', '.join([f'[{c}]' for c in cards]) body = strings.set_body_field(body, 'Affects', cardlist) # Push changes. if body != issue.body: issue.edit(body=body) # People are putting [cardnames] in square quotes, despite the fact we prefer Affects: now. title = strings.strip_squarebrackets(issue.title) if title != issue.title: print('Changing title of #{0} to "{1}"'.format(issue.number, title)) issue.edit(title=title)
def process_issue(*, issue: Issue, settings: Settings) -> None:
    """Apply the configured stale-issue keywords to a single issue.

    For each configured keyword label present on the issue: if someone
    commented after the label was added, optionally remove the label;
    otherwise, once the configured delay has elapsed since the last
    comment, close the issue.
    """
    logging.info(f"Processing issue: #{issue.number}")
    label_strs = set([label.name for label in issue.get_labels()])
    events = list(issue.get_events())
    labeled_events = get_labeled_events(events)
    last_comment = get_last_comment(issue)
    for keyword, keyword_meta in settings.input_config.items():
        # Check closable delay, if enough time passed and the issue could be closed
        closable_delay = (
            last_comment is None
            or (datetime.utcnow() - keyword_meta.delay) > last_comment.created_at
        )
        # Check label, optionally removing it if there's a comment after adding it
        if keyword in label_strs:
            logging.info(f'Keyword: "{keyword}" in issue labels')
            keyword_event = get_last_event_for_label(
                labeled_events=labeled_events, label=keyword
            )
            if (
                last_comment
                and keyword_event
                and last_comment.created_at > keyword_event.created_at
            ):
                logging.info(
                    f"Not closing as the last comment was written after adding the "
                    f'label: "{keyword}"'
                )
                if keyword_meta.remove_label_on_comment:
                    logging.info(f'Removing label: "{keyword}"')
                    issue.remove_from_labels(keyword)
            elif closable_delay:
                close_issue(
                    issue=issue,
                    keyword_meta=keyword_meta,
                    keyword=keyword,
                    label_strs=label_strs,
                )
                break
            else:
                # Typo fix: this log message previously read "clossing".
                logging.info(
                    f"Not closing issue: #{issue.number} as the delay hasn't been reached: {keyword_meta.delay}"
                )
def create(title, body=None, locked=False, number=1, **kwargs):
    """Build an Issue test double from raw attributes.

    Extra keyword arguments override the named defaults. A unique 'url'
    is derived from the attribute values because issues compare by url.
    """
    attrs = {
        'title': title,
        'body': body,
        'number': number,
        'locked': locked,
    }
    attrs.update(kwargs)
    # Set url to a unique value, because that's how issues are compared.
    attrs['url'] = str(hash(tuple(attrs.values())))
    return Issue(Mock(), Mock(), attrs, True)
def _extract_comments( self, github_issue: Issue, exclude_comments: Optional[List[str]] = None, ) -> List[TrackerIssueComment]: return [ self._extract_comment(github_comment=github_comment, ) for github_comment in github_issue.get_comments() if exclude_comments is None or str(github_comment.id) not in exclude_comments ]