class SimpleTriager(DefaultTriager):

    def run(self):
        # create the fileindexer
        fi_cache = '/tmp/ansibullbot/cache/{}.files.checkout'.format(self.repopath)
        fi_cache = os.path.expanduser(fi_cache)
        self.file_indexer = FileIndexer(checkoutdir=fi_cache, repo=self.repopath)
        self.file_indexer.update()

        # make a repo object for the github api
        repo = self.ghw.get_repo(self.repopath)

        # map for issue type to label
        try:
            label_map = repo.get_label_map()
        except UnknownObjectException:
            label_map = {}

        # collect issues
        if not self.args.number:
            issues = repo.get_issues()
        else:
            issue = repo.get_issue(int(self.args.number))
            issues = [issue]

        # iterate through issues and apply actions
        for issue in issues:
            logging.info('triaging %s' % issue.html_url)
            actions = DefaultActions()

            # wrap the issue for extra magic
            iw = IssueWrapper(github=self.ghw, repo=repo, issue=issue,
                              cachedir=self.cachedir, file_indexer=self.file_indexer)

            # what did the submitter provide in the body?
            td = iw.template_data
            missing = iw.missing_template_sections
            if missing and 'needs_template' not in iw.labels:
                actions.newlabel.append('needs_template')

            # what type of issue is this?
            if 'issue type' in td:
                mapped_label = label_map.get(td['issue type'])
                if mapped_label and mapped_label not in iw.labels:
                    actions.newlabel.append(mapped_label)

            pprint(vars(actions))
            self.apply_actions(iw, actions)
class SimpleTriager(DefaultTriager):

    def __init__(self):
        super(SimpleTriager, self).__init__()

        # get valid labels
        logging.info('getting labels')
        self.valid_labels = self.get_valid_labels(self.repo)

    @classmethod
    def create_parser(cls):
        parser = DefaultTriager.create_parser()
        parser.description = "Triage issue and pullrequest queues for any github repo.\n" \
            " (NOTE: only useful if you have commit access to" \
            " the repo in question.)"
        parser.add_argument("--pr", "--id", type=str, dest="number",
                            help="Triage only the specified pr|issue (separated by commas)")
        parser.add_argument("--repo", "-r", type=str, required=True,
                            help="Github repo to triage (defaults to all)")
        return parser

    def run(self):
        # create the fileindexer
        fi_cache = '/tmp/ansibullbot/cache/{}.files.checkout'.format(self.repo)
        fi_cache = os.path.expanduser(fi_cache)
        self.file_indexer = FileIndexer(botmetafile=self.botmetafile,
                                        checkoutdir=fi_cache,
                                        repo=self.repo)
        self.file_indexer.update()

        # make a repo object for the github api
        repo = self.ghw.get_repo(self.repo)

        # map for issue type to label
        try:
            label_map = repo.get_label_map()
        except UnknownObjectException:
            label_map = {}

        # collect issues
        if not self.number:
            issues = repo.get_issues()
        else:
            issue = repo.get_issue(int(self.number))
            issues = [issue]

        # iterate through issues and apply actions
        for issue in issues:
            logging.info('triaging %s' % issue.html_url)
            actions = DefaultActions()

            # wrap the issue for extra magic
            cachedir = os.path.join(self.cachedir_base, self.repo)
            iw = IssueWrapper(github=self.ghw, repo=repo, issue=issue,
                              cachedir=cachedir, file_indexer=self.file_indexer)

            # what did the submitter provide in the body?
            td = iw.template_data
            missing = iw.missing_template_sections
            if missing and 'needs_template' not in iw.labels:
                actions.newlabel.append('needs_template')

            # what type of issue is this?
            if 'issue type' in td:
                mapped_label = label_map.get(td['issue type'])
                if mapped_label and mapped_label not in iw.labels:
                    actions.newlabel.append(mapped_label)

            pprint(vars(actions))
            self.apply_actions(iw, actions)
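# Usage sketch (an assumption, not part of the original module): DefaultTriager
# is expected to consume the --repo/--pr options built by create_parser() when
# it is instantiated, so an entry point could look roughly like this.
if __name__ == '__main__':
    triager = SimpleTriager()
    triager.run()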
def main():
    # need a file indexer to get the template
    FI = FileIndexer(checkoutdir='/tmp/fileindexer')
    FI.update()

    # get the expected sections
    tf_content = FI.get_file_content('.github/ISSUE_TEMPLATE.md')
    tf_sections = extract_template_sections(tf_content, header='#####')
    required_sections = [x.lower() for x in tf_sections.keys()
                         if tf_sections[x]['required']]
    if not required_sections:
        required_sections = ['issue type', 'component name',
                             'ansible version', 'summary']
    section_order = list(tf_sections.items())
    section_order = sorted(section_order, key=lambda x: x[1]['index'])
    section_order = [x[0] for x in section_order]

    # all known possibilities
    section_names = ['PLUGIN NAME', 'ANSIBLE CONFIGURATION'] + section_order + ['ENVIRONMENT']

    # get the issue numbers to work on
    script = "#!/bin/bash\n"
    script += "\n"
    script += "URL='https://github.com/ansible/ansible/issues?utf8=%E2%9C%93&q=is%3Aopen%20label%3Aneeds_template%20author%3Aansibot'\n"
    script += "PYTHONPATH=$(pwd) scripts/scrape_github_issues_url $URL\n"
    (rc, so, se) = runscript(script)
    numbers = json.loads(so)
    numbers = sorted(set(numbers))

    for idn, number in enumerate(numbers):
        print('{} {}|{}'.format(number, idn, len(numbers)))
        fixed = []

        # fetch the issue data
        iurl = 'https://api.github.com/repos/ansible/ansible/issues/{}'.format(number)
        irr = requests.get(iurl, headers=get_headers())
        idata = irr.json()

        # fetch the comments, following pagination if necessary
        curl = idata['comments_url']
        crr = requests.get(curl, headers=get_headers())
        comments = crr.json()
        if crr.links:
            print('paginated comments')
            nextp = [x for x in crr.links.items() if x[1]['rel'] == 'next'][0][1]['url']
            while nextp:
                nrr = requests.get(nextp, headers=get_headers())
                comments += nrr.json()
                try:
                    nextp = [x for x in nrr.links.items() if x[1]['rel'] == 'next'][0][1]['url']
                except IndexError:
                    nextp = None

        newbody = idata['body']

        # extract the template sections
        ts = run_template_extract(FI, newbody, number, 'issue', section_names)

        # cleanup
        if 'environment' in ts:
            ts['os / environment'] = ts['environment']
            ts.pop('environment', None)

        # what is missing?
        missing = [x for x in required_sections if x.lower() not in ts]
        if not missing:
            print('{} nothing missing'.format(number))
            continue

        # simple sed for this one
        if missing == ['component name'] and 'plugin name' in newbody.lower():
            if 'PLUGIN NAME' in newbody:
                newbody = newbody.replace('PLUGIN NAME', 'COMPONENT NAME')
            if 'Plugin Name' in newbody:
                newbody = newbody.replace('Plugin Name', 'Component Name')
            if 'plugin name' in newbody:
                newbody = newbody.replace('plugin name', 'component name')
            print('{} sed/plugin name/component name'.format(number))
            cr = requests.patch(iurl, headers=get_headers(), data=json.dumps({'body': newbody}))
            if cr.status_code != 200:
                print('failed to edit body {}'.format(idata['html_url']))
                import epdb; epdb.st()
            continue

        if 'summary' in missing:
            ts['summary'] = newbody
            missing.remove('summary')
            fixed.append('summary')

        if 'issue type' in missing:
            # get migrated issue
            try:
                mi = get_migrated_issue(idata['body'])
            except Exception as e:
                print(e)
                mi = None
            if mi:
                itype = None
                # get issue type label from migrated issue
                mi_labels = [x['name'] for x in mi['labels']]
                if 'bug_report' in mi_labels:
                    itype = 'Bug Report'
                elif 'feature_idea' in mi_labels:
                    itype = 'Feature Idea'
                elif 'docs_report' in mi_labels:
                    itype = 'Documentation Report'
                if itype is not None:
                    ts['issue type'] = itype
                    missing.remove('issue type')
                    fixed.append('issue type')

        if 'component name' in missing:
            component = find_component(idata, ts, newbody, comments)
            if component:
                missing.remove('component name')
                ts['component name'] = component
                fixed.append('component name')

        if 'ansible version' in missing:
            labels = [x['name'] for x in idata['labels']]
            labels = [x for x in labels if x.startswith('affects_')]
            labels = sorted(set(labels))
            if labels:
                version = labels[0].replace('affects_', '')
            else:
                version = "N/A"
            missing.remove('ansible version')
            ts['ansible version'] = version
            fixed.append('ansible version')

        if not missing:
            print('# {}'.format(idata['html_url']))
            print('# title: {}'.format(idata['title']))
            print('# component: {}'.format(ts['component name']))
            print('# version: {}'.format(ts['ansible version']))
            print('# fixed: {}'.format(fixed))
            newbody = render_body(ts, section_order)
            print('<====================================================>')
            print(newbody)
            print('<====================================================>')
            import epdb; epdb.st()
            cr = requests.patch(iurl, headers=get_headers(), data=json.dumps({'body': newbody}))
            if cr.status_code != 200:
                print('failed to edit body {}'.format(idata['html_url']))
                import epdb; epdb.st()
            continue

        print('no solution(s) for {} {}'.format(idata['html_url'], missing))

    print('DONE')
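# For reference, a minimal sketch of the get_headers() helper used by the
# requests calls above. This is an assumption about its shape, not the original
# implementation; GITHUB_TOKEN is a hypothetical environment variable name. The
# GitHub v3 API accepts a token-based Authorization header for these reads and
# PATCH edits.
def get_headers():
    headers = {}
    token = os.environ.get('GITHUB_TOKEN')
    if token:
        headers['Authorization'] = 'token {}'.format(token)
    return headers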