def prepare_patch_set(topic):
    gerrit_url = "https://gerrit.xxxxxxxx.com/"
    GERRIT_PATCH_SET = str()
    # username and password are expected to be defined in the enclosing module scope
    auth = HTTPBasicAuth(username, password)
    rest = GerritRestAPI(url=gerrit_url, auth=auth)
    # changes = rest.get("/changes/?q=owner:self%20status:open")
    changes = rest.get("/changes/?q=topic:" + topic)
    for change in changes:
        # if change["status"] != "NEW" or change["mergeable"] == False:
        if change["status"] != "NEW":
            print(change["subject"] + " " + change["project"] + " " +
                  str(change["_number"]) + " is not mergeable or already merged")
            exit(1)
        else:
            # print(change["subject"] + " " + change["project"] + " " + str(change["_number"]))
            # info = rest.get("/changes/?q=change:%d&o=CURRENT_REVISION&o=CURRENT_COMMIT&o=CURRENT_FILES" % change["_number"])
            changeId = change["_number"]
            info = rest.get(
                "/changes/?q=change:%d&o=CURRENT_REVISION&o=CURRENT_COMMIT" % changeId)
            repo = change["project"]
            currefId = (
                info[0]["revisions"][info[0]["current_revision"]]["_number"])
            GERRIT_PATCH_SET = GERRIT_PATCH_SET + " | " + (
                repo + " " + str(change["_number"]) + "/" + str(currefId))
    return GERRIT_PATCH_SET
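# Minimal usage sketch for prepare_patch_set() above. The module-level
# credentials and the topic name are hypothetical placeholders, not part of
# the original snippet.
from pygerrit2 import GerritRestAPI, HTTPBasicAuth

username = "builder"                          # assumed module-level credential
password = "http-password-from-gerrit-settings"  # assumed module-level credential

# For a topic whose changes are all still NEW, the result is a string such as
# " | aaa/bbbb 12345/3 | ccc/dddd 12346/1" (project, change number/patch set).
patch_set = prepare_patch_set("my-feature-topic")
print(patch_set)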
def get_repositories(username, password, gerrit_url):
    auth = HTTPBasicAuth(username, password)
    rest = GerritRestAPI(url=gerrit_url, auth=auth)

    # get server information to derive clone commands
    logging.debug("getting server information..")
    server_info = rest.get("/config/server/info")
    clone_command = server_info["download"]["schemes"]["ssh"]["clone_commands"]
    clone_command = clone_command["Clone with commit-msg hook"]

    # get all projects
    logging.debug("getting project information..")
    repositories = rest.get("/projects/")

    result = []
    for repository_name in repositories.keys():
        project_clone_command = clone_command \
            .replace("${project}", repository_name) \
            .replace("${project-base-name}", repository_name)
        result.append({
            "name": repository_name,
            "scm": "git",
            "source": "gerrit",
            "source_username": username,
            "clone_command": project_clone_command
        })
    return result
def _gerrit_get(self, endpoint_url):
    auth = HTTPBasicAuth(self.username, self.password)
    rest = GerritRestAPI(url=self.url, auth=auth)
    try:
        response_body = rest.get(endpoint_url)
    except HTTPError as e:
        msg = "Failed to get response from Gerrit URL %s: %s" % (
            endpoint_url, str(e))
        log.error(msg)
        raise exceptions.HTTPError
    return response_body
class GerritControllerViaWeb(GerritController):
    def __init__(self, project, max_no):
        super(GerritControllerViaWeb, self).__init__(project, max_no)
        self.rest = GerritRestAPI(url=f'https://{self.project["url"]}',
                                  auth=Anonymous())

    @backoff.on_exception(backoff.expo,
                          requests.exceptions.ConnectionError,
                          max_time=1000000)
    def _get(self):
        # TODO: fetch data (q=no)
        changes = self.rest.get(
            "changes/?q=is:open&q=is:close&q=all&o=DETAILED_ACCOUNTS&o=ALL_REVISIONS&o=ALL_COMMITS&o=ALL_FILES&o=MESSAGES",
            headers={'Content-Type': 'application/json'})
        return changes

    def _get_run_info(self):
        return QueryViaWeb(self.project, self.current_review_id)
class RestAPI:
    def __init__(self, credentialsFile, gerritUrl):
        # Get login authentication information.
        # This expects a file that contains only a Gerrit user's
        #   <Username> <HTTP Password>
        # Currently, this is found on Gerrit, select:
        #   -> Your username dropdown
        #   -> Settings
        #   -> HTTP Password
        scriptPath = os.path.dirname(os.path.abspath(__file__))
        authFilepath = os.path.expanduser(scriptPath + "/" + credentialsFile)
        if not os.path.isfile(authFilepath):
            print("Error: No authentication file named " + credentialsFile + " found")
            vprint(scriptPath)
            quit()
        with open(authFilepath, 'r') as loginFile:
            line = loginFile.readline()
            login = line.split()
            if len(login) < 2:
                print("Error: Insufficient login credentials")
                quit()
            user = login[0]
            password = login[1]
        auth = HTTPBasicAuth(user, password)
        self.rest = GerritRestAPI(url=gerritUrl, auth=auth)

    # Wrapper for GerritRestAPI's GET method
    def get(self, query):
        result = self.rest.get(query, headers={'Content-Type': 'application/json'})
        return result

    # Wrapper for GerritRestAPI's review method
    def review(self, changeID, revision, review):
        result = self.rest.review(changeID, revision, review)
        return result
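# Usage sketch for the RestAPI wrapper above. The credentials file name,
# server URL and the query are hypothetical; the review payload follows
# Gerrit's "set review" REST format, passed as a JSON string the same way
# the other snippets in this collection do.
api = RestAPI("gerrit_credentials.txt", "https://gerrit.example.com")

# List open changes owned by the authenticated user.
open_changes = api.get("/changes/?q=owner:self+status:open")

# Post a +1 Code-Review on the current revision of the first result.
if open_changes:
    change_id = open_changes[0]["id"]
    api.review(change_id, "current", '{"labels": {"Code-Review": 1}}')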
def test_gerrit_repositories(local_salt_client):
    missing_repos = []
    config = utils.get_configuration()
    gerrit_password = local_salt_client.cmd(
        'I@gerrit:client',
        'pillar.get',
        ['_param:openldap_admin_password'],
        expr_form='compound').values()[0]
    gerrit_port = local_salt_client.cmd(
        'I@gerrit:client',
        'pillar.get',
        ['gerrit:client:server:http_port'],
        expr_form='compound').values()[0]
    gerrit_address = local_salt_client.cmd(
        'I@gerrit:client',
        'pillar.get',
        ['gerrit:client:server:host'],
        expr_form='compound').values()[0]
    gerrit_protocol = local_salt_client.cmd(
        'I@gerrit:client',
        'pillar.get',
        ['gerrit:client:server:protocol'],
        expr_form='compound').values()[0]

    auth = HTTPBasicAuth('admin', gerrit_password)
    rest = GerritRestAPI(url="{0}://{1}:{2}".format(gerrit_protocol,
                                                    gerrit_address,
                                                    gerrit_port),
                         auth=auth)

    for repo in config['drivetrain_repos']:
        repoHttp = repo.replace("/", "%2F")
        try:
            response = rest.get("/projects/{0}".format(repoHttp))
        except requests.exceptions.HTTPError as e:
            missing_repos.append("Repo {0} is missing".format(repo))

    assert len(missing_repos) == 0, \
        '''Some repositories in Gerrit are missing:
{}'''.format(json.dumps(missing_repos, indent=4))
class Gerrit(object):
    def __init__(self, url, netrc=None, use_internal=False):
        auth = AuthFromNetrc(netrc, url, use_internal)
        self.timeout = 90
        self.rest = GerritRestAPI(url=url, auth=auth)
        self.url = url
        self.change_options = [
            'CURRENT_REVISION', 'MESSAGES', 'DETAILED_LABELS',
            'DETAILED_ACCOUNTS', 'COMMIT_FOOTERS'
        ]

    def get_change(self, change_id, rev_num=None):
        options = self.change_options
        if rev_num is not None:
            options += ['ALL_REVISIONS']
        uri = '/changes/{}?o={}'.format(change_id, '&o='.join(options))
        rest = self.rest.get(uri, timeout=self.timeout)
        c = GerritChange(self.url, rest)

        # The modifications to change here shouldn't be relied upon, but rolling
        # back to a previous revision is useful for testing. So we'll do our best
        # to act like the requested revision is the current_revision and hope
        # nothing downstream of us gets too confused
        if rev_num is not None:
            uri = '/changes/{}/revisions/{}/commit'.format(change_id, rev_num)
            rest = self.rest.get(uri, timeout=self.timeout)
            for r in c.revisions:
                if int(r.number) != int(rev_num):
                    continue
                r.commit_message = rest['message']
                c.subject = rest['subject']
                c.current_revision = r

        uri = '/changes/{}/comments/'.format(change_id)
        rest = self.rest.get(uri, timeout=self.timeout)
        # pprint.PrettyPrinter(indent=4).pprint(rest)
        c.add_comments(rest)
        return c

    def get_ancestor_changes(self, change):
        uri = '/changes/{}/revisions/current/related'.format(change.id)
        related_changes = self.rest.get(uri, timeout=self.timeout)['changes']
        changes = []
        parents = []
        for c in related_changes:
            if c['change_id'] == change.change_id:
                parents = c['commit']['parents']
                break
        while True:
            new_parents = []
            for p in parents:
                for c in related_changes:
                    if c['commit']['commit'] == p['commit']:
                        new_parents += c['commit']['parents']
                        changes.append(self.get_change(c['_change_number']))
                        break
            if new_parents:
                parents = new_parents
            else:
                break
        return changes

    def query_changes(self, status=None, message=None, after=None,
                      age_days=None, change_id=None, change_num=None,
                      project=None, owner=None, branches=None):
        query = []
        if message:
            query.append('message:"{}"'.format(urllib.parse.quote(message)))
        if status:
            query.append('status:{}'.format(status))
        if after:
            query.append('after:"{}"'.format(after.isoformat()))
        if age_days:
            query.append('age:{}d'.format(age_days))
        if change_id:
            query.append('change:{}'.format(change_id))
        if change_num:
            query.append('change:{}'.format(change_num))
        if project:
            query.append('project:{}'.format(project))
        if owner:
            query.append('owner:{}'.format(owner))
        if branches:
            if len(branches) == 1:
                q = 'branch:{}'.format(branches[0])
            else:
                q = '(branch:'
                q += ' OR branch:'.join(branches)
                q += ')'
            query.append(q)
        uri = '/changes/?q={}&o={}'.format('+'.join(query),
                                           '&o='.join(self.change_options))
        changes = []
        for c in self.rest.get(uri, timeout=self.timeout):
            changes.append(GerritChange(self.url, c))
        return changes

    def get_patch(self, change):
        uri = '/changes/{}/revisions/{}/patch'.format(
            change.id, change.current_revision.id)
        return self.rest.get(uri, timeout=self.timeout)

    def get_messages(self, change):
        uri = '/changes/{}/messages'.format(change.id)
        return self.rest.get(uri, timeout=self.timeout)

    def set_topic(self, change):
        # https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#set-topic
        uri = '/changes/{}/topic'.format(change.id)
        options = {'topic': change.topic}
        try:
            self.rest.put(uri, data=options, timeout=self.timeout)
            return True
        except requests.exceptions.HTTPError:
            return False

    def remove_reviewer(self, change):
        uri = '/changes/{}/reviewers/self/delete'.format(change.id)
        options = {
            'notify': 'NONE',
        }
        try:
            self.rest.post(uri, data=options, timeout=self.timeout)
            return True
        except requests.exceptions.HTTPError:
            return False

    def abandon(self, change):
        uri = '/changes/{}/abandon'.format(change.id)
        try:
            self.rest.post(uri, timeout=self.timeout)
            return True
        except requests.exceptions.HTTPError:
            return False

    def review(self, change, tag, message, notify_owner, vote_code_review=None,
               vote_verified=None, vote_cq_ready=None, inline_comments=None):
        review = {
            'tag': tag,
            'message': message,
            'notify': 'OWNER' if notify_owner else 'NONE',
            'omit_duplicate_comments': True,
        }
        labels = {}
        if vote_code_review is not None:
            labels['Code-Review'] = vote_code_review
        if vote_verified is not None:
            labels['Verified'] = vote_verified
        if vote_cq_ready is not None:
            labels['Commit-Queue'] = vote_cq_ready
        if labels:
            review['labels'] = labels
        if inline_comments:
            review['comments'] = inline_comments
        # pprint.PrettyPrinter(indent=4).pprint(review)
        # pprint.PrettyPrinter(indent=4).pprint(json.dumps(review))
        uri = "changes/{}/revisions/{}/review".format(
            change.id, change.current_revision.id)
        return self.rest.post(uri,
                              data=json.dumps(review),
                              headers={"Content-Type": "application/json"},
                              timeout=self.timeout)
def _main():
    descr = "Send request using Gerrit HTTP API"
    parser = argparse.ArgumentParser(
        description=descr,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("-g", "--gerrit-url",
                        dest="gerrit_url",
                        required=True,
                        help="gerrit server url")
    parser.add_argument(
        "-b", "--basic-auth",
        dest="basic_auth",
        action="store_true",
        help="(deprecated) use basic auth instead of digest",
    )
    parser.add_argument(
        "-d", "--digest-auth",
        dest="digest_auth",
        action="store_true",
        help="use digest auth instead of basic",
    )
    if _KERBEROS_SUPPORT:
        parser.add_argument(
            "-k", "--kerberos-auth",
            dest="kerberos_auth",
            action="store_true",
            help="use kerberos auth",
        )
    parser.add_argument("-u", "--username", dest="username", help="username")
    parser.add_argument("-p", "--password", dest="password", help="password")
    parser.add_argument(
        "-n", "--netrc",
        dest="netrc",
        action="store_true",
        help="Use credentials from netrc",
    )
    parser.add_argument(
        "-v", "--verbose",
        dest="verbose",
        action="store_true",
        help="enable verbose (debug) logging",
    )
    options = parser.parse_args()

    level = logging.DEBUG if options.verbose else logging.INFO
    logging.basicConfig(format="%(asctime)s %(levelname)s %(message)s",
                        level=level)

    if _KERBEROS_SUPPORT and options.kerberos_auth:
        if options.username or options.password or options.basic_auth or options.netrc:
            parser.error("--kerberos-auth may not be used together with "
                         "--username, --password, --basic-auth or --netrc")
        auth = HTTPKerberosAuth(mutual_authentication=OPTIONAL)
    elif options.username and options.password:
        if options.netrc:
            logging.warning("--netrc option ignored")
        if options.digest_auth:
            auth = HTTPDigestAuth(options.username, options.password)
        else:
            auth = HTTPBasicAuth(options.username, options.password)
    elif options.netrc:
        if options.digest_auth:
            auth = HTTPDigestAuthFromNetrc(url=options.gerrit_url)
        else:
            auth = HTTPBasicAuthFromNetrc(url=options.gerrit_url)
    else:
        auth = None

    rest = GerritRestAPI(url=options.gerrit_url, auth=auth)

    try:
        query = ["status:open"]
        if auth:
            query += ["owner:self"]
        else:
            query += ["limit:10"]
        changes = rest.get("/changes/?q=%s" % "%20".join(query))
        logging.info("%d changes", len(changes))
        for change in changes:
            logging.info(change["change_id"])
    except RequestException as err:
        logging.error("Error: %s", str(err))
class GerritPatches(object):
    """
    Determine all relevant patches to apply to a repo sync based
    on a given set of initial parameters, which can be a set of one
    of the following:
      - review IDs
      - change IDs
      - topics

    The resulting data will include the necessary patch commands
    to be applied to the repo sync
    """

    def __init__(self, gerrit_url, user, passwd):
        """Initial Gerrit connection and set base options"""
        auth = HTTPBasicAuth(user, passwd)
        self.rest = GerritRestAPI(url=gerrit_url, auth=auth)
        self.base_options = [
            'CURRENT_REVISION', 'CURRENT_COMMIT', 'DOWNLOAD_COMMANDS'
        ]
        self.seen_reviews = set()

    def query(self, query_string, options=None):
        """
        Get results from Gerrit for a given query string, returning
        a dictionary keyed off the relevant review IDs, the values
        being a special object containing all relevant information
        about a review
        """
        if options is None:
            options = self.base_options
        opt_string = '&o='.join([''] + options)
        data = dict()
        try:
            results = self.rest.get(query_string + opt_string)
        except requests.exceptions.HTTPError as exc:
            raise RuntimeError(exc)
        else:
            for result in results:
                num_id = result['_number']
                data[num_id] = GerritChange(result)
        return data

    def get_changes_via_review_id(self, review_id):
        """Find all reviews for a given review ID"""
        return self.query('/changes/?q={}'.format(review_id))

    def get_changes_via_change_id(self, change_id):
        """Find all reviews for a given change ID"""
        return self.query('/changes/?q=change:{}'.format(change_id))

    def get_changes_via_topic_id(self, topic):
        """Find all reviews for a given topic"""
        return self.query('/changes/?q=topic:{}'.format(topic))

    def get_open_parents(self, review):
        """Find all open parent reviews for a given review"""
        reviews = dict()
        if not review.parents:
            return reviews

        # Search recursively up via the parents until no more
        # open reviews are found
        for parent in review.parents:
            p_review = self.query(
                '/changes/?q=status:open+commit:{}'.format(parent))
            if not p_review:
                continue
            p_review_id = p_review.keys()[0]
            reviews.update(p_review)
            reviews.update(self.get_open_parents(p_review[p_review_id]))
        return reviews

    def get_reviews(self, initial_args, id_type):
        """
        From an initial set of parameters (review IDs, change IDs
        or topics), determine all relevant open reviews that will
        need to be applied to a repo sync via patching
        """
        all_reviews = dict()
        stack = list()

        # Generate initial set of reviews from the initial set
        # of parameters, generating the stack (list of review IDs)
        # from the results
        for initial_arg in initial_args:
            reviews = getattr(
                self, 'get_changes_via_{}_id'.format(id_type))(initial_arg)
            stack.extend([r_id for r_id in reviews.keys()])

        # From the stack, check each entry and add to the final set
        # of reviews if not already there, keeping track of which
        # have been seen so far. For each review, also look for any
        # related reviews via change ID and topic, along with any
        # still open parents, adding to the stack as needed. All
        # relevant reviews will have been found once the stack is empty.
        while stack:
            review_id = stack.pop()
            reviews = self.get_changes_via_review_id(review_id)

            for new_id, review in reviews.iteritems():
                if new_id in self.seen_reviews:
                    continue
                all_reviews[new_id] = review
                self.seen_reviews.add(new_id)

                change_reviews = self.get_changes_via_change_id(
                    review.change_id)
                stack.extend([
                    r_id for r_id in change_reviews.keys()
                    if r_id not in self.seen_reviews
                ])
                if review.topic is not None:
                    topic_reviews = self.get_changes_via_topic_id(review.topic)
                    stack.extend([
                        r_id for r_id in topic_reviews.keys()
                        if r_id not in self.seen_reviews
                    ])
                stack.extend([
                    r_id for r_id in self.get_open_parents(review)
                    if r_id not in self.seen_reviews
                ])

        return all_reviews
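# Usage sketch for the GerritPatches class above (server URL, credentials and
# review number are hypothetical). get_reviews() expands a starting review ID
# into all related open reviews: same Change-Id on other branches, same topic,
# and still-open parents.
patches = GerritPatches("https://gerrit.example.com", "builder", "secret")
reviews = patches.get_reviews(["123456"], "review")
for num_id in sorted(reviews):
    print("{} {}".format(num_id, reviews[num_id]))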
class Gerrit(object):
    def __init__(self, url):
        auth = HTTPBasicAuthFromNetrc(url=url)
        self.rest = GerritRestAPI(url=url, auth=auth)
        self.url = url
        self.change_options = [
            'CURRENT_REVISION', 'MESSAGES', 'DETAILED_LABELS',
            'DETAILED_ACCOUNTS', 'COMMIT_FOOTERS'
        ]

    def get_change(self, change_id):
        uri = '/changes/{}?o={}'.format(change_id,
                                        '&o='.join(self.change_options))
        rest = self.rest.get(uri)
        return GerritChange(self.url, rest)

    def get_related_changes(self, change):
        uri = '/changes/{}/revisions/current/related'.format(change.id)
        changes = []
        for c in self.rest.get(uri)['changes']:
            changes.append(self.get_change(c['change_id']))
        return changes

    def query_changes(self, status=None, message=None, after=None,
                      age_days=None, change_id=None, change_num=None,
                      project=None):
        query = []
        if message:
            query.append('message:"{}"'.format(urllib.parse.quote(message)))
        if status:
            query.append('status:{}'.format(status))
        if after:
            query.append('after:"{}"'.format(after.isoformat()))
        if age_days:
            query.append('age:{}d'.format(age_days))
        if change_id:
            query.append('change:{}'.format(change_id))
        if change_num:
            query.append('change:{}'.format(change_num))
        if project:
            query.append('project:{}'.format(project))
        uri = '/changes/?q={}&o={}'.format('+'.join(query),
                                           '&o='.join(self.change_options))
        changes = []
        for c in self.rest.get(uri):
            changes.append(GerritChange(self.url, c))
        return changes

    def get_patch(self, change):
        uri = '/changes/{}/revisions/{}/patch'.format(
            change.id, change.current_revision.id)
        return self.rest.get(uri)

    def get_messages(self, change):
        uri = '/changes/{}/messages'.format(change.id)
        return self.rest.get(uri)

    def review(self, change, tag, message, notify_owner, vote_code_review=None,
               vote_verified=None, vote_cq_ready=None, vote_trybot_ready=None):
        review = {
            'tag': tag,
            'message': message,
            'notify': 'OWNER' if notify_owner else 'NONE',
        }
        labels = {}
        if vote_code_review is not None:
            labels['Code-Review'] = vote_code_review
        if vote_verified is not None:
            labels['Verified'] = vote_verified
        if vote_cq_ready is not None:
            labels['Commit-Queue'] = vote_cq_ready
        if vote_trybot_ready is not None:
            labels['Trybot-Ready'] = vote_trybot_ready
        if labels:
            review['labels'] = labels
        # pprint.PrettyPrinter(indent=4).pprint(review)
        return self.rest.review(change.id, change.current_revision.id,
                                json.dumps(review))
#!/usr/bin/env python

from pygerrit2 import GerritRestAPI
from requests.auth import HTTPDigestAuth
from datetime import datetime, timedelta
import pprint
import json
import os

auth = HTTPDigestAuth('user', 'http-password-from-settings')
rest = GerritRestAPI(url='http://*****:*****@yourserver', auth=auth)
# The remainder of this snippet is truncated in the source; the surviving
# fragment built a command string ending in: "... kill " + task["id"]
def getChanges():
    url = 'http://freakyos.xyz'
    auth = Anonymous()
    rest = GerritRestAPI(url=url, auth=auth)
    return rest.get('/changes/?q=status:open')
from pygerrit2 import GerritRestAPI, HTTPBasicAuth

username = "******"
password = "******"
gerrit_url = "https://gerrit.abcdefgh.com/"
repo = "aaa/bbbb/ccccc/generic"

auth = HTTPBasicAuth(username, password)
rest = GerritRestAPI(url=gerrit_url, auth=auth)

commits = rest.get("/changes/?q=project:" + repo)
for commit in commits:
    if commit["status"] == "MERGED":
        print(commit["submitted"].split()[0] + " " + commit["subject"])
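# Paging sketch for the query above: a single /changes/ request returns at most
# the server's query limit and flags the last entry with "_more_changes" when
# more remain, the same mechanism the LineageOS snippet below relies on. This
# reuses the rest and repo variables defined above.
changes = rest.get("/changes/?q=project:" + repo)
while changes and changes[-1].get("_more_changes"):
    more = rest.get("/changes/?q=project:{}&start={}".format(repo, len(changes)))
    if not more:
        break
    changes.extend(more)
for change in changes:
    if change["status"] == "MERGED":
        print(change["submitted"].split()[0] + " " + change["subject"])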
class Gerrit(object):
    def __init__(self, url, use_internal=False):
        auth = AuthFromNetrc(url, use_internal)
        self.rest = GerritRestAPI(url=url, auth=auth)
        self.url = url
        self.change_options = [
            'CURRENT_REVISION', 'MESSAGES', 'DETAILED_LABELS',
            'DETAILED_ACCOUNTS', 'COMMIT_FOOTERS'
        ]

    def get_change(self, change_id, rev_num=None):
        options = self.change_options
        if rev_num is not None:
            options += ['ALL_REVISIONS']
        uri = '/changes/{}?o={}'.format(change_id, '&o='.join(options))
        rest = self.rest.get(uri)
        c = GerritChange(self.url, rest)

        # The modifications to change here shouldn't be relied upon, but rolling
        # back to a previous revision is useful for testing. So we'll do our best
        # to act like the requested revision is the current_revision and hope
        # nothing downstream of us gets too confused
        if rev_num is not None:
            uri = '/changes/{}/revisions/{}/commit'.format(change_id, rev_num)
            rest = self.rest.get(uri)
            for r in c.revisions:
                if int(r.number) != int(rev_num):
                    continue
                r.commit_message = rest['message']
                c.subject = rest['subject']
                c.current_revision = r
        return c

    def get_related_changes(self, change):
        uri = '/changes/{}/revisions/current/related'.format(change.id)
        changes = []
        for c in self.rest.get(uri)['changes']:
            changes.append(self.get_change(c['change_id']))
        return changes

    def query_changes(self, status=None, message=None, after=None,
                      age_days=None, change_id=None, change_num=None,
                      project=None):
        query = []
        if message:
            query.append('message:"{}"'.format(urllib.parse.quote(message)))
        if status:
            query.append('status:{}'.format(status))
        if after:
            query.append('after:"{}"'.format(after.isoformat()))
        if age_days:
            query.append('age:{}d'.format(age_days))
        if change_id:
            query.append('change:{}'.format(change_id))
        if change_num:
            query.append('change:{}'.format(change_num))
        if project:
            query.append('project:{}'.format(project))
        uri = '/changes/?q={}&o={}'.format('+'.join(query),
                                           '&o='.join(self.change_options))
        changes = []
        for c in self.rest.get(uri):
            changes.append(GerritChange(self.url, c))
        return changes

    def get_patch(self, change):
        uri = '/changes/{}/revisions/{}/patch'.format(
            change.id, change.current_revision.id)
        return self.rest.get(uri)

    def get_messages(self, change):
        uri = '/changes/{}/messages'.format(change.id)
        return self.rest.get(uri)

    def remove_reviewer(self, change):
        uri = '/changes/{}/reviewers/self/delete'.format(change.id)
        options = {
            'notify': 'NONE',
        }
        try:
            self.rest.post(uri, data=options)
            return True
        except requests.exceptions.HTTPError as e:
            return False

    def review(self, change, tag, message, notify_owner, vote_code_review=None,
               vote_verified=None, vote_cq_ready=None, inline_comments=None):
        review = {
            'tag': tag,
            'message': message,
            'notify': 'OWNER' if notify_owner else 'NONE',
            'omit_duplicate_comments': True,
        }
        labels = {}
        if vote_code_review is not None:
            labels['Code-Review'] = vote_code_review
        if vote_verified is not None:
            labels['Verified'] = vote_verified
        if vote_cq_ready is not None:
            labels['Commit-Queue'] = vote_cq_ready
        if labels:
            review['labels'] = labels
        if inline_comments:
            review['comments'] = inline_comments
        # pprint.PrettyPrinter(indent=4).pprint(review)
        # pprint.PrettyPrinter(indent=4).pprint(json.dumps(review))
        return self.rest.review(change.id, change.current_revision.id,
                                json.dumps(review))
# Apply the patch file.
def git_apply(patch_file_name):
    try:
        # "--keep-non-patch" avoids trimming non-patch [TAG] parts of the commit
        # message.
        git_repo.git.am(patch_file_name, "--keep-non-patch")
        return True
    except:
        print " FAILED TO APPLY PATCH"
        git_repo.git.am("--abort")
        return False


git_checkout("master")

rest = GerritRestAPI(url='https://fuchsia-review.googlesource.com')
changes = rest.get("/changes/?q=owner:" + owner +
                   "%20status:open%20repo:" + repo)

# Git Commit --> Change Object.
git_commit_map = dict()

# (change object, commit object)
change_list = []

for change in changes:
    # XXX This seems unnecessary...
    # if not change['mergeable']:
    #     print " CANNOT MERGE: " + change['subject']
    #     continue
    id = change['id']
    commit = rest.get("/changes/" + id + "/revisions/current/commit")
def main():
    if len(sys.argv) > 1 and sys.argv[1] == "--no-download":
        print("Using cached version of changes...", file=sys.stderr)
        with open('changes.json', 'r') as f:
            changes = json.loads(f.read())
    else:
        print("Getting changes from Gerrit...", file=sys.stderr)
        query = "branch:cm-11.0"
        #query = "topic:asb-2019.02-cm11"
        rest = GerritRestAPI(url='https://review.lineageos.org')
        changes = rest.get("/changes/?q={}".format(query))

        # Go through all pages (we only get 500 per request)
        while "_more_changes" in changes[-1]:
            newchanges = rest.get("/changes/?q={}&start={}".format(
                query, str(len(changes))))
            changes.extend(newchanges)
            print("Got {} changes...".format(len(changes)), file=sys.stderr)

        # Write json to file
        with open('changes.json', 'w') as f:
            print("Writing changes as json to changes.json...", file=sys.stderr)
            f.write(json.dumps(changes))

    # Matches the many different topic names that were used over the years,
    # see https://pastebin.com/raw/d4sSPihB
    asbre = re.compile(
        r"^(?:cm-11-)?asb-\d{4}\.\d{2}(?:\.\d{2})?(?:-cm11|-cm-11.0)?$")

    asb_dict = {}
    print("Filtering changes by topic...", file=sys.stderr)
    for change in changes:
        if "topic" in change:
            topic = change["topic"]
            if asbre.match(topic):
                if topic not in asb_dict:
                    asb_dict[topic] = []
                asb_dict[topic].append(change)

    print("Iterating through {} ASB topics...".format(len(asb_dict)),
          file=sys.stderr)
    merged = 0
    total = 0
    for asb, changes in sorted(asb_dict.items()):
        if asb in ignore_asb:
            continue
        print("*{}*\n".format(asb))
        for change in changes:
            if change["status"] == "ABANDONED":
                # print("Skipping abandoned change.")
                continue
            repo = change["project"].replace("LineageOS/", "")
            change_id = change["change_id"]
            # Ignore the repos in that list
            if repo in ignore_repos:
                continue
            # Ignore changes in that list
            if change["_number"] in ignore_changes:
                continue
            if change_id_present(repo, change_id, change["status"] == "MERGED"):
                mystr = "- [x]"
                merged += 1
            else:
                mystr = "- [ ]"
            if change["status"] == "NEW":
                warning = " (warning: not merged yet)"
            else:
                warning = ""
            total += 1
            print("{} {} {}{}".format(mystr, change["_number"], repo, warning))
        print()

    print("Merged: {} - Total: {}".format(merged, total))
def _main():
    descr = 'Send request using Gerrit HTTP API'
    parser = argparse.ArgumentParser(
        description=descr,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-g', '--gerrit-url',
                        dest='gerrit_url',
                        required=True,
                        help='gerrit server url')
    parser.add_argument('-b', '--basic-auth',
                        dest='basic_auth',
                        action='store_true',
                        help='(deprecated) use basic auth instead of digest')
    parser.add_argument('-d', '--digest-auth',
                        dest='digest_auth',
                        action='store_true',
                        help='use digest auth instead of basic')
    if _KERBEROS_SUPPORT:
        parser.add_argument('-k', '--kerberos-auth',
                            dest='kerberos_auth',
                            action='store_true',
                            help='use kerberos auth')
    parser.add_argument('-u', '--username', dest='username', help='username')
    parser.add_argument('-p', '--password', dest='password', help='password')
    parser.add_argument('-n', '--netrc',
                        dest='netrc',
                        action='store_true',
                        help='Use credentials from netrc')
    parser.add_argument('-v', '--verbose',
                        dest='verbose',
                        action='store_true',
                        help='enable verbose (debug) logging')
    options = parser.parse_args()

    level = logging.DEBUG if options.verbose else logging.INFO
    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
                        level=level)

    if _KERBEROS_SUPPORT and options.kerberos_auth:
        if options.username or options.password \
                or options.basic_auth or options.netrc:
            parser.error("--kerberos-auth may not be used together with "
                         "--username, --password, --basic-auth or --netrc")
        auth = HTTPKerberosAuth(mutual_authentication=OPTIONAL)
    elif options.username and options.password:
        if options.netrc:
            logging.warning("--netrc option ignored")
        if options.digest_auth:
            auth = HTTPDigestAuth(options.username, options.password)
        else:
            auth = HTTPBasicAuth(options.username, options.password)
    elif options.netrc:
        if options.digest_auth:
            auth = HTTPDigestAuthFromNetrc(url=options.gerrit_url)
        else:
            auth = HTTPBasicAuthFromNetrc(url=options.gerrit_url)
    else:
        auth = None

    rest = GerritRestAPI(url=options.gerrit_url, auth=auth)

    try:
        query = ["status:open"]
        if auth:
            query += ["owner:self"]
        else:
            query += ["limit:10"]
        changes = rest.get("/changes/?q=%s" % "%20".join(query))
        logging.info("%d changes", len(changes))
        for change in changes:
            logging.info(change['change_id'])
    except RequestException as err:
        logging.error("Error: %s", str(err))
def main(ctx, info_file, gerrit_url, change_number, tsc, majority_of_committers):
    """Iterate over TSC members after the committer vote has happened."""
    with open(info_file) as file:
        try:
            info_data = yaml.safe_load(file)
        except yaml.YAMLError as exc:
            log.error(exc)

    committer_info = info_data['committers']

    info_committers = []
    for count, item in enumerate(committer_info):
        committer = committer_info[count]['id']
        info_committers.append(committer)

    rest = GerritRestAPI(url=gerrit_url)
    changes = rest.get("changes/{}/reviewers".format(change_number))

    info_change = []
    for change in changes:
        line = (change['username'], change['approvals']['Code-Review'])
        if '+1' in line[1] or '+2' in line[1]:
            info_change.append(change['username'])

    have_not_voted = [
        item for item in info_committers if item not in info_change
    ]
    have_not_voted_length = len(have_not_voted)
    have_voted = [item for item in info_committers if item in info_change]
    have_voted_length = len(have_voted)

    log.info("Number of Committers:")
    log.info(len(info_committers))
    committer_length = len(info_committers)
    log.info("Committers that have voted:")
    log.info(have_voted)
    log.info(have_voted_length)
    log.info("Committers that have not voted:")
    log.info(have_not_voted)
    log.info(have_not_voted_length)

    if have_voted_length == 0:
        log.info("No one has voted:")
        sys.exit(1)

    if have_voted_length != 0:
        majority = committer_length / have_voted_length
        if majority == 1:
            log.info("Majority committer vote reached")
            if tsc:
                log.info("Need majority of tsc")
                info_file = tsc
                majority_of_committers += 1
                if majority_of_committers == 2:
                    log.info("TSC majority reached auto merging commit")
                else:
                    main(ctx, info_file, gerrit_url, change_number, tsc,
                         majority_of_committers)
        else:
            log.info("majority not yet reached")
            sys.exit(1)
class GerritPatches:
    """
    Determine all relevant patches to apply to a repo sync based
    on a given set of initial parameters, which can be a set of one
    of the following:
      - review IDs
      - change IDs
      - topics

    The resulting data will include the necessary patch commands
    to be applied to the repo sync
    """

    def __init__(self, gerrit_url, user, passwd, checkout=False,
                 whitelist_branches=[]):
        """Initial Gerrit connection and set base options"""
        auth = HTTPBasicAuth(user, passwd)
        self.rest = GerritRestAPI(url=gerrit_url, auth=auth)
        self.base_options = [
            'CURRENT_REVISION', 'CURRENT_COMMIT', 'DOWNLOAD_COMMANDS'
        ]
        self.patch_command = 'Checkout' if checkout else 'Cherry Pick'
        # We need to track reviews which were specifically requested as these
        # are applied regardless of their status. Derived reviews are only
        # applied if they are still open
        self.requested_reviews = []
        # We track what's been applied to ensure at least the changes we
        # specifically requested got done
        self.applied_reviews = []
        # Manifest project name (could theoretically be dynamic)
        self.manifest = None
        # The manifest is only read from disk if manifest_stale is true. This
        # is to facilitate a re-read in the event a patch is applied to the
        # manifest itself
        self.manifest_stale = True
        self.manifest_project = 'manifest'
        # We use this regex to determine if the revision is a sha, for a
        # given project in the manifest
        self.sha_re = re.compile(r'[0-9a-f]{40}')
        self.whitelist_branches = whitelist_branches

    @classmethod
    def from_config_file(cls, config_path, checkout=False,
                         whitelist_branches=[]):
        """
        Factory method: construct a GerritPatches from the path to
        a config file
        """
        if not os.path.exists(config_path):
            logger.error(f'Configuration file {config_path} missing!')
            sys.exit(1)

        gerrit_config = configparser.ConfigParser()
        gerrit_config.read(config_path)

        if 'main' not in gerrit_config.sections():
            logger.error(
                f'Invalid config file "{config_path}" (missing "main" section)'
            )
            sys.exit(1)

        try:
            gerrit_url = gerrit_config.get('main', 'gerrit_url')
            user = gerrit_config.get('main', 'username')
            passwd = gerrit_config.get('main', 'password')
        except configparser.NoOptionError:
            logger.error('One of the options is missing from the config file: '
                         'gerrit_url, username, password. Aborting...')
            sys.exit(1)

        return cls(gerrit_url, user, passwd, checkout, whitelist_branches)

    def get_project_path_and_branch_from_manifest(self, project):
        branch = None
        path = None
        if self.manifest_stale:
            # Read in the manifest. We ask repo to report the manifest, because
            # that automatically filters out projects that were not synced due
            # to groups ("repo init -g ....", or being in "notdefault" group).
            manifest_str = subprocess.check_output(['repo', 'manifest'])
            self.manifest = EleTree.fromstring(manifest_str)
            self.manifest_stale = False
        proj_info = self.manifest.find(f'.//project[@name="{project}"]')
        if proj_info is not None:
            path = proj_info.attrib.get('path', project)
            branch = proj_info.attrib.get('revision')
            if branch is None:
                default = self.manifest.find(".//default")
                if default is not None:
                    branch = default.attrib.get('revision')
        return (path, branch)

    def query(self, query_string, options=None, quiet=False):
        """
        Get results from Gerrit for a given query string, returning
        a dictionary keyed off the relevant review IDs, the values
        being a special object containing all relevant information
        about a review
        """
        if options is None:
            options = self.base_options
        opt_string = '&o='.join([''] + options)
        data = dict()
        try:
            q_string = query_string + opt_string
            logger.debug(f"  Query is: {q_string}")
            results = self.rest.get(q_string)
        except requests.exceptions.HTTPError as exc:
            raise RuntimeError(exc)
        else:
            for result in results:
                num_id = str(result['_number'])
                # Always cherry-pick for manifest project
                if result['project'] == self.manifest_project:
                    patch_command = "Cherry Pick"
                else:
                    patch_command = self.patch_command
                data[num_id] = GerritChange(result, patch_command)
        if not quiet:
            logger.debug(f'  Review IDs from query: {", ".join(list(data))}')
        return data

    def get_changes_via_review_id(self, review_id):
        """Find all reviews for a given review ID"""
        # Query the review ID directly first to ensure it exists and is readable.
        # This filters out invalid and inaccessible "private" review IDs.
        # A "private" review ID is a review marked "private" by the owner so that
        # no one else can see it. Only a Gerrit admin and users with the
        # "View Private Changes" permission can see a "private" review.
        logger.debug(f'Ensuring review ID {review_id} is not private')
        if len(self.query(f'/changes/?q={review_id}', quiet=True)) == 0:
            logger.error(
                f'Query returns no data for {review_id}!\n'
                'It is either invalid or marked as "private" by its owner.\n'
                'A "private" review is only accessible by users with '
                '"View Private Changes" permission.'
            )
            sys.exit(1)

        logger.debug(f'Querying on review ID {review_id}')
        status = "status:open+" if review_id not in self.requested_reviews else ""
        return self.query(f'/changes/?q={status}{review_id}')

    def get_changes_via_change_id(self, change_id):
        """Find all reviews for a given change ID"""
        logger.debug(f'Querying on change ID {change_id}')
        return self.query(f'/changes/?q=status:open+change:{change_id}')

    def get_changes_via_topic_id(self, topic):
        """Find all reviews for a given topic"""
        logger.debug(f'Querying on topic {topic}')
        return self.query(f'/changes/?q=status:open+topic:"{topic}"')

    def get_open_parents(self, review):
        """Find all open parent reviews for a given review"""
        reviews = dict()
        if not review or not review.parents:
            return reviews

        # Search recursively up via the parents until no more
        # open reviews are found
        for parent in review.parents:
            logger.debug(f'Querying on parent review sha: {parent}')
            p_review = self.query(f'/changes/?q=status:open+commit:{parent}')
            if not p_review:
                continue
            p_review_id = list(p_review.keys())[0]  # Always single element
            reviews.update(p_review)
            reviews.update(self.get_open_parents(p_review[p_review_id]))
        logger.debug('Found parents: {}'.format(', '.join(
            [str(r) for r in reviews])))
        return reviews

    def get_reviews(self, initial_args, id_type):
        """
        From an initial set of parameters (review IDs, change IDs
        or topics), determine all relevant open reviews that will
        need to be applied to a repo sync via patching
        """
        all_reviews = dict()
        stack = list()

        # Generate initial set of reviews from the initial set
        # of parameters, generating the stack (list of review IDs)
        # from the results
        for initial_arg in initial_args:
            reviews = getattr(self,
                              f'get_changes_via_{id_type}_id')(initial_arg)
            review_ids = [r_id for r_id in reviews.keys()]
            logger.debug('Initial review IDs: {}'.format(', '.join(
                [str(r_id) for r_id in review_ids])))
            stack.extend(review_ids)

        logger.info("Finding dependent reviews...")

        # From the stack, check each entry and add to the final set
        # of reviews if not already there and we have not already
        # applied a patch bearing the same change_id to a different
        # branch of the same project.
        #
        # We keep track of which changes have been seen so far, and
        # look for any related reviews via change ID and topic, along
        # with any still open parents, adding to the stack as needed.
        #
        # All relevant reviews will have been found once the stack
        # is empty.
        while stack:
            review_id = stack.pop()
            reviews = self.get_changes_via_review_id(review_id)

            for new_id, review in reviews.items():
                if new_id in all_reviews.keys():
                    continue
                all_reviews[new_id] = review

                change_reviews = self.get_changes_via_change_id(
                    review.change_id)
                stack.extend([
                    r_id for r_id in change_reviews.keys()
                    if r_id not in all_reviews.keys()
                ])
                if review.topic is not None:
                    topic_reviews = self.get_changes_via_topic_id(review.topic)
                    stack.extend([
                        r_id for r_id in topic_reviews.keys()
                        if r_id not in all_reviews.keys()
                    ])
                # No need to get the parents when using git checkout;
                # checkout will get the parents automatically
                if self.patch_command != "Checkout":
                    stack.extend([
                        r_id for r_id in self.get_open_parents(review)
                        if r_id not in all_reviews.keys()
                    ])

        # When using checkout, remove parents from the reviews.
        # Checkout of a child will apply all its parents
        if self.patch_command == "Checkout":
            for r_id in sorted(all_reviews):
                for p_id in self.get_open_parents(all_reviews.get(r_id)):
                    if p_id in all_reviews:
                        del all_reviews[p_id]
                        logger.info(
                            f'Remove {p_id}. Checkout of its child review {r_id} '
                            'already includes the change.')

        for id, review in all_reviews.copy().items():
            (_, manifest_branch) = self.get_project_path_and_branch_from_manifest(
                review.project)
            if (manifest_branch
                    and id not in self.requested_reviews
                    and review.branch not in self.whitelist_branches
                    and review.branch != manifest_branch
                    and not self.sha_re.match(manifest_branch)):
                # Note: in this conditional we REJECT changes which match all
                # four of these criteria:
                #  - review ID was not explicitly requested
                #  - review branch does not appear in whitelist_branches
                #  - review branch does not match manifest revision
                #  - manifest revision does not point at a sha
                logger.info(
                    f"  Ignoring {review._number} because it's for {review.branch}, "
                    f"manifest branch is {manifest_branch}"
                )
                del all_reviews[id]

        logger.info('Final list of review IDs to apply: {}'.format(', '.join(
            [str(r_id) for r_id in all_reviews.keys()])))
        return all_reviews

    def check_requested_reviews_applied(self):
        # If one or more of our requested reviews doesn't appear in applied
        # reviews, something the user asked for didn't happen. Error out with
        # some info.
        if any(item not in self.applied_reviews
               for item in self.requested_reviews):
            logger.critical(
                "Failed to apply all explicitly-requested review IDs! "
                f'Requested: {self.requested_reviews} '
                f'Applied: {self.applied_reviews}')
            sys.exit(1)
        elif self.requested_reviews:
            logger.info(
                f"All explicitly-requested review IDs applied! {self.requested_reviews}"
            )

    def apply_single_review(self, review, proj_path):
        """
        Given a single review object from Gerrit and a path, apply the git
        change to that path (using either checkout or cherry-pick as
        requested).
        """
        if not os.path.exists(proj_path):
            # Project is missing on disk, but we expected to find it:
            # that's bad.
            logger.critical(f'***** Project {review.project} missing on disk! '
                            f'Expected to be in {proj_path}')
            sys.exit(5)
        try:
            logger.info(
                f'***** Applying http://review.couchbase.org/{review._number} '
                f'to project {review.project}:'
            )
            with cd(proj_path):
                subprocess.check_call(review.patch_command, shell=True)
            self.applied_reviews.append(review._number)
        except subprocess.CalledProcessError as exc:
            raise RuntimeError(
                f'Patch for review {review.id} failed: {exc.output}')
        logger.info(
            f'***** Done applying review {review._number} to project {review.project}\n'
        )

    def patch_repo_sync(self, review_ids, id_type):
        """
        Patch the repo sync with the list of patch commands.
        Repo sync is presumed to be in current working directory.
        """
        # Compute full set of reviews
        reviews = self.get_reviews(review_ids, id_type)

        # Pull out any changes for the manifest project and apply them
        # first, to the local repo manifest. If there are any such changes,
        # re-run repo sync afterwards.
        manifest_changes_found = False
        for review_id in sorted(reviews.keys()):
            review = reviews[review_id]
            if review.project == self.manifest_project:
                manifest_changes_found = True
                del reviews[review_id]
                self.apply_single_review(review,
                                         os.path.join(".repo", "manifests"))
                self.manifest_stale = True

        # If there were manifest changes, re-run "repo sync"
        if manifest_changes_found:
            subprocess.check_call(['repo', 'sync', '--jobs=4'])

        for review_id in sorted(reviews.keys()):
            review = reviews[review_id]
            (path, branch) = self.get_project_path_and_branch_from_manifest(
                review.project)
            if (path, branch) == (None, None):
                logger.info(
                    f"***** NOTE: ignoring review ID {review_id} for project "
                    f"{review.project} that is either not part of the "
                    "manifest, or was excluded due to manifest group filters.")
                continue
            self.apply_single_review(review, path)

        self.check_requested_reviews_applied()
class Reindexer:
    """Class for reindexing Gerrit changes"""

    def __init__(self):
        self.options = _parse_options()
        self._init_logger()
        credentials = self._authenticate()
        if self.options.cert:
            certs = os.path.expanduser(self.options.cert)
            self.api = GerritRestAPI(url=self.options.url,
                                     auth=credentials,
                                     verify=certs)
        else:
            self.api = GerritRestAPI(url=self.options.url, auth=credentials)

    def _init_logger(self):
        self.logger = logging.getLogger("Reindexer")
        self.logger.setLevel(logging.DEBUG)
        h = logging.StreamHandler()
        if self.options.verbose:
            h.setLevel(logging.DEBUG)
        else:
            h.setLevel(logging.INFO)
        formatter = logging.Formatter("%(message)s")
        h.setFormatter(formatter)
        self.logger.addHandler(h)

    def _authenticate(self):
        username = password = None
        if self.options.netrc:
            auth = HTTPBasicAuthFromNetrc(url=self.options.url)
            username = auth.username
            password = auth.password
        if not username:
            username = os.environ.get("USERNAME")
        if not password:
            password = os.environ.get("PASSWORD")
        while not username:
            username = input("user: ")
        # NOTE: the password prompt here is redacted ("******") in the source;
        # the getpass call and the returned credentials are an assumption.
        while not password:
            password = getpass.getpass("password: ")
        return HTTPBasicAuth(username, password)

    def _query(self):
        # NOTE: the start of this method is redacted ("******") in the source;
        # the loop header below is a best-effort reconstruction around the
        # surviving lines.
        start = 0
        more_changes = True
        while more_changes:
            query = f"since:{self.options.time}&start={start}&skip-visibility"
            for change in self.api.get(f"changes/?q={query}"):
                more_changes = change.get("_more_changes") is not None
                start += 1
                yield change.get("_number")
            break

    def _query_to_file(self):
        self.logger.debug(
            f"writing changes since {self.options.time} to file {self.options.file}:"
        )
        with open(self.options.file, "w") as output:
            for id in self._query():
                self.logger.debug(id)
                output.write(f"{id}\n")

    def _reindex_chunk(self, chunk):
        self.logger.debug(f"indexing {chunk}")
        response = self.api.post(
            "/config/server/index.changes",
            chunk,
        )
        self.logger.debug(f"response: {response}")

    def _reindex(self):
        self.logger.debug(f"indexing changes from file {self.options.file}")
        with open(self.options.file, "r") as f:
            with tqdm(unit="changes", desc="Indexed") as pbar:
                for chunk in _chunker(f, self.options.chunksize):
                    self._reindex_chunk(chunk)
                    pbar.update(len(chunk))

    def execute(self):
        if self.options.time:
            self._query_to_file()
        else:
            self._reindex()
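# Usage sketch for the Reindexer class above. _parse_options() is not shown in
# this snippet; it is assumed to supply url, netrc, cert, verbose, time, file
# and chunksize options. With a time set, change numbers are written to the
# file; without one, the file is read back and reindexed in chunks.
if __name__ == "__main__":
    Reindexer().execute()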