def file_feed(self, file):
    """Build an Atom feed for *file* from its last 10 git revisions.

    The feed carries one entry per commit that touched the file, with the
    full diff embedded as CDATA content.  Returns the serialized XML
    document as a string.
    """
    xmldoc, dfeed = feed.atom.new_xmldoc_feed()
    dfeed.title = settings.blog_name + " " + file
    dfeed.id = settings.blog_url + '/' + file
    dfeed.updated = self.atom_date(file)

    # Alternate link to the blog plus a rel="self" link to this feed.
    links = feed.atom.Link(settings.blog_url)
    dfeed.links.append(links)
    selflink = feed.atom.Link(settings.blog_url + '/' + file + '.atom')
    selflink.attrs['rel'] = "self"
    dfeed.links.append(selflink)

    for a in self.atom_authors(file):
        author = feed.atom.Author(a)
        dfeed.authors.append(author)

    # One entry per commit touching the file (newest 10 hashes).
    (blobs, berr) = git.log(file=file, num=10, format="%H")
    for b in blobs.splitlines():
        entry = feed.atom.Entry()
        (entryc, err) = git.log(file=file, num=1,
                                extopts="--stat -p -M -C --full-index", rev=b)
        entry.content = "<![CDATA[\n" + entryc + "\n]]>"
        entry.title = "%s : %s" % (file, b)
        # BUG FIX: RFC 4287 requires atom:id to be unique per entry; the
        # original reused the feed id for every entry, so feed readers
        # would collapse all entries into one.  Append the commit hash.
        entry.id = settings.blog_url + '/' + file + '/' + b
        entry.updated = self.atom_date(file, rev=b)
        dfeed.entries.append(entry)
    return str(xmldoc)
def atom_date(self, file, rev=None):
    """Return the author date of *file*'s last commit (or of *rev*) as an
    Atom-style timestamp string (date + 'T' + time + offset)."""
    kwargs = {'file': file, 'num': 1, 'format': "%ai"}
    if rev is not None:
        kwargs['rev'] = rev
    (arr, err) = git.log(**kwargs)
    # %ai output looks like "YYYY-MM-DD HH:MM:SS +ZZZZ"; stitch the three
    # whitespace-separated fields together.
    parts = arr.splitlines()[0].split()
    return parts[0] + 'T' + parts[1] + parts[2]
def log(parameters):
    """Run ``git log`` in the repository that encloses the working directory.

    When *parameters* is truthy it is forwarded to ``git log`` verbatim;
    otherwise a plain ``git log`` is executed.
    """
    repo_git = Repo(".", search_parent_directories=True).git
    return repo_git.log(parameters) if parameters else repo_git.log()
def list_generate(bad, good):
    # Build the list of notable commits between a known-bad and a known-good
    # commit, tag the two endpoints with 'bad'/'good' status, and render the
    # result via generate_html().
    #
    # The hard-coded hash cc57347... and the tags fix-pack-base-7010 /
    # fix-pack-de-27-7010 appear to be fix-pack boundary markers: when only
    # one endpoint is after the baseline tag, the commit range is stitched
    # together from two sub-ranges on either side of that boundary.
    # NOTE(review): presumably a Liferay 7.0.10 fix-pack repository — confirm.
    notable_hashes = []
    start = None
    end = None
    if git.is_ancestor('fix-pack-base-7010', bad):
        if git.is_ancestor('fix-pack-base-7010', good):
            # Both endpoints after the baseline: order them by ancestry and
            # walk the single range once.
            start = bad if git.is_ancestor(bad, good) else good
            end = bad if start == good else good
            notable_hashes = sublist_generate(start + '~1', end)
        else:
            # bad is after the baseline, good is before: stitch two ranges.
            start = bad
            end = good
            notable_hashes = notable_hashes + sublist_generate(
                'cc57347219da4911d30b154188a99c6a628f6079', end)
            notable_hashes = notable_hashes + sublist_generate(
                start + '~1', 'fix-pack-de-27-7010')
    else:
        if git.is_ancestor('fix-pack-base-7010', good):
            # good is after the baseline, bad is before: stitch two ranges.
            start = good
            end = bad
            notable_hashes = notable_hashes + sublist_generate(
                'cc57347219da4911d30b154188a99c6a628f6079', end)
            notable_hashes = notable_hashes + sublist_generate(
                start + '~1', 'fix-pack-de-27-7010')
        else:
            # Neither endpoint is after the baseline; figure out which one
            # is the ancestor of the other (or fall back to a merge base).
            bad_first = git.is_ancestor(bad, good)
            good_first = git.is_ancestor(good, bad)
            if bad_first and not good_first:
                start = bad
                end = good
            elif good_first and not bad_first:
                start = good
                end = bad
            else:
                # Divergent histories: start at the merge base and end at
                # whichever endpoint has the longer branch.
                common_ancestor = git.merge_base(bad, good).strip()
                bad_first = len(
                    git.log('--pretty=%H',
                            '%s..%s' % (common_ancestor, bad)).split('\n'))
                good_first = len(
                    git.log('--pretty=%H',
                            '%s..%s' % (common_ancestor, good)).split('\n'))
                start = common_ancestor
                end = bad if bad_first > good_first else good
            notable_hashes = sublist_generate(start + '~1', end)
    # The list is newest-first (git log order): index 0 is `end`,
    # index -1 is `start`.
    if end == good:
        notable_hashes[0]['status'] = 'good'
        notable_hashes[-1]['status'] = 'bad'
    else:
        notable_hashes[-1]['status'] = 'good'
        notable_hashes[0]['status'] = 'bad'
    generate_html(notable_hashes)
def sublist_generate(start, end):
    # Collect "notable" commits in start..end: fix-pack tag boundaries plus
    # the first commit of every upper-case ticket id, newest first.
    #
    # Pass 1: map tagged boundary commits (tags starting with 'fix-pack-')
    # to shared metadata dicts, keyed by both tag name and commit hash.
    matching_tags = {}
    last_ticket_id = None
    for line in git.log('--date=short', '--format=format:%H %cd',
                        '--simplify-by-decoration',
                        '%s..%s' % (start, end)).split('\n'):
        commit_hash, commit_date = line.split()[0:2]
        commit_tag = git.tag('--points-at', commit_hash)
        # A commit may carry several tags; prefer the first fix-pack-* one.
        if commit_tag.find('\n') != -1:
            candidate_tags = [
                x for x in commit_tag.split('\n') if x.find('fix-pack-') == 0
            ]
            if len(candidate_tags) > 0:
                commit_tag = candidate_tags[0]
        if commit_tag.find('fix-pack-') != 0:
            continue
        metadata = {
            'hash': commit_tag,  # tagged commits are displayed by tag name
            'date': commit_date,
            'ticket': '',
            'status': None
        }
        matching_tags[commit_tag] = metadata
        matching_tags[commit_hash] = metadata
    # Pass 2: walk every commit; emit tag metadata where it exists, else the
    # first commit of each run sharing the same upper-case ticket id
    # (lower-case first tokens are skipped as non-ticket subjects).
    notable_hashes = []
    for line in git.log('--date=short', '--pretty=%H %cd %s',
                        '%s..%s' % (start, end)).split('\n'):
        commit_hash, commit_date, ticket_id = line.split()[0:3]
        if commit_hash in matching_tags:
            notable_hashes.append(matching_tags[commit_hash])
            continue
        if ticket_id != ticket_id.upper():
            continue
        if last_ticket_id == ticket_id:
            continue
        last_ticket_id = ticket_id
        notable_hashes.append({
            'hash': commit_hash,
            'date': commit_date,
            'ticket': ticket_id,
            'status': None
        })
    return notable_hashes
def check_diff_between_branches(base_branch):
    """Sanity-check the current branch against *base_branch*.

    Fails fast (via log_fatal_and_exit) when the base branch is ahead, when
    the current branch has nothing new, or when the current branch carries
    more than one commit.
    """
    behind = git.log("HEAD.." + base_branch, oneline = True)
    ahead = git.log(base_branch + "..HEAD", oneline = True)
    if behind:
        log_fatal_and_exit(" '%s' is ahead of current branch by %s commits. Rebase "
                           "and try again.", base_branch, len(behind.split("\n")))
    if not ahead:
        log_fatal_and_exit(" Current branch is same as '%s'. Exiting...", base_branch)
    if len(ahead.split("\n")) > 1:
        log_fatal_and_exit(" Current branch is ahead of '%s' by %s commits. Squash into single "
                           "commit and try again.", base_branch, len(ahead.split("\n")))
def checkHistory(self):
    """ Check history of known tests """
    # For every enabled Test of this repository, collect the recent commit
    # history of its folder and persist it via __saveCommits().
    git = self.__getGitCmd()
    # git log --decorate=full --since=1 --simplify-by-decoration /
    # --pretty=%H|%aN|%ae|%ai|%d --follow HEAD
    # NOTE(review): CHECK_COMMMITS_PREVIOUS_DAYS (triple "M") is the actual
    # settings key spelling — do not "fix" it here without renaming the
    # setting itself.
    checkDays = int(settings.CHECK_COMMMITS_PREVIOUS_DAYS)
    if not checkDays:
        checkDays = 1  # always look at least one day back
    tests = Test.objects.filter(git=self, is_enable=True).only('folder')
    if len(tests) == 0:
        self.__getLog().warning("repository %s is empty (0 tests)" % self.name)
    else:
        # NOTE(review): "contiants" is a typo in the runtime log message;
        # left as-is since this edit only touches comments.
        self.__getLog().info(
            "repository %s contiants %d tests" % (self.name, len(tests)))
    for test in tests:
        # Skip tests without a declared folder or whose folder is missing
        # on disk; both cases are logged.
        if not test.folder:
            self.__getLog().warning("The GIT folder for test '%s'"
                                    " is not declared." % test.name)
            continue
        path_dir = "%s/%s" % (self.path_absolute, test.folder)
        if not os.path.exists(path_dir):
            self.__getLog().warning("Test %s doesn't exists" % path_dir)
            continue
        # One row per commit in "hash|author|email|date|decoration" form.
        rows = git.log('--decorate=full', '--since=%s.days' % checkDays,
                       '--simplify-by-decoration',
                       '--pretty=%H|%aN|%ae|%ai|%d', '--follow', 'HEAD',
                       "%s" % path_dir)\
            .split('\n')
        self.__saveCommits(test, rows)
def atom_authors(self, arr):
    """Return the unique author names for the given file path, preserving
    first-seen (newest commit first) order."""
    (arr, err) = git.log(file=arr, format="%an")
    unique_names = []
    for name in arr.splitlines():
        if name in unique_names:
            continue
        unique_names.append(name)
    return unique_names
def git_hash_time(hash, folders):
    """Stamp tracked source files under *folders* with the commit time of
    *hash*, skipping any file listed in changes.txt."""
    changes = set()
    changes_file = get_rd_file('changes.txt')
    if os.path.exists(changes_file):
        with open(changes_file, 'r') as f:
            changes = {line.strip() for line in f}
    raw_time = git.log('-1', hash, '--pretty=format:%ct')
    if raw_time == '':
        return
    timestamp = int(raw_time)
    for folder in folders:
        folder = folder.strip()
        # Probe the known source layouts in priority order.
        candidates = [
            '%s/src/main/java' % folder,
            '%s/docroot/WEB-INF/src' % folder,
            '%s/src' % folder,
        ]
        source_folder = next((c for c in candidates if os.path.isdir(c)), None)
        if source_folder is None:
            continue
        for file in git.ls_files(source_folder).split('\n'):
            if file not in changes and os.path.isfile(file):
                os.utime(file, (timestamp, timestamp))
def getparent_origin():
    # Guess the parent branch of the current branch: scan every remote
    # branch (upstream remotes take priority over origin), keep only real
    # ancestors of HEAD, and pick the one with the smallest distance.
    # Falls back to current_branch itself when nothing matches.
    remote_refs = git.for_each_ref('--format=%(refname)',
                                   'refs/remotes/').split('\n')
    origin_branches = [
        ref[len('refs/remotes/'):] for ref in remote_refs
        if ref.find('refs/remotes/origin') == 0 and ref[-5:] != '/HEAD'
    ]
    upstream_branches = [
        ref[len('refs/remotes/'):] for ref in remote_refs
        if ref.find('refs/remotes/upstream') == 0 and ref[-5:] != '/HEAD'
    ]
    closest_branch = None
    closest_branch_diff = -1
    # upstream_branches first: an upstream match wins over an origin match
    # at equal-or-better distance because '<' is strict.
    for branch_set in [upstream_branches, origin_branches]:
        for branch in branch_set:
            short_branch = branch[branch.find('/') + 1:]
            if short_branch == current_branch or short_branch == closest_branch or not git.is_ancestor(
                    branch, current_branch):
                continue
            # NOTE(review): this measures the length of the raw "%H" output
            # string, not a commit count; since every hash line has a fixed
            # width the value is still monotonic in the number of commits,
            # so the comparison below behaves like a commit-count compare.
            branch_diff = len(
                git.log('--pretty=%H', '%s..%s' % (branch, current_branch)))
            if closest_branch is None or branch_diff < closest_branch_diff:
                closest_branch = short_branch
                closest_branch_diff = branch_diff
    if closest_branch is not None:
        return closest_branch
    return current_branch
def get_hash_info(commit_hash):
    """Return (commit date, ticket id) for *commit_hash*.

    The ticket id is the first token of the commit subject; it is replaced
    with None when that token is not fully upper-case (i.e. not a ticket).
    """
    fields = git.log('-1', '--date=short', '--pretty=%cd %s',
                     commit_hash).strip().split()
    commit_date, ticket_id = fields[0:2]
    if ticket_id != ticket_id.upper():
        ticket_id = None
    return commit_date, ticket_id
def commit_author_time_and_branch_ref(run, master_branch):
    """Yield (author_time, branch) for every commit on each origin remote
    branch that is not reachable from *master_branch*."""
    ref_listing = for_each_ref('refs/remotes/origin/**',
                               format='%(refname:short) %(authordate:unix)')
    with run(ref_listing) as refs_proc:
        for branch, t in columns(refs_proc.stdout):
            # For each branch, list the author times of its unmerged commits.
            times_cmd = log(f"{master_branch}..{branch}", format='%at')
            with run(times_cmd) as times_proc:
                for author_time, in columns(times_proc.stdout):
                    yield int(author_time), branch
def clone(git_params, repo_path, logfile, workDir=None, clone_once=False, **kwargs):
    """Clone the repository and, when a fresh clone actually happened,
    check it out, apply the work-dir patch, and log the result."""
    repo = Git(git_params, repo_path, clone_once, logfile)
    repo.clone(**kwargs)
    if not repo.repo_cloned:
        return
    repo.checkout()
    repo.apply_patch(workDir)
    repo.log()
def getCommits(self, git, n = None, after = None, before = None):
    """Return the commit hashes reported by ``git log``.

    Optionally limits the walk by commit count (*n*) and/or the
    --after/--before date filters.  Hashes are extracted by splitting the
    quoted one-hash-per-line output.
    """
    params = ['--pretty=format:"%H"']
    if n:
        params.append('-n ' + str(n))
    if after:
        params.append('--after=' + after)
    if before:
        params.append('--before=' + before)
    raw = git.log(params)
    # Each line is a double-quoted hash; strip the outer quotes and split
    # on the quote-newline-quote separator between lines.
    return raw.strip('"').split('"\n"')
def log(item=""):
    """
    Show the history log of an item

    @param item the item to be show the log. If not provided, the complete
        history of the repository will be shown
    @return The history of the item
    """
    history = git.log(item)
    return history
def merge(repo_dir, gaia_url, branch_to, branch_from):
    # Merge branch_from into branch_to in a fresh/updated Gaia checkout,
    # print the merged commit range, dry-run the push, then push for real
    # after interactive confirmation.  Returns None on bad branch names.
    # (Python 2 code: print statements and `print >>` redirection.)
    git.delete_gaia(repo_dir)
    t = util.time_start()
    if os.path.exists(repo_dir):
        print "Updating Gaia"
        git.update_gaia(repo_dir, gaia_url)
        print "Updated Gaia in %0.2f seconds" % util.time_end(t)
    else:
        print "Creating Gaia"
        git.create_gaia(repo_dir, gaia_url)  # This is sadly broken
        print "Created Gaia in %0.2f seconds" % util.time_end(t)
    print "Merging %s into branch %s" % (branch_from, branch_to)
    # Refuse to merge to/from branches that do not exist.
    if not branch_to in git.branches(repo_dir):
        print >> sys.stderr, "Asking to merge into a branch that doesn't exist (%s)" % branch_to
        return None
    if not branch_from in git.branches(repo_dir):
        print >> sys.stderr, "Asking to merge from a branch that doesn't exist (%s)" % branch_from
        return None
    git.checkout(repo_dir, branch_to)
    start_commit = git.get_rev(repo_dir)
    git.merge(repo_dir, branch_from, strategy="recursive")
    end_commit = git.get_rev(repo_dir)
    print "Merge range is %s..%s" % (start_commit[:7], end_commit[:7])
    print git.log(repo_dir, "%s..%s" % (start_commit, end_commit), pretty="oneline")
    print "Dry Run push"
    git.push(repo_dir, remote="origin", branches=[branch_to], dry_run=True)
    # NOTE(review): this second push omits dry_run; if the wrapper's default
    # is a real push, this pushes BEFORE the ask_yn() confirmation below —
    # confirm against the git wrapper's push() default.
    info = git.push(repo_dir, remote="origin", branches=[branch_to])
    print "Would be pusing to %s" % info["url"]
    for branch in info["branches"].keys():
        s, e = info["branches"][branch]
        print " %s: %s..%s" % (branch, s, e)
    if util.ask_yn("Push for realises?"):
        info = git.push(repo_dir, remote="origin", branches=[branch_to], dry_run=False)
        print "Pushed to %s" % info["url"]
        for branch in info["branches"].keys():
            s, e = info["branches"][branch]
            print " %s: %s..%s" % (branch, s, e)
        # Record the merged range on the tracking bugs.
        comment(repo_dir, branch_to, "%s..%s" % (start_commit, end_commit))
def open_patcher_portal():
    # Open the Liferay patcher portal in the browser, pre-filled either for
    # updating the fix matching the current branch or for creating a new one.
    fix_id = None
    fix_name = None
    # Branch names like "patcher-1234" or "fix-pack-fix-1234" embed the fix id.
    if current_branch.find('patcher-') == 0:
        fix_id = current_branch[len('patcher-'):]
    elif current_branch.find('fix-pack-fix-') == 0:
        fix_id = current_branch[len('fix-pack-fix-'):]
    # Otherwise probe the portal across fix type filters until one matches.
    for typeFilter in ['0', '1', '6', '2']:
        if fix_id is None:
            fix_id, fix_name = get_fix_id(typeFilter)
    if fix_id is None:
        print('No existing fix to update, opening window for a new fix...')
        base_url = 'https://patcher.liferay.com/group/guest/patching/-/osb_patcher/fixes/create'
    else:
        print('Opening window to update fix %s...' % fix_id)
        base_url = 'https://patcher.liferay.com/group/guest/patching/-/osb_patcher/fixes/%s/edit' % fix_id
    product_version, project_version = get_baseline_id()
    origin_name = sys.argv[1]
    parameters = {
        'patcherProductVersionId': product_version,
        'patcherProjectVersionId': project_version,
        'committish': current_branch,
        'gitRemoteURL': origin_name
    }
    # Fix name: prefer the portal-provided name, then a lookup by id, then
    # the sorted set of LPE/LPP/LPS tickets mentioned in commit subjects.
    if fix_name is not None:
        parameters['patcherFixName'] = fix_name
    elif fix_id is not None:
        parameters['patcherFixName'] = get_fix_name_from_id(fix_id)
    else:
        pattern = re.compile('LP[EPS]-[0-9]*')
        fixes = set()
        for line in git.log('%s..%s' % (base_tag, 'HEAD'),
                            '--pretty=%s').split('\n'):
            fixes.update(pattern.findall(line))
        parameters['patcherFixName'] = ','.join(sorted(fixes))
    namespaced_parameters = get_namespaced_parameters(
        '1_WAR_osbpatcherportlet', parameters)
    # NOTE(review): values are not URL-encoded before joining — confirm the
    # portal tolerates raw committish/branch characters in the query string.
    query_string = '&'.join([
        '%s=%s' % (key, value)
        for key, value in namespaced_parameters.items()
    ])
    webbrowser.open_new_tab('%s?%s' % (base_url, query_string))
def get_candidate_fix_names():
    """Yield candidate comma-separated ticket lists for naming a fix.

    Yields the tickets embedded in the branch name first (when the branch
    is named after an LPE/LPP/LPS ticket), then the tickets mentioned in
    commit subjects since ``base_tag``.
    """
    pattern = re.compile('LP[EPS]-[0-9]*')
    if current_branch.startswith(('LPE-', 'LPP-', 'LPS-')):
        yield ','.join(sorted(pattern.findall(current_branch)))
    ticket_ids = set()
    for line in git.log('%s..%s' % (base_tag, 'HEAD'), '--pretty=%s').split('\n'):
        ticket_ids.update(pattern.findall(line))
    if ticket_ids:
        yield ','.join(sorted(ticket_ids))
def guess_bug_id(repo_dir, commit):
    # Extract candidate bug numbers from a commit message and let the user
    # interactively confirm or select them.  Returns a (possibly empty)
    # list of bug ids.  (Python 2: print statements, raw_input, has_key.)
    def _show_bugs():
        # Print an indexed menu of the candidate bugs.
        print "Possible bugs:"
        for i in range(0, len(possible_bug_ids)):
            print "%d) Bug %s: %s" % (i, possible_bug_ids[i],
                                      bug_summaries.get(possible_bug_ids[i],
                                                        "Closed bug"))
    msg = git.log(repo_dir, commit, number=1, pretty="%B")
    bug_ids = []
    possible_bug_ids = []
    bug_summaries = {}
    for pattern in bug_id_patterns:
        possible_bug_ids.extend([int(x) for x in pattern.findall(msg)])
    # NOTE(review): list(set(sorted(...))) discards the sort order — the
    # sort should wrap the set, not the other way round.  Left as-is since
    # this edit only adds comments.
    possible_bug_ids = list(set(sorted(possible_bug_ids)))
    for bug in possible_bug_ids:
        bug_data = bzapi.fetch_bug(bug, include_fields="summary")
        if bug_data.has_key("summary"):
            bug_summaries[bug] = bug_data["summary"]
    print "Commit %s has a body of:\n%s" % (commit, msg.strip())
    print "-" * 80
    if len(possible_bug_ids) == 1:
        # Single candidate: just ask yes/no.
        print "I only found one bug:\nBug %s -- %s" % (
            possible_bug_ids[0],
            bug_summaries.get(possible_bug_ids[0], "Closed bug"),
        )
        if util.ask_yn("Use it?"):
            bug_ids = possible_bug_ids
    elif len(possible_bug_ids) > 0:
        # Several candidates: loop until the user types 'done', accepting
        # menu indices or 'clear' to restart the selection.
        _show_bugs()
        index = None
        prompt = "index, 'clear' or 'done' %s: "
        _input = raw_input(prompt % str(bug_ids))
        while _input.strip().lower() != "done":
            if _input.strip().lower() == "clear":
                bug_ids = []
            else:
                try:
                    i = int(_input.strip())
                    if possible_bug_ids[i] in bug_ids:
                        print "Dude, this is already there!"
                    else:
                        bug_ids.append(possible_bug_ids[i])
                except:
                    # Non-numeric or out-of-range input: prompt again.
                    print "try again!"
                    pass
            _input = raw_input(prompt % str(bug_ids))
    else:
        print "There are no bugs to guess from"
    print "=" * 80
    return bug_ids
def list_generate(bad, good):
    """Collect notable commits between *bad* and *good*, bracket the list
    with the two endpoints themselves, and hand it to generate_html().

    A commit is notable when its subject's first token is an upper-case
    ticket id different from the previous notable commit's ticket.
    """
    notable_hashes = []
    previous_ticket = None
    log_lines = git.log('--date=short', '--pretty=%H %cd %s',
                        bad + '...' + good).split('\n')
    for line in log_lines:
        commit_hash, commit_date, ticket_id = line.split()[0:3]
        if ticket_id != ticket_id.upper() or ticket_id == previous_ticket:
            continue
        previous_ticket = ticket_id
        notable_hashes.append({
            'hash': commit_hash,
            'date': commit_date,
            'ticket': ticket_id,
            'status': None
        })
    bad_date, bad_ticket = get_hash_info(bad)
    good_date, good_ticket = get_hash_info(good)
    bad_entry = {'hash': bad, 'date': bad_date, 'ticket': bad_ticket, 'status': 'bad'}
    good_entry = {'hash': good, 'date': good_date, 'ticket': good_ticket, 'status': 'good'}
    # git log order is newest-first, so the descendant endpoint leads the
    # list and the ancestor endpoint closes it.
    if git.is_ancestor(bad, good):
        first_entry, last_entry = good_entry, bad_entry
    else:
        first_entry, last_entry = bad_entry, good_entry
    notable_hashes.insert(0, first_entry)
    notable_hashes.append(last_entry)
    generate_html(notable_hashes)
def checkHistory(self):
    """ Check history of known tests """
    # For every enabled Test of this repository, collect the recent commit
    # history of its folder and persist it via __saveCommits().  Unlike the
    # sibling variant, this one also disables tests whose folder vanished.
    git = self.__getGitCmd()
    # git log --decorate=full --since=1 --simplify-by-decoration /
    # --pretty=%H|%aN|%ae|%ai|%d --follow HEAD
    # NOTE(review): CHECK_COMMMITS_PREVIOUS_DAYS (triple "M") is the actual
    # settings key spelling.
    checkDays = int(settings.CHECK_COMMMITS_PREVIOUS_DAYS)
    if not checkDays:
        checkDays = 1  # always look at least one day back
    tests = Test.objects.filter(git=self, is_enable=True).only('folder')
    if len(tests) == 0:
        self.__getLog().warning("repository %s is empty (0 tests)" % self.name)
    else:
        # NOTE(review): "contiants" is a typo in the runtime log message;
        # left as-is since this edit only touches comments.
        self.__getLog().info("repository %s contiants %d tests" % (self.name, len(tests)))
    for test in tests:
        if not test.folder:
            self.__getLog().warning("The GIT folder for test '%s'"
                                    " is not declared." % test.name)
            continue
        path_dir = "%s/%s" % (self.path_absolute, test.folder)
        if not os.path.exists(path_dir):
            # Folder is gone: disable the test so it is skipped next run.
            self.__getLog().warning("Test %s doesn't exists" % path_dir)
            test.is_enable = False
            test.save()
            continue
        # One row per commit in "hash|author|email|date|decoration" form.
        rows = git.log('--decorate=full', '--since=%s.days' % checkDays,
                       '--simplify-by-decoration',
                       '--pretty=%H|%aN|%ae|%ai|%d', '--follow', 'HEAD',
                       "%s" % path_dir)\
            .split('\n')
        self.__saveCommits(test, rows)
def __call__(self): self.log.write('New changesets have been pushed.\n') # See if the push changed something in the master branch git.reset('origin/master') current_id = git.id() git.fetch() git.reset(self.revision) if current_id != self.revision: self.log.write('The master branch has new commits.\n') # Make a new development snapshot builder = snapshotbuilder.SnapshotBuilder(self.amqp_connection, self.revision, self.log, 'openclonk', False) # TODO: Remove all other snapshot builders from the queue self.queue.put(50, builder) # Also make a new mape build. In principle we could do this only if something in the # mape directory or any of the other files used by mape change, but let's keep it simple here. builder = snapshotbuilder.SnapshotBuilder(self.amqp_connection, self.revision, self.log, 'mape', False) # TODO: Remove all other snapshot builders from the queue self.queue.put(70, builder) # See if something in the docs directory has changed log = git.log('docs', current_id, self.revision, 'oneline') if len(log) > 1 or (len(log) == 1 and log[0] != current_id): # TODO: Remove all other doc builders from the queue builder = docbuilder.DocBuilder(self.revision, self.log) self.queue.put(80, builder) else: self.log.write('The master branch has no new commits.\n') # TODO: Make a release if the version changed return True
def _check_new_branch(self, branch, input_work_dir, output_work_dir,
                      output_reference):
    # Locate, in the output (pruned) tree, the mirrored commit that
    # corresponds to the merge-base of a newly created input branch.
    # Returns the output-tree commit hash, or raises when none of the
    # merge-base's first max_parents ancestors was mirrored.
    #
    # try to find merge-base in input tree,
    # assumes master branch is always mirrored
    #
    # this handles the case of having created a new branch,
    # and asking for that to be mirrored into the prune tree.
    base_id = git.merge_base('origin/' + self.master, 'origin/' + branch,
                             tree=input_work_dir)
    git.clone(output_reference, output_work_dir, options=['-q'])
    git.set_origin_url(self._output_tree, output_work_dir)
    # try to find the merge-base or its parent/grandparent/... in the
    # output tree - since it should have been branched from the master
    # (or in any case we look at the merge-base between master and it)
    # this should exist - HOWEVER: some commits might not be mirrored,
    # so look for the *parent [with a reasonable limit]
    for offset in range(0, self.max_parents):
        search = git.rev_parse(rev='%s~%d' % (base_id, offset),
                               tree=input_work_dir)
        self.debug('search for %s~%d=%s:' % (base_id, offset, search))
        # Mirrored commits embed the source id as "<prefix>: <hash>" in
        # their message; grep for exactly that marker.
        grep = '%s: %s' % (self._commit_id_prefix, search)
        out_commits = git.log(options=[
            '--grep', grep, '--format=format:%H',
            'origin/' + self.output_branch_name(self.master)
        ],
                              tree=output_work_dir)
        out_commits = out_commits.split()
        # Require exactly one match; zero or several means ambiguity, so
        # fall back to the next ancestor.
        if not out_commits or len(out_commits) > 1:
            self.debug('not found')
            continue
        start_commit = out_commits[0]
        self.debug('found at %s' % start_commit)
        return start_commit
    raise Exception('failed to find parent/grandparent/...')
def comment(repo_dir, branch_to, commit_range, dry_run=False):
    # For every commit in commit_range, guess which bugzilla bug(s) it fixes
    # and post a comment (plus branch flags) on each bug.  Commits with no
    # bug and bugs that fail to update are reported at the end.
    # (Python 2: print statements; bare except around the API call.)
    all_commits = git.log(repo_dir, commit_range, pretty="%H").strip().split("\n")
    comments = {}
    commits_without_bugs = []
    assert branch_to in git.branches(repo_dir), "branch parameter must be a branch"
    i = 0
    for commit in all_commits:
        i += 1
        print "bug %d of %d" % (i, len(all_commits))
        bug_ids = guess_bug_id(repo_dir, commit)
        if bug_ids is None or len(bug_ids) == 0:
            commits_without_bugs.append(commit)
        else:
            # Group commits by bug id so each bug gets a single comment.
            for bug_id in bug_ids:
                if not bug_id in comments.keys():
                    comments[bug_id] = []
                comments[bug_id].append(commit)
    failed_bugs = []
    for bug_id in comments.keys():
        comment = []
        flags = branch_logic.flags_to_set([branch_to])
        for commit in comments[bug_id]:
            comment.append("v1.1.0hd: %s" % commit)
        comment = "\n".join(comment)
        print "Commenting on bug %s with:\ntext: %s\nflags: %s" % (bug_id, comment, flags)
        if not dry_run:
            try:
                bzapi.update_bug(bug_id, comment=comment, values=flags)
            except:
                # Best-effort: remember the failure and keep going.
                failed_bugs.append(bug_id)
    print "The following commits do not have a bug associated with them:\n%s" % commits_without_bugs
    print "Failed to comment on the following bugs:\n%s" % failed_bugs
def run():
    """Print "<hash> <message>" for every entry in the repository log."""
    result = git.log()
    entries = [
        Entry.from_line(line)
        for line in result.stdout.split("\n")
        if line
    ]
    for entry in entries:
        sys.stdout.write(entry.commit_hash + " " + entry.message + "\n")
def plotCommitsByDate(git):
    """Render the commits-by-date plot for the given git wrapper."""
    history = git.log()
    history.showCommitsByDate()
def _check_new_branch(self, branch, input_work_dir, output_work_dir,
                      output_reference):
    # Locate, in the output (pruned) tree, the mirrored commit matching the
    # starting point of a newly created input branch.  Unlike the simpler
    # sibling variant, this one considers every already-mirrored branch as
    # a candidate base, not just master.
    #
    # This handles the case of having created a new branch,
    # and asking for that to be mirrored into the prune tree.
    #
    # Try to find a starting point in the input tree. We assume
    # this will basically always succeed, since the master branch
    # is always mirrored, we _should_ find something (but can fail
    # if somebody created a branch without a merge-base.)
    # unfortunately we now need to do this first, to sort out which
    # branches we already know in the output
    git.clone(output_reference, output_work_dir, options=['-q'])
    self.debug("trying to find starting point for new branch %s" % branch)
    # Keep only branches that already exist in the output tree.
    candidate_branches = []
    for other in self._branches:
        if other == branch:
            continue
        out_branch = 'origin/' + self.output_branch_name(other)
        try:
            git.rev_parse(out_branch, tree=output_work_dir)
        except:
            self.debug(" branch %s doesn't exist in output %s (yet)" % (out_branch, output_work_dir))
            continue
        candidate_branches.append(other)
    # Merge-base of the new branch against every candidate in the input tree.
    potential_merge_bases = []
    for other in candidate_branches:
        try:
            base = git.merge_base('origin/' + other, 'origin/' + branch,
                                  tree=input_work_dir)
            potential_merge_bases.append(base)
            self.debug(" base to %s is %s" % (other, base))
        except git.GitError:
            self.debug(" no base to %s" % (other, ))
    # Reduce to the single independent (newest) merge base; ambiguity aborts.
    bases = git.independent_commits(potential_merge_bases, tree=input_work_dir)
    self.debug("found starting points %s" % (", ".join(bases)))
    assert len(bases) == 1, "No single merge base found: %r" % bases
    base_id = bases[0]
    # Find which candidate branch actually contains that base commit.
    base_branch = None
    for other in candidate_branches:
        if git.merge_base('origin/' + other, base_id,
                          tree=input_work_dir) == base_id:
            base_branch = 'origin/' + self.output_branch_name(other)
            break
    assert base_branch, "This shouldn't happen, found no base branch?!"
    # try to find the merge-base or its parent/grandparent/... in the
    # output tree - since it should have been branched from the master
    # (or in any case we look at the merge-base between master and it)
    # this should exist - HOWEVER: some commits might not be mirrored,
    # so look for the *parent [with a reasonable limit]
    for offset in range(0, self.max_parents):
        search = git.rev_parse(rev='%s~%d' % (base_id, offset),
                               tree=input_work_dir)
        self.debug('search for %s~%d=%s:' % (base_id, offset, search))
        # Mirrored commits embed the source id as "<prefix>: <hash>".
        grep = '%s: %s' % (self._commit_id_prefix, search)
        out_commits = git.log(
            options=['--grep', grep, '--format=format:%H', base_branch],
            tree=output_work_dir)
        out_commits = out_commits.split()
        if not out_commits or len(out_commits) > 1:
            self.debug('not found')
            continue
        start_commit = out_commits[0]
        self.debug('found at %s' % start_commit)
        return start_commit
    raise Exception('failed to find parent/grandparent/...')
rb_id = get_review_board_id_if_present(issue_url, rb_link_title) # If no review board link found, create new review request and add its link to jira. if rb_id is None: reviews_url = "https://reviews.apache.org/api/review-requests/" data = {"repository" : "hbase-git"} r = requests.post(reviews_url, data = data, auth = rb_auth) assert_status_code(r, 201, "creating new review request") review_request = json.loads(r.content)["review_request"] absolute_url = review_request["absolute_url"] logger.info(" Created new review request: %s", absolute_url) # Use jira summary as review's summary too. summary = get_jira_summary(issue_url) # Use commit message as description. description = git.log("-1", pretty="%B") update_draft_data = {"bugs_closed" : [args.jira_id.upper()], "target_groups" : "hbase", "target_people" : args.reviewers, "summary" : summary, "description" : description } draft_url = review_request["links"]["draft"]["href"] r = requests.put(draft_url, data = update_draft_data, auth = rb_auth) assert_status_code(r, 200, "updating review draft") draft_request = json.loads(r.content)["draft"] diff_url = draft_request["links"]["draft_diffs"]["href"] files = {'path' : (patch_filename, open(patch_filepath, 'rb'))} r = requests.post(diff_url, files = files, auth = rb_auth) assert_status_code(r, 201, "uploading diff to review draft") r = requests.put(draft_url, data = {"public" : True}, auth = rb_auth) assert_status_code(r, 200, "publishing review request")
def getLogData(git):
    """Return per-commit added/removed word statistics derived from
    ``git log --numstat``."""
    log_output = git.log(opt='--numstat')
    return log_output.wordsAddRemInfo()
def test_log_normal():
    """A bare call builds the plain ``git log`` command line."""
    expected = ["git", "log"]
    assert log() == expected
def test_log_limit():
    """``limit`` becomes a ``-<n>`` argument."""
    expected = ["git", "log", "-10"]
    assert log(limit=10) == expected
def test_log_format():
    """``format`` becomes a ``--format=...`` argument."""
    expected = ["git", "log", "--format=%a"]
    assert log(format='%a') == expected
def test_log_selector():
    """A positional selector is passed through verbatim."""
    expected = ["git", "log", "selector"]
    assert log('selector') == expected
rb_id = get_review_board_id_if_present(issue_url, rb_link_title) # If no review board link found, create new review request and add its link to jira. if rb_id is None: reviews_url = "https://reviews.apache.org/api/review-requests/" data = {"repository": "hbase-git"} r = requests.post(reviews_url, data=data, auth=rb_auth) assert_status_code(r, 201, "creating new review request") review_request = json.loads(r.content)["review_request"] absolute_url = review_request["absolute_url"] logger.info(" Created new review request: %s", absolute_url) # Use jira summary as review's summary too. summary = get_jira_summary(issue_url) # Use commit message as description. description = git.log("-1", pretty="%B") update_draft_data = { "bugs_closed": [args.jira_id.upper()], "target_groups": "hbase", "target_people": args.reviewers, "summary": summary, "description": description } draft_url = review_request["links"]["draft"]["href"] r = requests.put(draft_url, data=update_draft_data, auth=rb_auth) assert_status_code(r, 200, "updating review draft") draft_request = json.loads(r.content)["draft"] diff_url = draft_request["links"]["draft_diffs"]["href"] files = {'path': (patch_filename, open(patch_filepath, 'rb'))} r = requests.post(diff_url, files=files, auth=rb_auth)
def find_git_commit(row):
    """Return *row* when some commit message across all refs mentions it
    (followed by a space); otherwise None."""
    matches = git.log("--all", f"--grep={row} ")
    return row if matches != "" else None