def find(query, components):
    conn = DB.getConn()
    c = conn.cursor()

    c.execute(query, components)
    commitrows = c.fetchall()
    commitfiles = []

    if commitrows:
        allcommitids = ",".join([str(int(commit[0])) for commit in commitrows])

        # This is poor practice, but we assured ourselves the value is composed only of ints first
        DB.execute(c, "SELECT * from " + DB.commitfile._table + " WHERE commitid IN (" + allcommitids + ")")
        commitfiles = c.fetchall()

        DB.execute(c, "SELECT * from " + DB.commitkeyword._table + " WHERE commitid IN (" + allcommitids + ")")
        commitkeywords = c.fetchall()

    commits = []
    for i in commitrows:
        r = Repo()
        r.loadFromValues(i[DB.commit._numColumns + 0], i[DB.commit._numColumns + 1], i[DB.commit._numColumns + 2],
                         i[DB.commit._numColumns + 3], i[DB.commit._numColumns + 4], i[DB.commit._numColumns + 5])

        files = [file[DB.commitfile.file] for file in commitfiles
                 if file[DB.commitfile.commitid] == i[DB.commit.id]]
        keywords = [keyword[DB.commitkeyword.keyword] for keyword in commitkeywords
                    if keyword[DB.commitkeyword.commitid] == i[DB.commit.id]]

        c = Commit()
        c.loadFromDatabase(r, i, files, keywords)
        commits.append(c)

    return commits
def parse_commit(self, raw_commit):
    commit_lines = raw_commit.split('\n')
    h, ae = commit_lines.pop(0).split('\t')
    commit = Commit(h, ae)
    for commit_line in commit_lines:
        insertions, deletions, path = commit_line.split('\t')
        commit.add_diffstat(insertions, deletions, path)
    return commit
def commits(self, start='master', path='', max_count=10, skip=0):
    """
    A list of Commit objects representing the history of a given ref/commit

    ``start`` is the branch/commit name (default 'master')
    ``path`` is an optional path to limit the returned commits to;
        commits that do not contain that path will not be returned.
    ``max_count`` is the maximum number of commits to return (default 10)
    ``skip`` is the number of commits to skip (default 0), which will
        effectively move your commit window by the given number.

    Returns ``git.Commit[]``
    """
    options = {'max_count': max_count, 'skip': skip}
    return Commit.find_all(self, start, path, **options)
def findByKeywords(keywords):
    conn = DB.getConn()
    c = conn.cursor()

    getcommitsSQL = "SELECT c.*, r.* " + \
                    "FROM " + DB.commit._table + " c " + \
                    "INNER JOIN " + DB.repo._table + " r " + \
                    "  ON r.id = c.repoid "
    whereClause = " 1=1 "
    components = []

    if keywords:
        keywordsTree = KeywordsParser(keywords)
        getcommitsSQL += "LEFT OUTER JOIN " + DB.commitkeyword._table + " ck " + \
                         "  ON c.id = ck.commitid "
        whereClause, components = keywordsTree.getWhereClause("ck.keyword", "r.tagname", "r.maturity")

    getcommitsSQL += "WHERE " + whereClause
    getcommitsSQL += "ORDER BY c.date DESC "

    c.execute(getcommitsSQL, components)
    commitrows = c.fetchall()
    commitfiles = []

    if commitrows:
        allcommitids = ",".join([str(int(commit[0])) for commit in commitrows])

        # This is poor practice, but we assured ourselves the value is composed only of ints first
        c.execute("SELECT * from " + DB.commitfile._table + " WHERE commitid IN (" + allcommitids + ")")
        commitfiles = c.fetchall()

    commits = []
    for i in commitrows:
        r = Repo()
        r.loadFromValues(i[DB.commit._numColumns + 0], i[DB.commit._numColumns + 1], i[DB.commit._numColumns + 2],
                         i[DB.commit._numColumns + 3], i[DB.commit._numColumns + 4], i[DB.commit._numColumns + 5])

        files = [file[DB.commitfile.file] for file in commitfiles
                 if file[DB.commitfile.commitid] == i[DB.commit.id]]

        c = Commit()
        c.loadFromDatabase(r, i, files)
        commits.append(c)

    return commits
def commit_diff(self, commit):
    """
    The commit diff for the given commit

    ``commit`` is the commit name/id

    Returns ``git.Diff[]``
    """
    return Commit.diff(self, commit)
def __getitem__(self, key):
    if isinstance(key, slice):
        li = []
        step = key.step
        if step:
            for idx, c in enumerate(Commit.get_raw_commits(self, key.start, key.stop)):
                if not (idx % step):
                    li.append(Commit(raw_commit=c, obj=self))
        else:
            for c in Commit.get_raw_commits(self, key.start, key.stop):
                li.append(Commit(raw_commit=c, obj=self))
        return li
    elif isinstance(key, str):
        return Commit(key, obj=self)
    else:
        raise TypeError(str(type(key)) + " " + str(key))
def getCommits(repo, startdate, enddate):
    end_rev = pysvn.Revision(pysvn.opt_revision_kind.date, enddate)
    start_rev = pysvn.Revision(pysvn.opt_revision_kind.date, startdate)

    c = pysvn.Client()
    commits = []
    msgs = c.log(repo.url, revision_start=start_rev, revision_end=end_rev, discover_changed_paths=True)
    msgs.reverse()
    for m in msgs:
        date = m.data['revprops']['svn:date']
        message = m.data['message']
        paths = [p.path for p in m.data['changed_paths']]

        c = Commit()
        c.loadFromSource(repo, message, date, paths, m.data['revision'].number)
        commits.append(c)
    return commits
def findByIDs(project, uniqueid):
    conn = DB.getConn()
    c = conn.cursor()

    getcommitsSQL = "SELECT c.*, r.* " + \
                    "FROM " + DB.commit._table + " c " + \
                    "INNER JOIN " + DB.repo._table + " r " + \
                    "  ON r.id = c.repoid "
    whereClause = " 1=1 "
    components = []

    if project and uniqueid:
        whereClause += "AND r.tagname = %s AND c.uniqueid = %s "
        components = [project, uniqueid]

    getcommitsSQL += "WHERE " + whereClause
    getcommitsSQL += "ORDER BY c.date DESC "

    DB.execute(c, getcommitsSQL, components)
    commitrows = c.fetchall()
    commitfiles = []

    if commitrows:
        allcommitids = ",".join([str(int(commit[0])) for commit in commitrows])

        # This is poor practice, but we assured ourselves the value is composed only of ints first
        DB.execute(c, "SELECT * from " + DB.commitfile._table + " WHERE commitid IN (" + allcommitids + ")")
        commitfiles = c.fetchall()

    commits = []
    for i in commitrows:
        r = Repo()
        r.loadFromValues(i[DB.commit._numColumns + 0], i[DB.commit._numColumns + 1], i[DB.commit._numColumns + 2],
                         i[DB.commit._numColumns + 3], i[DB.commit._numColumns + 4], i[DB.commit._numColumns + 5])

        files = [file[DB.commitfile.file] for file in commitfiles
                 if file[DB.commitfile.commitid] == i[DB.commit.id]]

        c = Commit()
        c.loadFromDatabase(r, i, files)
        commits.append(c)

    return commits
def getCommits(repo, startdate, enddate):
    localfolder = urlToFolder(repo.url)
    differ = gdiff.diff_match_patch()

    repoloc = "git-repos/" + localfolder + "/"
    if os.path.exists(repoloc):
        c = pygit.Repo(repoloc)
    else:
        os.makedirs(repoloc)
        c = pygit.Repo.init(repoloc)
        c.create_remote("origin", repo.url)
    c.remotes.origin.fetch()
    c.remotes.origin.pull("master")

    commits = []
    msgs = c.iter_commits(since=unixToGitDateFormat(startdate))
    for m in msgs:
        if m.committed_date > enddate:
            continue

        alldiffs = []
        for d in m.diff("HEAD~1").iter_change_type("M"):  # Changed
            left = d.a_blob.data_stream.read()
            right = d.b_blob.data_stream.read()
            diffs = differ.diff_main(left, right)
            if diffs:
                differ.diff_cleanupSemantic(diffs)
                for d in diffs:
                    if d[0] != 0 and d[1].strip():
                        alldiffs.append(d)
        for d in m.diff().iter_change_type("A"):  # Added
            pass
        for d in m.diff().iter_change_type("D"):  # Deleted
            pass
        for d in m.diff().iter_change_type("R"):  # Renamed
            pass

        c = Commit()
        c.loadFromSource(repo, m.message, m.committed_date, m.stats.files.keys(), m.__str__(), alldiffs)
        commits.append(c)
    return commits
def test_commit(data, expected_date, is_pass):
    if not is_pass:
        with pytest.raises(Exception) as e_info:  # Expecting an exception
            commit = Commit(data)
            commit.get_date()
    else:  # Shouldn't get an exception
        commit = Commit(data)
        assert commit.get_date() == expected_date, "Date or the format did not match " \
                                                   "the expected date: %s" % expected_date
def parse(cls, json_as_text):
    l = []
    data = json.loads(json_as_text)
    for item in data:
        l.append({
            'uri': item['uri'],
            'name': item['name'],
            'activity': item['activity'],
            'latest': Commit.from_dict(item['latest'])
        })
    return l
def __init__(self, git_tag, _files):
    self._commit = Commit.init_commit_by_git_commit(git_tag.commit, files=[git_tag.name])
    self._name = os.path.normpath(git_tag.name)
    self.committed_files = _files
    self.files = set(
        filter(
            lambda x: x.endswith(".java"),
            git_tag.commit.repo.git.ls_tree("-r", "--name-only", git_tag.name).split()))
    self.version_files = None
def commit_count(self, start='master'):
    """
    The number of commits reachable by the given branch/commit

    ``start`` is the branch/commit name (default 'master')

    Returns int
    """
    return Commit.count(self, start)
def process_commands(server):
    while True:
        if commands.qsize() > 0:
            command = commands.get()
            try:
                commit = Commit(command[0])
                log_add(commit)
                commits.put((commit, command[1]))
            except ValueError as e:
                response = str(e)
                server.sendto(response.encode(), command[1])
def commit_deltas_from(self, other_repo, ref='master', other_ref='master'): """ Returns a list of commits that is in ``other_repo`` but not in self Returns git.Commit[] """ repo_refs = self.git.rev_list(ref, '--').strip().splitlines() other_repo_refs = other_repo.git.rev_list(other_ref, '--').strip().splitlines() diff_refs = list(set(other_repo_refs) - set(repo_refs)) return map(lambda ref: Commit.find_all(other_repo, ref, max_count=1)[0], diff_refs)
def _get_commits_files(repo):
    data = repo.git.log('--numstat', '--pretty=format:"sha: %H"').split("sha: ")
    comms = {}
    for d in data[1:]:
        d = d.replace('"', '').replace('\n\n', '\n').split('\n')
        commit_sha = d[0]
        comms[commit_sha] = []
        for x in d[1:-1]:
            insertions, deletions, name = x.split('\t')
            names = Commit.fix_renamed_files([name])
            comms[commit_sha].extend(list(map(lambda n: CommittedFile(commit_sha, n, insertions, deletions), names)))
    return dict(map(lambda x: (repo.commit(x), comms[x]), filter(lambda x: comms[x], comms)))
def getCommits(repo, startdate, enddate):
    localfolder = urlToFolder(repo.url)

    repoloc = 'git-repos/' + localfolder + '/'
    if os.path.exists(repoloc):
        c = pygit.Repo(repoloc)
    else:
        os.makedirs(repoloc)
        c = pygit.Repo.init(repoloc)
        c.create_remote('origin', repo.url)
    c.remotes.origin.fetch()
    c.remotes.origin.pull('master')

    commits = []
    msgs = c.iter_commits(since=unixToGitDateFormat(startdate))
    for m in msgs:
        if m.committed_date > enddate:
            continue

        c = Commit()
        c.loadFromSource(repo, m.message, m.committed_date, m.stats.files.keys(), m.__str__())
        commits.append(c)
    return commits
def fetch_commits(autogen=False):
    """
    Yields all fully-processed commits.
    """
    cursor = conn.execute(r'''
        SELECT repo, sha, time, message, status, perplexity
        FROM {view}
    '''.format(view='commits_autogen' if autogen else 'commits'))
    for row in cursor:
        yield Commit(*row)
def getCommits(repo, startdate, enddate):
    end_rev = pysvn.Revision(pysvn.opt_revision_kind.date, enddate)
    start_rev = pysvn.Revision(pysvn.opt_revision_kind.date, startdate)

    c = pysvn.Client()
    commits = []
    msgs = c.log(repo.url, revision_start=start_rev, revision_end=end_rev, discover_changed_paths=True)
    msgs.reverse()
    for m in msgs:
        date = m.data['revprops']['svn:date']
        message = m.data['message']
        paths = [p.path for p in m.data['changed_paths']]

        c = Commit()
        c.loadFromSource(repo, message, date, paths, m.data['revision'].number, [])
        commits.append(c)
    return commits
def extract_files_commits(obj_git):
    data = obj_git.connect.git.log(
        '--numstat', '--name-status',
        '--pretty=format:"sha: %H parents: %P"').split("sha: ")
    comms = {}
    files = {}
    for d in data[1:]:
        d = d.replace('"', '').replace('\n\n', '\n').split('\n')
        sha, parent = d[0].split(" parents: ")
        commit_sha = sha
        comms[commit_sha] = [parent.split(" "), []]
        for x in d[1:-1]:
            try:
                split = x.split('\t')
                status, name = split[0], split[1:]
                if len(name) == 1:
                    name = name[0]
                if status.startswith("R"):
                    if name[0].endswith(".java") and not name[0].endswith("Test.java"):
                        if len(parent.split(" ")) > 1:
                            print(f"{name[0]}")
                        name = Commit.fix_renamed_files(name)
                        comms[commit_sha][1].extend([name])
                        # files.setdefault(name, []).append(commit_sha)
                elif name.endswith(".java") and not name.endswith("Test.java"):
                    if status != 'A' and status != "D":
                        if len(parent.split(" ")) > 1:
                            print(f"{name}")
                        name = Commit.fix_renamed_files([name])[0]
                        comms[commit_sha][1].extend([name])
                        # files.setdefault(name, []).append(commit_sha)
            except Exception as e:
                print(e)
                print(x)
                pass
    return dict(
        map(lambda x: (x, comms[x]),
            filter(lambda x: comms[x][1], comms))), files
def commit_count(self, start='master', path=''): """ The number of commits reachable by the given branch/commit ``start`` is the branch/commit name (default 'master') ``path`` is an optinal path Returns int """ return Commit.count(self, start, path)
def fetch_commit_by_sha(sha):
    cursor = conn.execute(
        '''
        SELECT repo, sha, time, message, status, perplexity
        FROM commits
        WHERE sha = ?
        ''', (sha, ))
    try:
        return Commit(*cursor.fetchone())
    except TypeError:
        raise KeyError(sha)
def log(self, commit='master', path=None, **kwargs): """ The commit log for a treeish Returns ``git.Commit[]`` """ options = {'pretty': 'raw'} options.update(kwargs) arg = [commit, '--'] if path: arg.append(path) commits = self.git.log(*arg, **options) return Commit.list_from_string(self, commits)
def commit_deltas_from(self, other_repo, ref='master', other_ref='master'): """ Returns a list of commits that is in ``other_repo`` but not in self Returns ``git.Commit[]`` """ repo_refs = self.git.rev_list(ref, '--').strip().splitlines() other_repo_refs = other_repo.git.rev_list(other_ref, '--').strip().splitlines() diff_refs = list(set(other_repo_refs) - set(repo_refs)) return map( lambda ref: Commit.find_all(other_repo, ref, max_count=1)[0], diff_refs)
def commit_count(self, start='master', path=''): """ The number of commits reachable by the given branch/commit ``start`` is the branch/commit name (default 'master') ``path`` is an optional path Commits that do not contain the path will not contribute to the count. Returns ``int`` """ return Commit.count(self, start, path)
def commits_between(self, frm, to):
    """
    The Commit objects that are reachable via ``to`` but not via ``frm``.
    Commits are returned in chronological order.

    ``frm`` is the branch/commit name of the younger item
    ``to`` is the branch/commit name of the older item

    Returns ``git.Commit[]``
    """
    return reversed(Commit.find_all(self, "%s..%s" % (frm, to)))
def get_commits(data):
    sep = 72 * '-'
    index = 0
    commits = []
    while index < len(data) and index >= 0:
        if index + 1 >= len(data):
            break
        details = data[index + 1].split(' | ')
        changes = get_changes(data, index + 3)
        comment = get_comment(data, index + 3)
        commits.append(Commit(details, changes, comment))
        index = data.index(sep, index + 1)
    return commits
def fetch_commits_by_repo(repo_name):
    cursor = conn.execute(
        r'''
        SELECT repo, sha, time, message
        FROM commits_raw
        WHERE repo = :repo
        ''', {'repo': repo_name})
    for row in cursor:
        yield Commit(repo=row[0],
                     sha=row[1],
                     time=row[2],
                     message=row[3],
                     status=None,
                     perplexity=None)
def process_commands(server):
    while True:
        command = commands.get(block=True)
        try:
            commit = Commit(command[0])
            log_add(commit)
            commits.put((commit, command[1], command[2]))
            Redes3.grpc_log.append(commit)
            Listen_Udp.upd_log.append(commit)
        except ValueError as e:
            response = str(e)
            if command[1] is None:
                print(response)
            else:
                server.sendto(response.encode(), command[1])
def commits_since(self, start='master', since='1970-01-01'):
    """
    The Commit objects that are newer than the specified date.
    Commits are returned in chronological order.

    ``start`` is the branch/commit name (default 'master')
    ``since`` is a string representing a date/time

    Returns ``git.Commit[]``
    """
    options = {'since': since}
    return Commit.find_all(self, start, **options)
def commit(self, id): """ The Commit object for the specified id ``id`` is the SHA1 identifier of the commit Returns git.Commit """ options = {'max_count': 1} commits = Commit.find_all(self, id, **options) if not commits: raise ValueError, 'Invalid identifier %s' % id return commits[0]
def log(self, commit='master', path=None, **kwargs): """ The Commit for a treeish, and all commits leading to it. ``kwargs`` keyword arguments specifying flags to be used in git-log command, i.e.: max_count=1 to limit the amount of commits returned Returns ``git.Commit[]`` """ options = {'pretty': 'raw'} options.update(kwargs) arg = [commit, '--'] if path: arg.append(path) commits = self.git.log(*arg, **options) return Commit.list_from_string(self, commits)
def write_to_json_commits_per_day(token, day):
    """
    Parameters:
        token: authorization token
        day: day of commits we want to retrieve
    """
    begin = day.replace(hour=0, minute=0, second=0)
    begin = begin.strftime("%Y-%m-%dT%H:%M:%SZ")
    end = day.replace(hour=23, minute=59, second=59)
    end = end.strftime("%Y-%m-%dT%H:%M:%SZ")
    file_output = day.strftime("%Y%m%d")

    commits_list = []
    i = 1
    while True:
        params = {
            "per_page": "100",
            "page": str(i),
            "since": begin,
            "until": end
        }
        url = "https://api.github.com/repos/facebook/react/commits"
        commits = make_request(url, token, params=params)
        print(commits)
        if not commits:
            break
        i += 1
        nb_commits = 0
        for commit in commits:
            sha = commit["sha"]
            date = commit["commit"]["author"]["date"]
            contributor = get_contributor_from_dict(commit["author"])
            commit_obj = Commit(sha, date, contributor)
            commits_list.append(commit_obj)
            nb_commits += 1
        if nb_commits < 100:
            break

    write_to_json("commits/" + file_output + ".json", commits_list)
def get_commits_since_date(repo, commit_date):
    commits_url = f"https://api.github.com/repos/joankilleen/{repo}/commits?since={commit_date}"
    print(commits_url)

    # Get all commits for the date from GitHub
    response = github_calls.get_from_gitgub(url=commits_url)
    if response.status_code != 200:
        raise Exception(
            f"Commits could not be read from GitHub {response.status_code}")
    json = response.json()
    commits = []

    # Iterate over commits
    for json_object in json:
        sha = json_object['sha']
        commit_detail_url = f"https://api.github.com/repos/joankilleen/{repo}/commits/{sha}"

        # Get details of the commit from GitHub
        reponse_for_commit = github_calls.get_from_gitgub(url=commit_detail_url)
        print(f"{reponse_for_commit.status_code}")
        if reponse_for_commit.status_code != 200:
            raise Exception(
                f"Commit {sha} could not be read from GitHub {reponse_for_commit.status_code}")

        # Read the changed files in the commit
        files = reponse_for_commit.json()['files']
        filenames = []
        for file in files:
            print(file['filename'])
            filenames.append(file['filename'])

        # Create a Commit object
        commit = Commit(sha, filenames)
        print(commit.sha)
        commits.append(commit)

    print(len(commits))
    return commits
def commit(self, id, path=''):
    """
    The Commit object for the specified id

    ``id`` is the SHA1 identifier of the commit
    ``path`` is an optional path, if set the returned commit must contain the path.

    Returns ``git.Commit``
    """
    options = {'max_count': 1}
    commits = Commit.find_all(self, id, path, **options)
    if not commits:
        raise ValueError("Invalid identifier %s, or given path '%s' too restrictive" % (id, path))
    return commits[0]
def commits(self, start='master', max_count=10, skip=0):
    """
    A list of Commit objects representing the history of a given ref/commit

    ``start`` is the branch/commit name (default 'master')
    ``max_count`` is the maximum number of commits to return (default 10)
    ``skip`` is the number of commits to skip (default 0)

    Returns ``git.Commit[]``
    """
    options = {'max_count': max_count, 'skip': skip}
    return Commit.find_all(self, start, **options)
def commit(self, id, path=''): """ The Commit object for the specified id ``id`` is the SHA1 identifier of the commit ``path`` is an optinal path Returns git.Commit """ options = {'max_count': 1} commits = Commit.find_all(self, id, path, **options) if not commits: raise ValueError, 'Invalid identifier %s' % id return commits[0]
def commits_since(self, start='master', path='', since='1970-01-01'):
    """
    The Commit objects that are newer than the specified date.
    Commits are returned in chronological order.

    ``start`` is the branch/commit name (default 'master')
    ``path`` is an optional path
    ``since`` is a string representing a date/time

    Returns ``git.Commit[]``
    """
    options = {'since': since}
    return Commit.find_all(self, start, path, **options)
def commit(self, id, path=''): """ The Commit object for the specified id ``id`` is the SHA1 identifier of the commit ``path`` is an optional path, if set the returned commit must contain the path. Returns ``git.Commit`` """ options = {'max_count': 1} commits = Commit.find_all(self, id, path, **options) if not commits: raise ValueError, "Invalid identifier %s, or given path '%s' too restrictive" % ( id, path) return commits[0]
def from_string(cls, repo, line):
    """
    Create a new Tag instance from the given string.

    ``repo`` is the Repo
    ``line`` is the formatted tag information

    Format
        name: [a-zA-Z_/]+
        <null byte>
        id: [0-9A-Fa-f]{40}

    Returns ``git.Tag``
    """
    full_name, ids = line.split("\x00")
    name = full_name.split("/")[-1]
    commit = Commit(repo, id=ids)
    return Tag(name, commit)
def app(environ, start_response):
    """
    try
        http://localhost:8887
        http://localhost:8887/?{path}
    """
    status = '200 OK'
    headers = []
    path = environ['RAW_URI']
    [path, query] = (path.split('?') if '?' in path else [path, None])

    if path.endswith('.js'):
        headers.append(('content-type', 'application/javascript'))
        headers.append(('content-encoding', 'gzip'))
        buffer = StringIO()
        start_response(status, headers)
        content = open('..' + path).read()
        with gzip.GzipFile(mode='wb', compresslevel=6, fileobj=buffer) as f:
            f.write(content)
        return [buffer.getvalue()]

    headers.append(('content-type', 'text/html'))
    start_response(status, headers)
    data = generate_graph_data(Commit.gets(query or root))
    return engine.render('index.html', {'data': data})
def commits(self, start='master', path='', max_count=10, skip=0): """ A list of Commit objects representing the history of a given ref/commit ``start`` is the branch/commit name (default 'master') ``path`` is an optional path ``max_count`` is the maximum number of commits to return (default 10) ``skip`` is the number of commits to skip (default 0) Returns ``git.Commit[]`` """ options = {'max_count': max_count, 'skip': skip} return Commit.find_all(self, start, path, **options)
def commitFromString(self, lines):
    message = False
    messagetext = []
    parents = None
    commitid = None

    for line in lines:
        tokens = line.split(' ')
        if message:
            if line.startswith("\t") or line.startswith(" "):
                messagetext.append(line.strip())
            else:
                message = False
        else:
            if tokens[0] == 'commit':
                commitid = tokens[1]
            if tokens[0] == 'tree':
                tree = tokens[1]
            if tokens[0] == 'parent':
                if len(tokens) > 1:
                    parents = [t for t in tokens[1:] if t != ""]
            elif tokens[0] == 'author':
                author = " ".join(tokens[1:-3])
                authordate = datetime.datetime.fromtimestamp(
                    (int(tokens[-2]) - int(tokens[-1])) // 1000)
            elif tokens[0] == 'committer':
                committer = tokens[1]
                committerdate = datetime.datetime.fromtimestamp(
                    (int(tokens[-2]) - int(tokens[-1])) // 1000)
            elif tokens[0] == 'message':
                message = True

    if commitid is not None:
        c = Commit(self.repo, commitid, tree, parents,
                   "\n".join(messagetext), author, authordate,
                   committer, committerdate)
        return c
    else:
        return None
def generateCommitsFunction(nCommits: int):
    """Generates mock commit objects array"""
    commits = []
    ORIGINAL_EXPECTED_DATE_STRING = '2021-01-17T09:11:55Z'
    EXPECTED_DATE = datetime.strptime(ORIGINAL_EXPECTED_DATE_STRING, "%Y-%m-%dT%XZ")
    for i in range(nCommits):
        EXPECTED_DATE_STRING = (
            EXPECTED_DATE + timedelta(seconds=(i * 30))).strftime("%Y-%m-%dT%XZ")
        CORRECT_COMMIT_RESPONSE_DICT = {
            "commit": {
                "committer": {
                    "date": EXPECTED_DATE_STRING
                }
            }
        }
        commits.append(Commit(CORRECT_COMMIT_RESPONSE_DICT))

    def get_commits():
        return commits

    return get_commits
def fetch_commit(repo=None, sha=None):
    """
    Fetch a single commit by its repository name and SHA, or simply its SHA.
    """
    if not repo and not sha:
        raise ValueError('Must provide either repo or sha')
    if not repo:
        return fetch_commit_by_sha(sha)
    cursor = conn.execute(
        '''
        SELECT repo, sha, time, message, status, perplexity
        FROM commits
        WHERE repo = ? AND sha = ?
        ''', (repo, sha))
    try:
        return Commit(*cursor.fetchone())
    except TypeError:
        raise KeyError(sha)
def get_commits(github_user, fetch=False, write_to_file=False):
    password = get_password(github_user)
    if fetch:
        base_url = "https://api.github.com"
        events_url_tail = '/users/' + github_user + '/events'
        response = requests.get(base_url + events_url_tail, auth=(github_user, password))
        events = response.content
        if write_to_file:
            with open('akaptur_events.txt', 'w') as f:
                f.write(events)
        # events = json.load(events)  # probably doesn't work
    else:
        with open('akaptur_events.txt') as f:
            events = json.load(f)

    my_commits = []
    print("Total events:", len(events))
    for event in events[:10]:
        print("Event type:", event['type'])
        pdb.set_trace()
        if event['type'] == "PushEvent":
            commits = event['payload']['commits']
            print(len(commits))
            for comm in commits:
                c = Commit()
                c.url = comm['url']
                # per-event API call :(
                commit_response = requests.get(c.url, auth=(github_user, password))
                commit_data = json.loads(commit_response.content)
                c.additions = commit_data['stats']['additions']
                c.deletions = commit_data['stats']['deletions']
                c.timestamp = commit_data['commit']['author']['date']
                c.message = commit_data['commit']['message']
                my_commits.append(c)
    return my_commits
    workspace = Workspace(os.getcwd())
    database = Database(db_path)
    entries = list()

    for name in workspace.list_files():
        data = workspace.read_file(name)
        blob = Blob(data)
        database.store(blob)
        entries.append(Entry(name, blob.oid))

    tree = Tree(entries)
    database.store(tree)

    name = os.environ['GIT_AUTHOR_NAME']
    email = os.environ['GIT_AUTHOR_EMAIL']
    author = Author(name, email, time())
    message = sys.stdin.read()

    commit = Commit(tree.oid, author, message)
    database.store(commit)

    with open(os.path.join(git_path, 'HEAD'), 'w+') as file:
        file.write(commit.oid)

    print(f'[(root-commit) {commit.oid}] {message.splitlines()[0]}')
    sys.exit(0)
else:
    print(f'git: {command!r} is not a git command.', file=sys.stderr)
    sys.exit(1)
def prover():
    if len(sys.argv) < 3:
        printUsage()
        return 1

    transcriptLocation = "."
    if len(sys.argv) == 4:
        transcriptLocation = sys.argv[3]
    if not os.path.exists(transcriptLocation):
        print("%s doesn't exist." % transcriptLocation)
        print("Transcripts cannot be written. Exiting.")
        return 1

    # Get Named Pipes
    pipeRd, pipeWr = getNamedPipes()

    # Parse Input Files
    commonInputFile = sys.argv[1]
    proverInputFile = sys.argv[2]
    g1, g2 = process.parse_input_file(commonInputFile)
    subgraphInducer, pi_original = process.parse_prover_input_file(proverInputFile)

    # Exchange Iteration Count and Identifier
    iterCount = int(process.readPipe(pipeRd))
    print("Number of Iterations: %d\n" % iterCount)
    uid = process.readPipe(pipeRd).rstrip()

    for iteration in range(0, iterCount):
        # Protocol/Iteration Transcript
        fname = "%s/transcript_prover_iter_%d_%s.txt" % (transcriptLocation, iteration, uid)
        fp = open(fname, 'w')
        print("\n\nIteration number " + str(iteration))
        print("Transcript is being written to file %s" % fname)

        # Generate random Isomorphism alpha and matrix Q=Alpha(G2)
        alpha = process.get_random_isomorphism(len(g2))
        q = process.get_isomorphic_graph(g2, alpha)
        print("Generated Random Isomorphism Alpha")

        # Send Commitment
        commitQ = Commit(q)
        (commitmentQ, randomAQ) = commitQ.getCommitment()
        fp.write("Commitment of matrix Q:\n" + commit.prettyPrintMatrix(commitmentQ) + "\n\n")
        fp.write("Matrix randomA:\n" + commit.prettyPrintMatrix(randomAQ) + "\n\n")
        process.writePipe(pipeWr, json.dumps(commitmentQ) + "\n")
        process.writePipe(pipeWr, json.dumps(randomAQ) + "\n")
        print("Committed to Q")

        # Get Coin Toss
        coin_toss = process.readPipe(pipeRd).rstrip()
        fp.write("Coin toss Result: " + coin_toss + "\n")
        print("Coin Toss Received '%s'" % coin_toss)

        if coin_toss == 'h':  # Heads
            # Reveal Isomorphism Alpha and secret random commitment matrix randomBQ
            randomBQ = commitQ.revealCommitment()
            process.writePipe(pipeWr, json.dumps(alpha) + "\n")
            process.writePipe(pipeWr, json.dumps(randomBQ) + "\n")
            fp.write("Revealed Isomorphism alpha \n" + str(alpha) + "\n")
            fp.write("Revealed matrix randomBQ \n" + commit.prettyPrintMatrix(randomBQ) + "\n\n")
            print("Revealed Isomorphism Alpha and secret commitment matrix RandomB")
        else:  # Tails otherwise
            # Calculate Isomorphism Pi
            pi, qP = process.get_iso_and_iso_subgraph(g1, g2, subgraphInducer, pi_original, alpha, q)

            # Calculate partial secret random commitment matrix randomBQ_partial
            subgraph_bool_matrix = process.get_boolean_matrix(
                q, process.apply_iso_on_subgph_indc(subgraphInducer, alpha))
            randomBQ_partial = commitQ.revealCommitment(subgraph_bool_matrix)

            # Send Isomorphism Pi, partial secret random commitment matrix randomBQ_partial
            # and partial subgraph operator
            process.writePipe(pipeWr, json.dumps(pi) + "\n")
            process.writePipe(pipeWr, json.dumps(randomBQ_partial) + "\n")
            process.writePipe(pipeWr, json.dumps(
                process.apply_iso_on_subgph_indc(subgraphInducer, alpha)["VD"]) + "\n")
            fp.write("Revealed Isomorphism Pi \n" + str(pi) + "\n")
            fp.write("Revealed Partial matrix randomBQ_partial \n" +
                     commit.prettyPrintMatrix(randomBQ_partial) + "\n")
            fp.write("Revealed Vertex Deletion Info \n" +
                     str(process.apply_iso_on_subgph_indc(subgraphInducer, alpha)["VD"]) + "\n")
            print("Revealed Subgraph Isomorphism Pi, Partial Random Commitment matrix RandomB "
                  "and Vertex Deletion Information")
def commit(self):
    if self._commit is None:
        lines = self.repo.connector.cat(self.tagid).split("\n")
        commitid = lines[3][-40:]
        self._commit = Commit.fromref(self.repo, commitid)
    return self._commit