def obj_sha(type, chunks):
    """Compute the SHA over a git object given as a header plus chunks.

    :param type: Numeric object type (used to build the object header)
    :param chunks: Iterable of byte chunks making up the object body
    :return: 20-byte binary SHA1 digest
    """
    digest = make_sha()
    digest.update(object_header(type, chunks_length(chunks)))
    for piece in chunks:
        digest.update(piece)
    return digest.digest()
def sha(self):
    """The SHA1 object that is the name of this object.

    :return: A hash object covering the object's header and raw contents.
    """
    if self._needs_serialization or self._sha is None:
        # Build the hash into a local and assign only once complete:
        # as_raw_string() may trigger serialization, which can reset
        # self._sha mid-computation (the chunk-based sha() sibling uses
        # this same local-variable pattern for exactly that reason).
        new_sha = make_sha()
        new_sha.update(self._header())
        new_sha.update(self.as_raw_string())
        self._sha = new_sha
    return self._sha
def iter_sha1(iter):
    """Return the hexdigest of the SHA1 over a set of names.

    :param iter: Iterator over string objects
    :return: 40-byte hex sha1 digest
    """
    digest = make_sha()
    for item in iter:
        digest.update(item)
    return digest.hexdigest()
def calculate_checksum(self):
    """Calculate the checksum for this pack.

    :return: 20-byte binary SHA1 digest
    """
    data, start = simple_mmap(self._file, 0, self._size - 20)
    try:
        # NOTE(review): the slice start is map-relative but the end is an
        # absolute file offset; this only lines up if the map begins at
        # file offset 0 — confirm against simple_mmap's contract.
        return make_sha(data[start:self._size - 20]).digest()
    finally:
        data.close()
def sha(self):
    """The SHA1 object that is the name of this object."""
    if self._sha is None or self._needs_serialization:
        # Accumulate into a local: as_raw_chunks() overwrites self._sha
        # while serializing, so self._sha must not be assigned until the
        # hash is fully built.
        hasher = make_sha()
        hasher.update(self._header())
        for piece in self.as_raw_chunks():
            hasher.update(piece)
        self._sha = hasher
    return self._sha
def calculate_checksum(self):
    """Calculate the checksum for this pack.

    :return: 20-byte binary SHA1 digest
    """
    # Hash everything except the trailing 20-byte checksum itself.
    mmapped, offset = simple_mmap(self._file, 0, self._size - 20)
    try:
        return make_sha(mmapped[offset:self._size - 20]).digest()
    finally:
        mmapped.close()
def __init__(self, read_all, read_some=None):
    """Initialise the reader around a pair of read callables.

    :param read_all: Callable that reads the requested number of bytes.
    :param read_some: Callable that may return fewer bytes than requested;
        falls back to ``read_all`` when not supplied.
    """
    self.read_all = read_all
    self.read_some = read_all if read_some is None else read_some
    self.sha = make_sha()
    self._offset = 0
    self._rbuf = StringIO()
    # trailer is a deque to avoid memory allocation on small reads
    self._trailer = deque()
def calculate_checksum(self):
    """Calculate the checksum for this pack.

    :return: 20-byte binary SHA1 digest
    :raise AssertionError: if the underlying file ends before the
        expected amount of data has been read (truncated pack).
    """
    s = make_sha()
    self._file.seek(0)
    todo = self._get_size() - 20
    while todo > 0:
        x = self._file.read(min(todo, 1 << 16))
        if not x:
            # Guard against a truncated file: without this check an empty
            # read leaves `todo` unchanged and the loop spins forever.
            raise AssertionError('unexpected EOF while checksumming pack')
        s.update(x)
        todo -= len(x)
    return s.digest()
def __init__(self, f):
    """Wrap file object *f* and initialise an empty SHA1 accumulator."""
    self.f = f
    self.sha1 = make_sha("")
def calculate_checksum(self):
    """Calculate the SHA1 checksum over this pack index.

    :return: This is a 20-byte binary digest
    """
    # The final 20 bytes are the stored checksum; hash everything before it.
    return make_sha(self._contents[:-20]).digest()
def _make_sha(self):
    """Build and return a SHA1 object over this object's header and chunks."""
    hasher = make_sha()
    hasher.update(self._header())
    for fragment in self.as_raw_chunks():
        hasher.update(fragment)
    return hasher
def write_git_tree(self, ctx):
    """Write git blob and tree objects for a Mercurial changectx.

    Walks the manifest of ``ctx``, writing any blobs not already in the
    git database, then builds and writes the tree objects from the
    deepest directory up to the root.

    NOTE(review): indentation below is reconstructed from a
    whitespace-collapsed source; statement nesting inferred from data
    flow — confirm against upstream.

    :param ctx: changelog context whose manifest is exported
    :return: tuple of (root tree SHA, list of (old name, new name) renames)
    """
    trees = {}  # directory path ('/'-terminated) -> list of entries
    man = ctx.manifest()
    ctx_id = hex(ctx.node())
    renames = []
    for filenm, nodesha in man.iteritems():
        file_id = hex(nodesha)
        # Remember this revision's file ids so children can detect
        # unchanged files.
        if ctx_id not in self.previous_entries:
            self.previous_entries[ctx_id] = {}
        self.previous_entries[ctx_id][filenm] = file_id
        # write blob if not in our git database
        fctx = ctx.filectx(filenm)
        # Did any parent revision carry this file with the same id?
        same_as_last = False
        for par in ctx.parents():
            par_id = hex(par.node())
            if par_id in self.previous_entries:
                if filenm in self.previous_entries[par_id]:
                    if self.previous_entries[par_id][filenm] == file_id:
                        same_as_last = True
        if not same_as_last:
            # Only newly-changed files can introduce a rename.
            rename = fctx.renamed()
            if rename:
                filerename, sha = rename
                renames.append((filerename, filenm))
        is_exec = 'x' in fctx.flags()
        is_link = 'l' in fctx.flags()
        blob_sha = self.map_git_get(file_id)
        if not blob_sha:
            blob_sha = self.git.write_blob(fctx.data())  # writing new blobs to git
            self.map_set(blob_sha, file_id)
        parts = filenm.split('/')
        if len(parts) > 1:
            # get filename and path for leading subdir
            filepath = parts[-1:][0]
            dirpath = "/".join([v for v in parts[0:-1]]) + '/'
            # get subdir name and path for parent dir; register each
            # intermediate directory as a 'tree' entry in its parent.
            parpath = '/'
            nparpath = '/'
            for part in parts[0:-1]:
                if nparpath == '/':
                    nparpath = part + '/'
                else:
                    nparpath += part + '/'
                # entry[2] holds the child path here; it is replaced by
                # the child's tree SHA in the write loop below.
                treeentry = ['tree', part + '/', nparpath]
                if parpath not in trees:
                    trees[parpath] = []
                if treeentry not in trees[parpath]:
                    trees[parpath].append( treeentry )
                parpath = nparpath
            # set file entry
            fileentry = ['blob', filepath, blob_sha, is_exec, is_link]
            if dirpath not in trees:
                trees[dirpath] = []
            trees[dirpath].append(fileentry)
        else:
            # Top-level file: goes straight into the root tree.
            fileentry = ['blob', parts[0], blob_sha, is_exec, is_link]
            if '/' not in trees:
                trees['/'] = []
            trees['/'].append(fileentry)
    dirs = trees.keys()
    if dirs:
        # sort by tree depth, so we write the deepest trees first
        dirs.sort(lambda a, b: len(b.split('/'))-len(a.split('/')))
        # Root is written last regardless of depth ordering.
        dirs.remove('/')
        dirs.append('/')
    else:
        # manifest is empty => make empty root tree
        trees['/'] = []
        dirs = ['/']
    # write all the trees
    tree_sha = None
    tree_shas = {}  # directory path -> written tree SHA
    for dirnm in dirs:
        tree_data = []
        sha_group = []
        # calculating a sha for the tree, so we don't write it twice
        listsha = make_sha()
        for entry in trees[dirnm]:
            # replace tree path with tree SHA (child trees were already
            # written thanks to the deepest-first ordering)
            if entry[0] == 'tree':
                sha = tree_shas[entry[2]]
                entry[2] = sha
            listsha.update(entry[1])
            listsha.update(entry[2])
            tree_data.append(entry)
        listsha = listsha.hexdigest()
        if listsha in self.written_trees:
            # Identical tree already written: reuse its SHA.
            tree_sha = self.written_trees[listsha]
            tree_shas[dirnm] = tree_sha
        else:
            tree_sha = self.git.write_tree_array(tree_data)  # writing new trees to git
            tree_shas[dirnm] = tree_sha
            self.written_trees[listsha] = tree_sha
    return (tree_sha, renames)  # should be the last root tree sha