def __init__(self, alias, url):
    if hg.islocal(url.encode('utf-8')):
        url = p(url).abspath()
        # Force git to use an absolute path in the future
        remote_name = os.path.basename(sys.argv[0]).replace(
            "git-remote-", "")
        cmd = ['git', 'config', 'remote.%s.url' % alias,
               "%s::%s" % (remote_name, url)]
        subprocess.call(cmd)

    # use hash of URL as unique identifier in various places.
    # this has the advantage over 'alias' that it stays constant
    # when the user does a "git remote rename old new".
    self.uuid = sha1(url.encode('utf-8')).hexdigest()

    gitdir = p(os.environ['GIT_DIR'].decode('utf-8'))
    self.remotedir = gitdir.joinpath('hg', self.uuid)
    self.marks_git_path = self.remotedir.joinpath('marks-git')
    self.marks_hg_path = self.remotedir.joinpath('marks-hg')
    self.marks = HGMarks(self.marks_hg_path)
    self.git_marks = GitMarks(self.marks_git_path)
    self.parsed_refs = {}
    self.blob_marks = {}
    self.branches = {}
    self.bookmarks = {}
    self.prefix = 'refs/hg/%s' % alias
    self.alias = alias
    self.url = url
    self.build_repo(url)

def debuggethostfingerprint(ui, repo, source='default'):
    """retrieve a fingerprint of the server certificate

    The server certificate is not verified.
    """
    source = ui.expandpath(source)
    u = util.url(source)
    scheme = (u.scheme or '').split('+')[-1]
    host = u.host
    port = util.getport(u.port or scheme or '-1')
    if scheme != 'https' or not host or not (0 <= port <= 65535):
        raise util.Abort(_('unsupported URL: %s') % source)
    sock = socket.socket()
    try:
        sock.connect((host, port))
        sock = sslutil.wrapsocket(sock, None, None, ui, serverhostname=host)
        peercert = sock.getpeercert(True)
        if not peercert:
            raise util.Abort(_('%s certificate error: '
                               'no certificate received') % host)
    finally:
        sock.close()
    s = util.sha1(peercert).hexdigest()
    ui.write(':'.join([s[x:x + 2] for x in xrange(0, len(s), 2)]), '\n')

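# For reference, the colon-separated fingerprint formatting used above can
# be reproduced with the standard library alone. This is a minimal sketch,
# assuming the input is the DER-encoded certificate from getpeercert(True);
# 'format_fingerprint' is a hypothetical helper, not part of Mercurial.
import hashlib

def format_fingerprint(der_cert):
    s = hashlib.sha1(der_cert).hexdigest()
    # group the 40 hex digits into colon-separated byte pairs,
    # e.g. 'de:ad:be:ef:...'
    return ':'.join(s[x:x + 2] for x in range(0, len(s), 2))
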
def buildtemprevlog(repo, file):
    # get filename key
    filekey = util.sha1(file).hexdigest()
    filedir = os.path.join(repo.path, 'store/data', filekey)

    # sort all entries based on linkrev
    fctxs = []
    for filenode in os.listdir(filedir):
        fctxs.append(repo.filectx(file, fileid=bin(filenode)))

    fctxs = sorted(fctxs, key=lambda x: x.linkrev())

    # add to revlog
    temppath = repo.sjoin('data/temprevlog.i')
    if os.path.exists(temppath):
        os.remove(temppath)
    r = filelog.filelog(repo.sopener, 'temprevlog')

    class faket(object):
        def add(self, a, b, c):
            pass
    t = faket()
    for fctx in fctxs:
        if fctx.node() not in repo:
            continue

        p = fctx.filelog().parents(fctx.filenode())
        meta = {}
        if fctx.renamed():
            meta['copy'] = fctx.renamed()[0]
            meta['copyrev'] = hex(fctx.renamed()[1])
        r.add(fctx.data(), meta, t, fctx.linkrev(), p[0], p[1])

    return r

def hexsha1(data):
    """hexsha1 returns the hex-encoded sha1 sum of the data in the
    file-like object data"""
    h = util.sha1()
    for chunk in util.filechunkiter(data):
        h.update(chunk)
    return h.hexdigest()

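# A hedged usage sketch for hexsha1: any file-like object opened in binary
# mode works, since util.filechunkiter reads it in fixed-size chunks.
# The filename below is made up for illustration.
with open('payload.bin', 'rb') as fp:
    digest = hexsha1(fp)  # 40-character hex SHA-1 of the file contents
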
def __init__(self, alias, url):
    if hg.islocal(url.encode('utf-8')):
        url = p(url).abspath()
        # Force git to use an absolute path in the future
        cmd = ['git', 'config', 'remote.%s.url' % alias,
               "gitifyhg::%s" % url]
        subprocess.call(cmd)

    # use hash of URL as unique identifier in various places.
    # this has the advantage over 'alias' that it stays constant
    # when the user does a "git remote rename old new".
    self.uuid = sha1(url.encode('utf-8')).hexdigest()

    gitdir = p(os.environ['GIT_DIR'].decode('utf-8'))
    self.remotedir = gitdir.joinpath('hg', self.uuid)
    self.marks_git_path = self.remotedir.joinpath('marks-git')
    self.marks = HGMarks(self.remotedir.joinpath('marks-hg'))
    self.parsed_refs = {}
    self.blob_marks = {}
    self.branches = {}
    self.bookmarks = {}
    self.prefix = 'refs/hg/%s' % alias
    self.alias = alias
    self.url = url
    self.build_repo(url)

def _obsrelsethashtree(repo, encodeonemarker):
    cache = []
    unfi = repo.unfiltered()
    markercache = {}
    for i in unfi:
        ctx = unfi[i]
        entry = 0
        sha = util.sha1()
        # add data from p1
        for p in ctx.parents():
            p = p.rev()
            if p < 0:
                p = node.nullid
            else:
                p = cache[p][1]
            if p != node.nullid:
                entry += 1
                sha.update(p)
        tmarkers = repo.obsstore.relevantmarkers([ctx.node()])
        if tmarkers:
            bmarkers = []
            for m in tmarkers:
                if m not in markercache:
                    markercache[m] = encodeonemarker(m)
                bmarkers.append(markercache[m])
            bmarkers.sort()
            for m in bmarkers:
                entry += 1
                sha.update(m)
        if entry:
            cache.append((ctx.node(), sha.digest()))
        else:
            cache.append((ctx.node(), node.nullid))
    return cache

def __init__(self, alias, url):
    if hg.islocal(url.encode("utf-8")):
        url = p(url).abspath()
        # Force git to use an absolute path in the future
        remote_name = os.path.basename(sys.argv[0]).replace("git-remote-", "")
        cmd = ["git", "config", "remote.%s.url" % alias,
               "%s::%s" % (remote_name, url)]
        subprocess.call(cmd)

    # use hash of URL as unique identifier in various places.
    # this has the advantage over 'alias' that it stays constant
    # when the user does a "git remote rename old new".
    if hg_version() >= "4.0.1":
        d = digester(["md5", "sha1"])
        d.update(url.encode("utf-8"))
        self.uuid = d["sha1"]
    else:
        self.uuid = sha1(url.encode("utf-8")).hexdigest()

    gitdir = p(os.environ["GIT_DIR"].decode("utf-8"))
    self.remotedir = gitdir.joinpath("hg", self.uuid)
    self.marks_git_path = self.remotedir.joinpath("marks-git")
    self.marks_hg_path = self.remotedir.joinpath("marks-hg")
    self.marks = HGMarks(self.marks_hg_path)
    self.git_marks = GitMarks(self.marks_git_path)
    self.parsed_refs = {}
    self.blob_marks = {}
    self.branches = {}
    self.bookmarks = {}
    self.prefix = "refs/hg/%s" % alias
    self.alias = alias
    self.url = url
    self.build_repo(url)

def _bundle(repo, bases, heads, node, suffix, compress=True):
    """create a bundle with the specified revisions as a backup"""
    usebundle2 = (repo.ui.config('experimental', 'bundle2-exp', True) and
                  repo.ui.config('experimental', 'strip-bundle2-version'))
    if usebundle2:
        cgversion = repo.ui.config('experimental', 'strip-bundle2-version')
        if cgversion not in changegroup.packermap:
            repo.ui.warn(_('unknown strip-bundle2-version value %r; '
                           'should be one of %r\n') %
                         (cgversion, sorted(changegroup.packermap.keys()),))
            cgversion = '01'
            usebundle2 = False
    else:
        cgversion = '01'

    cg = changegroup.changegroupsubset(repo, bases, heads, 'strip',
                                       version=cgversion)
    backupdir = "strip-backup"
    vfs = repo.vfs
    if not vfs.isdir(backupdir):
        vfs.mkdir(backupdir)

    # Include a hash of all the nodes in the filename for uniqueness
    allcommits = repo.set('%ln::%ln', bases, heads)
    allhashes = sorted(c.hex() for c in allcommits)
    totalhash = util.sha1(''.join(allhashes)).hexdigest()
    name = "%s/%s-%s-%s.hg" % (backupdir, short(node), totalhash[:8], suffix)

    if usebundle2:
        bundletype = "HG20"
    elif compress:
        bundletype = "HG10BZ"
    else:
        bundletype = "HG10UN"
    return changegroup.writebundle(repo.ui, cg, name, bundletype, vfs)

def copyandhash(instream, outfile):
    '''Read bytes from instream (iterable) and write them to outfile,
    computing the SHA-1 hash of the data along the way. Return the hash.'''
    hasher = util.sha1('')
    for data in instream:
        hasher.update(data)
        outfile.write(data)
    return hasher.hexdigest()

def filesha(repo, file):
    '''returns a sha1 of file contents'''
    f = util.pathto(repo.root, None, file)
    if os.path.exists(f):
        with open(f) as fp:
            contents = fp.read()
    else:
        contents = ''
    return util.sha1(contents).hexdigest()

def filesha(repo, file):
    '''returns a sha1 of file contents'''
    f = util.pathto(repo.root, None, file)
    if os.path.exists(f):
        with open(f) as fp:
            contents = fp.read()
    else:
        contents = ''
    return util.sha1(contents).hexdigest()

def hashfile(file):
    if not os.path.exists(file):
        return ''
    hasher = util.sha1('')
    fd = open(file, 'rb')
    for data in util.filechunkiter(fd, 128 * 1024):
        hasher.update(data)
    fd.close()
    return hasher.hexdigest()

def hashfile(file):
    if not os.path.exists(file):
        return ''
    hasher = util.sha1('')
    fd = open(file, 'rb')
    for data in blockstream(fd):
        hasher.update(data)
    fd.close()
    return hasher.hexdigest()

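# For comparison, a dependency-free equivalent of the chunked hashing in
# the two hashfile() variants above, using only the standard library.
# This is a sketch; the name 'hashfile_stdlib' is an assumption.
import hashlib
import os

def hashfile_stdlib(path, blocksize=128 * 1024):
    # hex SHA-1 of a file's contents, or '' if the file does not exist,
    # mirroring hashfile() above
    if not os.path.exists(path):
        return ''
    hasher = hashlib.sha1()
    with open(path, 'rb') as fd:
        for data in iter(lambda: fd.read(blocksize), b''):
            hasher.update(data)
    return hasher.hexdigest()
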
def hg_sha1(url):
    encoded = url.encode('utf-8')
    if hg_version() >= '3.2':
        d = digester(['md5', 'sha1'])
        d.update(encoded)
        return d['sha1']
    else:
        return sha1(encoded).hexdigest()

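# hg_sha1 above picks the digester API on Mercurial >= 3.2 and falls back
# to a plain SHA-1 otherwise; both branches produce the same hex digest.
# A version-independent equivalent using only hashlib (the name 'url_sha1'
# is an assumption, for illustration only):
import hashlib

def url_sha1(url):
    return hashlib.sha1(url.encode('utf-8')).hexdigest()
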
def __init__(self, patchpath, repo, pf=None, rev=None):
    """ Read patch context from file

    :param pf: currently ignored
        The provided handle is used to read the patch and
        the patchpath contains the name of the patch.
        The handle is NOT closed.
    """
    self._path = patchpath
    self._patchname = os.path.basename(patchpath)
    self._repo = repo
    self._rev = rev or 'patch'
    self._status = [[], [], []]
    self._fileorder = []
    self._user = ''
    self._desc = ''
    self._branch = ''
    self._node = node.nullid
    self._identity = node.nullid
    self._mtime = None
    self._fsize = 0
    self._parseerror = None
    self._phase = 'draft'

    try:
        self._mtime = os.path.getmtime(patchpath)
        self._fsize = os.path.getsize(patchpath)
        ph = mq.patchheader(self._path)
        self._ph = ph
        hash = util.sha1(self._path)
        hash.update(str(self._mtime))
        self._identity = hash.digest()
    except EnvironmentError:
        self._date = util.makedate()
        return

    try:
        self._branch = ph.branch or ''
        self._node = binascii.unhexlify(ph.nodeid)
        if self._repo.ui.configbool('mq', 'secret'):
            self._phase = 'secret'
    except TypeError:
        pass
    except AttributeError:
        # hacks to try to deal with older versions of mq.py
        self._branch = ''
        ph.diffstartline = len(ph.comments)
        if ph.message:
            ph.diffstartline += 1
    except error.ConfigError:
        pass

    self._user = ph.user or ''
    self._desc = ph.message and '\n'.join(ph.message).strip() or ''
    try:
        self._date = ph.date and util.parsedate(ph.date) or util.makedate()
    except error.Abort:
        self._date = util.makedate()

def storeuntracked(repo, untracked):
    if not untracked:
        return
    os.mkdir(repo.join('tasks/untrackedbackup'))
    for f in untracked:
        shaname = util.sha1(f).hexdigest()
        util.copyfile(util.pathto(repo.root, None, f),
                      repo.join('tasks/untrackedbackup/%s' % shaname))
        util.unlink(util.pathto(repo.root, None, f))

def __call__(self, sock, strict=False):
    host = self.host
    cacerts = self.ui.config('web', 'cacerts')
    hostfingerprint = self.ui.config('hostfingerprints', host)
    if not getattr(sock, 'getpeercert', False):  # python 2.5 ?
        if hostfingerprint:
            raise util.Abort(_("host fingerprint for %s can't be "
                               "verified (Python too old)") % host)
        if strict:
            raise util.Abort(_("certificate for %s can't be verified "
                               "(Python too old)") % host)
        if self.ui.configbool('ui', 'reportoldssl', True):
            self.ui.warn(_("warning: certificate for %s can't be verified "
                           "(Python too old)\n") % host)
        return

    if not sock.cipher():  # work around http://bugs.python.org/issue13721
        raise util.Abort(_('%s ssl connection error') % host)
    try:
        peercert = sock.getpeercert(True)
        peercert2 = sock.getpeercert()
    except AttributeError:
        raise util.Abort(_('%s ssl connection error') % host)

    if not peercert:
        raise util.Abort(_('%s certificate error: '
                           'no certificate received') % host)
    peerfingerprint = util.sha1(peercert).hexdigest()
    nicefingerprint = ":".join([peerfingerprint[x:x + 2]
                                for x in xrange(0, len(peerfingerprint), 2)])
    if hostfingerprint:
        if peerfingerprint.lower() != \
                hostfingerprint.replace(':', '').lower():
            raise util.Abort(_('certificate for %s has unexpected '
                               'fingerprint %s') % (host, nicefingerprint),
                             hint=_('check hostfingerprint configuration'))
        self.ui.debug('%s certificate matched fingerprint %s\n' %
                      (host, nicefingerprint))
    elif cacerts:
        msg = _verifycert(peercert2, host)
        if msg:
            raise util.Abort(_('%s certificate error: %s') % (host, msg),
                             hint=_('configure hostfingerprint %s or use '
                                    '--insecure to connect insecurely') %
                             nicefingerprint)
        self.ui.debug('%s certificate successfully verified\n' % host)
    elif strict:
        raise util.Abort(_('%s certificate with fingerprint %s not '
                           'verified') % (host, nicefingerprint),
                         hint=_('check hostfingerprints or web.cacerts '
                                'config setting'))
    else:
        self.ui.warn(_('warning: %s certificate with fingerprint %s not '
                       'verified (check hostfingerprints or web.cacerts '
                       'config setting)\n') % (host, nicefingerprint))

def copyandhash(instream, outfile):
    '''Read bytes from instream (iterable) and write them to outfile,
    computing the SHA-1 hash of the data along the way. Close outfile
    when done and return the binary hash.'''
    hasher = util.sha1('')
    for data in instream:
        hasher.update(data)
        outfile.write(data)

    # Blecch: closing a file that somebody else opened is rude and
    # wrong. But it's so darn convenient and practical! After all,
    # outfile was opened just to copy and hash.
    outfile.close()
    return hasher.digest()

def _bundle(repo, bases, heads, node, suffix, compress=True):
    """create a bundle with the specified revisions as a backup"""
    usebundle2 = (repo.ui.config('experimental', 'bundle2-exp') and
                  repo.ui.config('experimental', 'strip-bundle2-version'))
    if usebundle2:
        cgversion = repo.ui.config('experimental', 'strip-bundle2-version')
        if cgversion not in changegroup.packermap:
            repo.ui.warn(_('unknown strip-bundle2-version value %r; '
                           'should be one of %r\n') %
                         (cgversion, sorted(changegroup.packermap.keys()),))
            cgversion = '01'
            usebundle2 = False
    else:
        cgversion = '01'

    cg = changegroup.changegroupsubset(repo, bases, heads, 'strip',
                                       version=cgversion)
    backupdir = "strip-backup"
    vfs = repo.vfs
    if not vfs.isdir(backupdir):
        vfs.mkdir(backupdir)

    # Include a hash of all the nodes in the filename for uniqueness
    hexbases = (hex(n) for n in bases)
    hexheads = (hex(n) for n in heads)
    allcommits = repo.set('%ls::%ls', hexbases, hexheads)
    allhashes = sorted(c.hex() for c in allcommits)
    totalhash = util.sha1(''.join(allhashes)).hexdigest()
    name = "%s/%s-%s-%s.hg" % (backupdir, short(node), totalhash[:8], suffix)

    if usebundle2:
        bundletype = "HG2Y"
    elif compress:
        bundletype = "HG10BZ"
    else:
        bundletype = "HG10UN"
    return changegroup.writebundle(repo.ui, cg, name, bundletype, vfs)

def __call__(self, sock):
    host = self.host
    cacerts = self.ui.config('web', 'cacerts')
    hostfingerprint = self.ui.config('hostfingerprints', host)
    if cacerts and not hostfingerprint:
        msg = _verifycert(sock.getpeercert(), host)
        if msg:
            raise util.Abort(_('%s certificate error: %s '
                               '(use --insecure to connect '
                               'insecurely)') % (host, msg))
        self.ui.debug('%s certificate successfully verified\n' % host)
    else:
        if getattr(sock, 'getpeercert', False):
            peercert = sock.getpeercert(True)
            peerfingerprint = util.sha1(peercert).hexdigest()
            nicefingerprint = ":".join(
                [peerfingerprint[x:x + 2]
                 for x in xrange(0, len(peerfingerprint), 2)])
            if hostfingerprint:
                if peerfingerprint.lower() != \
                        hostfingerprint.replace(':', '').lower():
                    raise util.Abort(_('invalid certificate for %s '
                                       'with fingerprint %s') %
                                     (host, nicefingerprint))
                self.ui.debug('%s certificate matched fingerprint %s\n' %
                              (host, nicefingerprint))
            else:
                self.ui.warn(_('warning: %s certificate '
                               'with fingerprint %s not verified '
                               '(check hostfingerprints or web.cacerts '
                               'config setting)\n') %
                             (host, nicefingerprint))
        else:  # python 2.5 ?
            if hostfingerprint:
                raise util.Abort(_("host fingerprint for %s can't be "
                                   "verified (Python too old)") % host)
            self.ui.warn(_("warning: certificate for %s can't be "
                           "verified (Python too old)\n") % host)

def _hashignore(ignore):
    """Calculate hash for ignore patterns and filenames

    If this information changes between Mercurial invocations, we can't
    rely on Watchman information anymore and have to re-scan the working
    copy.
    """
    sha1 = util.sha1()
    if util.safehasattr(ignore, 'includepat'):
        sha1.update(ignore.includepat)
    sha1.update('\0\0')
    if util.safehasattr(ignore, 'excludepat'):
        sha1.update(ignore.excludepat)
    sha1.update('\0\0')
    if util.safehasattr(ignore, 'patternspat'):
        sha1.update(ignore.patternspat)
    sha1.update('\0\0')
    if util.safehasattr(ignore, '_files'):
        for f in ignore._files:
            sha1.update(f)
    sha1.update('\0')
    return sha1.hexdigest()

def _hashignore(ignore):
    """Calculate hash for ignore patterns and filenames

    If this information changes between Mercurial invocations, we can't
    rely on Watchman information anymore and have to re-scan the working
    copy.
    """
    sha1 = util.sha1()
    if util.safehasattr(ignore, "includepat"):
        sha1.update(ignore.includepat)
    sha1.update("\0\0")
    if util.safehasattr(ignore, "excludepat"):
        sha1.update(ignore.excludepat)
    sha1.update("\0\0")
    if util.safehasattr(ignore, "patternspat"):
        sha1.update(ignore.patternspat)
    sha1.update("\0\0")
    if util.safehasattr(ignore, "_files"):
        for f in ignore._files:
            sha1.update(f)
    sha1.update("\0")
    return sha1.hexdigest()

def __call__(self, sock):
    host = self.host
    cacerts = self.ui.config('web', 'cacerts')
    hostfingerprint = self.ui.config('hostfingerprints', host)
    if cacerts and not hostfingerprint:
        msg = _verifycert(sock.getpeercert(), host)
        if msg:
            raise util.Abort(_('%s certificate error: %s '
                               '(use --insecure to connect '
                               'insecurely)') % (host, msg))
        self.ui.debug('%s certificate successfully verified\n' % host)
    else:
        if getattr(sock, 'getpeercert', False):
            peercert = sock.getpeercert(True)
            peerfingerprint = util.sha1(peercert).hexdigest()
            nicefingerprint = ":".join(
                [peerfingerprint[x:x + 2]
                 for x in xrange(0, len(peerfingerprint), 2)])
            if hostfingerprint:
                if peerfingerprint.lower() != \
                        hostfingerprint.replace(':', '').lower():
                    raise util.Abort(_('invalid certificate for %s '
                                       'with fingerprint %s') %
                                     (host, nicefingerprint))
                self.ui.debug('%s certificate matched fingerprint %s\n' %
                              (host, nicefingerprint))
            else:
                self.ui.warn(_('warning: %s certificate '
                               'with fingerprint %s not verified '
                               '(check hostfingerprints or web.cacerts '
                               'config setting)\n') %
                             (host, nicefingerprint))
        else:  # python 2.5 ?
            if hostfingerprint:
                raise util.Abort(_("host fingerprint for %s can't be "
                                   "verified (Python too old)") % host)
            self.ui.warn(_("warning: certificate for %s can't be "
                           "verified (Python too old)\n") % host)

def buildtemprevlog(repo, file):
    # get filename key
    filekey = util.sha1(file).hexdigest()
    filedir = os.path.join(repo.path, 'store/data', filekey)

    # sort all entries based on linkrev
    fctxs = []
    for filenode in os.listdir(filedir):
        fctxs.append(repo.filectx(file, fileid=bin(filenode)))

    fctxs = sorted(fctxs, key=lambda x: x.linkrev())

    # add to revlog
    temppath = repo.sjoin('data/temprevlog.i')
    if os.path.exists(temppath):
        os.remove(temppath)
    r = filelog.filelog(repo.sopener, 'temprevlog')

    class faket(object):
        def add(self, a, b, c):
            pass
    t = faket()
    for fctx in fctxs:
        if fctx.node() not in repo:
            continue

        p = fctx.filelog().parents(fctx.filenode())
        meta = {}
        if fctx.renamed():
            meta['copy'] = fctx.renamed()[0]
            meta['copyrev'] = hex(fctx.renamed()[1])
        r.add(fctx.data(), meta, t, fctx.linkrev(), p[0], p[1])

    return r

def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
                            matcher, size, lfiletohash):
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    if renamedlfile:
                        raise util.Abort(
                            _('renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if 'l' in fctx.flags():
                    renamed = fctx.renamed()
                    if renamed and renamed[0] in lfiles:
                        raise util.Abort(_('largefile %s becomes symlink')
                                         % f)

                # largefile was modified, update standins
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                    executable = 'x' in ctx[f].flags()
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                return None
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(repo, f,
                                      lfiletohash[srcfname] + '\n',
                                      'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            return _getnormalcontext(repo, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)

def _hashlist(items):
    """return sha1 hexdigest for a list"""
    return util.sha1(str(items)).hexdigest()

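# Because _hashlist hashes str(items), the digest depends on element order
# and on each element's repr. A quick illustration:
assert _hashlist([1, 2]) != _hashlist([2, 1])  # '[1, 2]' vs '[2, 1]'
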
def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
                            matcher, size, lfiletohash):
    # Convert src parents to dst parents
    parents = _convertparents(ctx, revmap)

    # Generate list of changed files
    files = _getchangedfiles(ctx, parents)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the largefile-ness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    if renamedlfile:
                        raise util.Abort(
                            _('renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # largefile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if 'l' in fctx.flags():
                    renamed = fctx.renamed()
                    if renamed and renamed[0] in lfiles:
                        raise util.Abort(_('largefile %s becomes symlink')
                                         % f)

                # largefile was modified, update standins
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                    executable = 'x' in ctx[f].flags()
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                raise IOError
            renamed = fctx.renamed()
            if renamed:
                # standin is always a largefile because largefile-ness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(repo, f,
                                      lfiletohash[srcfname] + '\n',
                                      'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            return _getnormalcontext(repo, ctx, f, revmap)

    # Commit
    _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)

def restoreuntracked(repo, untracked):
    for f in untracked:
        shaname = util.sha1(f).hexdigest()
        util.copyfile(repo.join('tasks/untrackedbackup/%s' % shaname),
                      util.pathto(repo.root, None, f))

def getlocalkey(file, id):
    pathhash = util.sha1(file).hexdigest()
    return os.path.join(pathhash, id)

def stashfiles(repo, task):
    stashhex = util.sha1(task).hexdigest()
    return ('tasks/%s.stash' % stashhex,
            'tasks/%s.dirstate' % stashhex)

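# The tasks helpers above all derive stable on-disk names from SHA-1
# digests of the task name or file path. A usage sketch (the task name
# 'cleanup' is made up; the 40 hex characters come from sha1('cleanup')):
stash, dirstate = stashfiles(repo, 'cleanup')
# stash    == 'tasks/<40 hex chars>.stash'
# dirstate == 'tasks/<40 hex chars>.dirstate'
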
def hashfile(file):
    hasher = util.sha1('')
    with open(file, 'rb') as fd:
        for data in blockstream(fd):
            hasher.update(data)
    return hasher.hexdigest()

def getcachekey(reponame, file, id):
    pathhash = util.sha1(file).hexdigest()
    return os.path.join(reponame, pathhash[:2], pathhash[2:], id)

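# getcachekey fans cache entries out into a two-level directory layout
# keyed by the SHA-1 of the file path, keeping any one directory small.
# A sketch of the resulting shape (repo, path, and id are made up):
key = getcachekey('myrepo', 'src/foo.py', 'deadbeef')
# -> 'myrepo/xx/yyyy.../deadbeef', where 'xx' is the first two and
#    'yyyy...' the remaining 38 characters of sha1('src/foo.py')
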
def _hashfile(infile):
    hasher = util.sha1('')
    for data in _blockstream(infile):
        hasher.update(data)
    infile.close()
    return hasher.hexdigest()

def _bfconvert_addchangeset(rsrc, rdst, ctx, revmap, bfiles, normalfiles,
                            matcher, size, bfiletohash):
    # Convert src parents to dst parents
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)

    # Generate list of changed files
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        for f in mp1:
            if f not in mc:
                files.add(f)
        for f in mp2:
            if f not in mc:
                files.add(f)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)

    dstfiles = []
    for f in files:
        if f not in bfiles and f not in normalfiles:
            isbfile = _isbfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the bfileness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedbfile = renamed and renamed[0] in bfiles
                isbfile |= renamedbfile
                if 'l' in fctx.flags():
                    if renamedbfile:
                        raise util.Abort(
                            _('Renamed/copied bfile %s becomes symlink')
                            % f)
                    isbfile = False
            if isbfile:
                bfiles.add(f)
            else:
                normalfiles.add(f)

        if f in bfiles:
            dstfiles.append(bfutil.standin(f))
            # bfile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if 'l' in fctx.flags():
                    # recompute the rename information here: the 'renamed'
                    # binding from the block above may be stale or unset
                    # for this file
                    renamed = fctx.renamed()
                    if renamed and renamed[0] in bfiles:
                        raise util.Abort(_('bfile %s becomes symlink') % f)

                # bfile was modified, update standins
                fullpath = rdst.wjoin(f)
                bfutil.createdir(os.path.dirname(fullpath))
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in bfiletohash or bfiletohash[f] != hash:
                    # initialize fd before the try block so the finally
                    # clause cannot hit an unbound name if open() fails
                    fd = None
                    try:
                        fd = open(fullpath, 'wb')
                        fd.write(ctx[f].data())
                    finally:
                        if fd:
                            fd.close()
                    executable = 'x' in ctx[f].flags()
                    os.chmod(fullpath, bfutil.getmode(executable))
                    bfutil.writestandin(rdst, bfutil.standin(f), hash,
                                        executable)
                    bfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        if bfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = bfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                # standin is always a bfile because bfileness
                # doesn't change after rename or copy
                renamed = bfutil.standin(renamed[0])

            return context.memfilectx(f, bfiletohash[srcfname],
                                      'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]

            data = fctx.data()
            if f == '.hgtags':
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' %
                                   (node.hex(revmap[node.bin(id)]), name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)

    # Commit
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.dirstate.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

def _bfconvert_addchangeset(rsrc, rdst, ctx, revmap, bfiles, normalfiles,
                            matcher, size, bfiletohash):
    # Convert src parents to dst parents
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)

    # Generate list of changed files
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        for f in mp1:
            if f not in mc:
                files.add(f)
        for f in mp2:
            if f not in mc:
                files.add(f)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)

    dstfiles = []
    for f in files:
        if f not in bfiles and f not in normalfiles:
            isbfile = _is_bfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the bfileness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedbfile = renamed and renamed[0] in bfiles
                isbfile |= renamedbfile
                if 'l' in fctx.flags():
                    if renamedbfile:
                        raise util.Abort(
                            _('Renamed/copied bfile %s becomes symlink')
                            % f)
                    isbfile = False
            if isbfile:
                bfiles.add(f)
            else:
                normalfiles.add(f)

        if f in bfiles:
            dstfiles.append(bfutil.standin(f))
            # bfile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                if 'l' in fctx.flags():
                    # recompute the rename information here: the 'renamed'
                    # binding from the block above may be stale or unset
                    # for this file
                    renamed = fctx.renamed()
                    if renamed and renamed[0] in bfiles:
                        raise util.Abort(_('bfile %s becomes symlink') % f)

                # bfile was modified, update standins
                fullpath = rdst.wjoin(f)
                bfutil.create_dir(os.path.dirname(fullpath))
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in bfiletohash or bfiletohash[f] != hash:
                    with open(fullpath, 'wb') as fd:
                        fd.write(ctx[f].data())
                    executable = 'x' in ctx[f].flags()
                    os.chmod(fullpath, bfutil.get_mode(executable))
                    bfutil.write_standin(rdst, bfutil.standin(f), hash,
                                         executable)
                    bfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        if bfutil.is_standin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = bfutil.split_standin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                # standin is always a bfile because bfileness
                # doesn't change after rename or copy
                renamed = bfutil.standin(renamed[0])

            return context.memfilectx(f, bfiletohash[srcfname],
                                      'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]

            data = fctx.data()
            if f == '.hgtags':
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' %
                                   (node.hex(revmap[node.bin(id)]), name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)

    # Commit
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.dirstate.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

def stashfiles(repo, task):
    stashhex = util.sha1(task).hexdigest()
    return ('tasks/%s.stash' % stashhex,
            'tasks/%s.dirstate' % stashhex)