def readytofetch(repo):
    """Check that enough time has passed since the last background prefetch.

    This only relates to prefetches after operations that change the
    working copy parent. The delay between background prefetches defaults
    to 2 minutes and is configurable via ``remotefilelog.prefetchdelay``.
    """
    delay = repo.ui.configint("remotefilelog", "prefetchdelay", 120)
    marker = repo.localvfs.join("lastprefetch")
    # Keep the marker file open while checking and touching it, so a
    # concurrent check cannot race between the mtime read and the update.
    with util.posixfile(marker, "a"):
        lastrun = os.path.getmtime(marker)
        if time.time() - lastrun <= delay:
            return False
        os.utime(marker, None)
        return True
def _generateoutputparts(
    head, cgversion, bundlecaps, bundlerepo, bundleroots, bundlefile
):
    """Produce the list of bundle2 parts to send to the user for *head*.

    When the stored bundle can be served as-is, its parts are read
    straight from *bundlefile*; otherwise the content is rebundled from
    *bundlerepo*.
    """
    # If the stored bundle cannot be served verbatim, rebuild it.
    if _needsrebundling(head, bundlerepo):
        return _rebundle(bundlerepo, bundleroots, head, cgversion, bundlecaps)

    output = []
    with util.posixfile(bundlefile, "rb") as fh:
        unbundler = exchange.readbundle(bundlerepo.ui, fh, bundlefile)
        if isinstance(unbundler, changegroup.cg1unpacker):
            # Legacy cg1 bundle: wrap the raw stream in a single
            # changegroup part.
            cgpart = bundle2.bundlepart(
                "changegroup", data=unbundler._stream.read()
            )
            cgpart.addparam("version", "01")
            output.append(cgpart)
        elif isinstance(unbundler, bundle2.unbundle20):
            # bundle2: copy every part through, preserving parameters,
            # and make sure at least one changegroup part is present.
            sawchangegroup = False
            for inpart in unbundler.iterparts():
                if inpart.type == "changegroup":
                    sawchangegroup = True
                outpart = bundle2.bundlepart(inpart.type, data=inpart.read())
                for name, value in pycompat.iteritems(inpart.params):
                    outpart.addparam(name, value)
                output.append(outpart)
            if not sawchangegroup:
                raise error.Abort(
                    "unexpected bundle without changegroup part, "
                    + "head: %s" % hex(head),
                    hint="report to administrator",
                )
        else:
            raise error.Abort("unknown bundle type")
    return output
def _loadfileblob(repo, path, node):
    """Return the cached remotefilelog data for ``path`` at ``node``.

    Two mutually exclusive cache backends are supported:

    - the ``simplecache`` extension store, when
      ``remotefilelog.simplecacheserverstore`` is enabled;
    - an on-disk directory, ``remotefilelog.servercachepath`` if set,
      otherwise ``<repo.path>/remotefilelogcache``.

    On a cache miss the value is computed with ``readvalue`` and written
    back to the active cache on a best-effort basis (write failures are
    deliberately ignored).

    Raises ``error.Abort`` if both backends are configured at once, or if
    the simplecache backend is requested but the extension is not loaded.
    """
    usesimplecache = repo.ui.configbool("remotefilelog", "simplecacheserverstore")
    cachepath = repo.ui.config("remotefilelog", "servercachepath")
    # The two backends are exclusive: refuse ambiguous configuration.
    if cachepath and usesimplecache:
        raise error.Abort(
            "remotefilelog.servercachepath and remotefilelog.simplecacheserverstore can't be both enabled"
        )
    # Cache key: relative file path joined with the hex revision hash.
    key = os.path.join(path, hex(node))

    # simplecache store for remotefilelogcache
    if usesimplecache:
        try:
            simplecache = extensions.find("simplecache")
        except KeyError:
            raise error.Abort(
                "simplecache extension must be enabled with remotefilelog.simplecacheserverstore enabled"
            )
        # this function doesn't raise exception
        text = simplecache.cacheget(key, trivialserializer, repo.ui)
        if text:
            return text
        else:
            # Cache miss (or falsy cached value): recompute and store.
            text = readvalue(repo, path, node)
            # this function doesn't raise exception
            simplecache.cacheset(key, text, trivialserializer, repo.ui)
            return text

    # on disk store for remotefilelogcache
    if not cachepath:
        cachepath = os.path.join(repo.path, "remotefilelogcache")
    filecachepath = os.path.join(cachepath, key)
    # A missing or zero-length cache file counts as a miss.
    if not os.path.exists(filecachepath) or os.path.getsize(filecachepath) == 0:
        text = readvalue(repo, path, node)
        # everything should be user & group read/writable
        oldumask = os.umask(0o002)
        try:
            dirname = os.path.dirname(filecachepath)
            if not os.path.exists(dirname):
                try:
                    os.makedirs(dirname)
                except OSError as ex:
                    # A concurrent writer may have created the directory
                    # between the exists() check and makedirs().
                    if ex.errno != errno.EEXIST:
                        raise
            f = None
            try:
                # Atomic write so readers never observe a partial file.
                f = util.atomictempfile(filecachepath, "w")
                f.write(text)
            except (IOError, OSError):
                # Don't abort if the user only has permission to read,
                # and not write.
                pass
            finally:
                if f:
                    f.close()
        finally:
            # Always restore the caller's umask.
            os.umask(oldumask)
    else:
        with util.posixfile(filecachepath, "r") as f:
            text = f.read()
    return text