def _rebundle(bundlerepo, bundleroots, unknownhead, cgversion, bundlecaps):
    """Rebuild a changegroup limited to the revisions the user asked for.

    A stored bundle may include more revisions than the user requested:
    for example, the user asks for one revision but the bundle also
    contains its descendants.  This function regenerates the changegroup
    so only the revisions between ``bundleroots`` and ``unknownhead`` are
    included, and returns the resulting list of bundle2 parts.
    """
    parts = []
    # Compute the outgoing set: everything reachable from unknownhead that
    # is not already reachable from bundleroots.
    outgoing = discovery.outgoing(bundlerepo, commonheads=bundleroots, missingheads=[unknownhead])
    cgstream = changegroup.makestream(bundlerepo, outgoing, cgversion, "pull", bundlecaps=bundlecaps)
    # Materialize the stream so it can be embedded as part data.
    cgstream = util.chunkbuffer(cgstream).read()
    cgpart = bundle2.bundlepart("changegroup", data=cgstream)
    cgpart.addparam("version", cgversion)
    parts.append(cgpart)

    # This parsing should be refactored to be shared with
    # exchange.getbundlechunks. But I'll do that in a separate diff.
    if bundlecaps is None:
        bundlecaps = set()
    # Decode the client's advertised bundle2 capabilities from the
    # "bundle2=<urlquoted blob>" entries of bundlecaps.
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith("bundle2="):
            blob = util.urlreq.unquote(bcaps[len("bundle2="):])
            b2caps.update(bundle2.decodecaps(blob))

    # Only send mutation data if the client advertised support for it.
    if constants.scratchmutationparttype in b2caps:
        mutdata = mutation.bundle(bundlerepo, outgoing.missing)
        parts.append(
            bundle2.bundlepart(constants.scratchmutationparttype,
                               data=mutdata))

    # Optionally attach a tree pack part when both the treemanifest and
    # remotefilelog extensions are available and the client can take trees.
    try:
        treemod = extensions.find("treemanifest")
        remotefilelog = extensions.find("remotefilelog")
    except KeyError:
        pass
    else:
        missing = outgoing.missing
        if remotefilelog.shallowbundle.cansendtrees(bundlerepo, missing,
                                                    source="pull",
                                                    bundlecaps=bundlecaps,
                                                    b2caps=b2caps):
            try:
                treepart = treemod.createtreepackpart(
                    bundlerepo, outgoing, treemod.TREEGROUP_PARTTYPE2)
                parts.append(treepart)
            # NOTE(review): BaseException is very broad — it also converts
            # KeyboardInterrupt/SystemExit into an error part. Presumably
            # deliberate so the client sees the failure; confirm before
            # narrowing to Exception.
            except BaseException as ex:
                parts.append(bundle2.createerrorpart(str(ex)))

    return parts
def bundle2scratchbranch(op, part):
    """unbundle a bundle2 part containing a changegroup to store

    Wraps the part's changegroup payload in a fresh bundle2 bundle,
    writes it to a temporary file, and hands it to the server store.
    The temporary file is always removed afterwards.  Returns 1.
    """
    bundler = bundle2.bundle20(op.repo.ui)
    cgversion = part.params.get("cgversion", "01")
    cgpart = bundle2.bundlepart("changegroup", data=part.read())
    cgpart.addparam("version", cgversion)
    bundler.addpart(cgpart)
    buf = util.chunkbuffer(bundler.getchunks())

    fd, bundlefile = tempfile.mkstemp()
    try:
        # Bug fix: open the fd *before* entering the try whose finally
        # closes it.  Previously `fp = util.fdopen(...)` lived inside that
        # try, so a failing fdopen left `fp` unbound and `fp.close()` in
        # the finally raised NameError, masking the real error.
        fp = util.fdopen(fd, "wb")
        try:
            fp.write(buf.read())
        finally:
            fp.close()
        server.storebundle(op, part.params, bundlefile)
    finally:
        # Best-effort cleanup of the temp file; ignore it already being gone.
        try:
            os.unlink(bundlefile)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
    return 1
def getscratchbookmarkspart(peer, scratchbookmarks):
    """Build a mandatory bundle2 part carrying encoded scratch bookmarks.

    Aborts when the peer does not advertise support for the
    scratch-bookmarks part type.
    """
    parttype = constants.scratchbookmarksparttype
    if parttype not in bundle2.bundle2caps(peer):
        raise error.Abort(
            _("no server support for %r") % parttype)
    encoded = bookmarks.encodebookmarks(scratchbookmarks)
    # Upper-cased part type marks the part as mandatory for the receiver.
    return bundle2.bundlepart(parttype.upper(), data=encoded)
def _generateoutputparts(
    head, cgversion, bundlecaps, bundlerepo, bundleroots, bundlefile
):
    """Produce the list of bundle2 parts to send to the user for ``head``.

    When the stored bundle can be served as-is, its changegroup (and any
    other bundle2 parts) are copied straight through from ``bundlefile``;
    otherwise the content is regenerated via ``_rebundle`` so that only
    the requested revisions are included.
    """
    if _needsrebundling(head, bundlerepo):
        return _rebundle(bundlerepo, bundleroots, head, cgversion, bundlecaps)

    parts = []
    with util.posixfile(bundlefile, "rb") as f:
        unbundler = exchange.readbundle(bundlerepo.ui, f, bundlefile)
        if isinstance(unbundler, changegroup.cg1unpacker):
            # Legacy cg1 bundle: wrap the raw stream in a changegroup part.
            cgpart = bundle2.bundlepart(
                "changegroup", data=unbundler._stream.read()
            )
            cgpart.addparam("version", "01")
            parts.append(cgpart)
        elif isinstance(unbundler, bundle2.unbundle20):
            # Copy every part through, tracking whether a changegroup
            # part was present.
            sawchangegroup = False
            for part in unbundler.iterparts():
                if part.type == "changegroup":
                    sawchangegroup = True
                copied = bundle2.bundlepart(part.type, data=part.read())
                for key, value in pycompat.iteritems(part.params):
                    copied.addparam(key, value)
                parts.append(copied)
            if not sawchangegroup:
                raise error.Abort(
                    "unexpected bundle without changegroup part, "
                    + "head: %s" % hex(head),
                    hint="report to administrator",
                )
        else:
            raise error.Abort("unknown bundle type")
    return parts
def _getbundlegithgmappart(bundler, repo, source, bundlecaps=None, **kwargs):
    """send missing git to hg map data via bundle2"""
    if "fb_gitmeta" not in bundlecaps:
        return
    # Do nothing if the config indicates serving the complete git-hg map
    # file.  _getbundlegitmetapart will handle serving the complete file
    # in that case.
    if not repo.ui.configbool("gitlookup", "onlymapdelta", False):
        return
    mapfile = _getfile(repo, gitmapfile)
    if not mapfile:
        return

    commonheads = kwargs["common"]
    # If there are missing heads, we will sync everything.
    if _isheadmissing(repo, commonheads):
        commonheads = []
    needfullsync = len(commonheads) == 0

    heads = repo.heads()
    newheads = {hex(h) for h in heads}
    missinghashes = {
        hex(c) for c in repo.changelog.findmissing(commonheads, heads)
    }
    # Drop any hashes the server is configured to skip.
    skiphashes = set(repo.ui.configlist("gitlookup", "skiphashes", []))
    missinghashes.difference_update(skiphashes)

    missinglines = _getmissinglines(mapfile, missinghashes)
    payload = _githgmappayload(needfullsync, newheads, missinglines)
    bundler.addpart(
        bundle2.bundlepart(
            "b2x:fb:gitmeta:githgmap",
            [("filename", gitmapfile)],
            data=payload.tojson(),
        )
    )
def _getbundlegitmetapart(bundler, repo, source, bundlecaps=None, **kwargs):
    """send git metadata via bundle2

    For each configured git metadata file present in the repo, adds a
    "b2x:fb:gitmeta" part containing the file's full contents.
    """
    if "fb_gitmeta" not in bundlecaps:
        return
    filestooverwrite = gitmetafiles
    # Exclude the git-hg map file if the config indicates that the server
    # should only be serving the missing map data. _getbundle2partsgenerator
    # will serve the missing map data in this case.
    if repo.ui.configbool("gitlookup", "onlymapdelta", False):
        filestooverwrite = filestooverwrite - set([gitmapfile])
    for fname in sorted(filestooverwrite):
        f = _getfile(repo, fname)
        if not f:
            continue
        # Bug fix: the file object was previously read and never closed,
        # leaking one descriptor per served file.  (f is file-like — its
        # .read() was already relied upon here.)
        try:
            data = f.read()
        finally:
            f.close()
        part = bundle2.bundlepart(
            "b2x:fb:gitmeta", [("filename", fname)], data=data
        )
        bundler.addpart(part)
def _getscratchbranchpartsimpl(
    repo, peer, outgoing, confignonforwardmove, ui, bookmark, create, bookmarknode=None
):
    """Build the list of bundle2 parts for an infinitepush (scratch) push.

    Produces the mandatory scratch-branch part carrying the changegroup,
    plus optional mutation, tree-pack, and snapshot-metadata parts when
    the relevant extensions/capabilities are available.
    """
    # Validate the pushed revset before doing any work.
    _validaterevset(repo, revsetlang.formatspec("%ln", outgoing.missing), bookmark)
    supportedversions = changegroup.supportedoutgoingversions(repo)
    # Explicitly avoid using '01' changegroup version in infinitepush to
    # support general delta
    supportedversions.discard("01")
    cgversion = min(supportedversions)
    # Must run before makestream so LFS blobs are handled for the outgoing set.
    _handlelfs(repo, outgoing.missing)
    cg = changegroup.makestream(repo, outgoing, cgversion, "push")

    # Advisory parameters attached to the scratch-branch part.
    params = {}
    params["cgversion"] = cgversion
    if bookmark:
        params["bookmark"] = bookmark
    if bookmarknode:
        params["bookmarknode"] = bookmarknode
    if create:
        params["create"] = "1"
    if confignonforwardmove:
        params["force"] = "1"

    parts = []

    # .upper() marks this as a mandatory part: server will abort if there's no
    # handler
    parts.append(
        bundle2.bundlepart(
            constants.scratchbranchparttype.upper(),
            advisoryparams=pycompat.iteritems(params),
            data=cg,
        )
    )

    # Attach mutation entries when enabled, but only if the server
    # advertises support; otherwise warn and skip.
    if mutation.enabled(repo):
        entries = mutation.entriesforbundle(repo, outgoing.missing)
        if entries:
            if constants.scratchmutationparttype not in bundle2.bundle2caps(peer):
                repo.ui.warn(
                    _("no server support for %r - skipping\n")
                    % constants.scratchmutationparttype
                )
            else:
                parts.append(
                    bundle2.bundlepart(
                        constants.scratchmutationparttype,
                        data=mutation.bundleentries(entries),
                    )
                )

    # Optionally add a tree pack part when treemanifest + remotefilelog
    # are loaded and trees should be sent for the outgoing revisions.
    try:
        treemod = extensions.find("treemanifest")
        remotefilelog = extensions.find("remotefilelog")
        sendtrees = remotefilelog.shallowbundle.cansendtrees(repo, outgoing.missing)
        if sendtrees != remotefilelog.shallowbundle.NoTrees:
            parts.append(
                treemod.createtreepackpart(
                    repo, outgoing, treemod.TREEGROUP_PARTTYPE2, sendtrees=sendtrees
                )
            )
    except KeyError:
        # One of the extensions is not loaded; skip trees.
        pass

    # Optionally append snapshot metadata when the snapshot extension is loaded.
    try:
        snapshot = extensions.find("snapshot")
    except KeyError:
        pass
    else:
        snapshot.bundleparts.appendsnapshotmetadatabundlepart(
            repo, outgoing.missing, parts
        )

    return parts
def processparts(orig, repo, op, unbundler):
    """Intercept bundle2 part processing for infinitepush pushes.

    When the incoming bundle carries the ``infinitepush=True`` parameter,
    its parts are converted/forwarded into a new bundle that is written
    to a temp file and handed to the infinitepush store, instead of being
    applied to the repo.  Otherwise delegates to ``orig``.
    """
    if unbundler.params.get("infinitepush") != "True":
        return orig(repo, op, unbundler)

    # When set, all parts are forwarded into the stored bundle, not just
    # the whitelisted ones.
    handleallparts = repo.ui.configbool("infinitepush", "storeallparts")

    # Part types that are always forwarded into the stored bundle.
    partforwardingwhitelist = [constants.scratchmutationparttype]
    try:
        treemfmod = extensions.find("treemanifest")
        partforwardingwhitelist.append(treemfmod.TREEGROUP_PARTTYPE2)
    except KeyError:
        pass

    try:
        snapshot = extensions.find("snapshot")
        partforwardingwhitelist.append(snapshot.bundleparts.snapshotmetadataparttype)
    except KeyError:
        pass

    bundler = bundle2.bundle20(repo.ui)
    compress = repo.ui.config("infinitepush", "bundlecompression", "UN")
    bundler.setcompression(compress)
    cgparams = None          # params of the scratch-branch part, if seen
    scratchbookpart = None   # copied scratch-bookmarks part, processed last
    with bundle2.partiterator(repo, op, unbundler) as parts:
        for part in parts:
            bundlepart = None
            if part.type == "replycaps":
                # This configures the current operation to allow reply parts.
                bundle2._processpart(op, part)
            elif part.type == constants.scratchbranchparttype:
                # Scratch branch parts need to be converted to normal
                # changegroup parts, and the extra parameters stored for later
                # when we upload to the store. Eventually those parameters will
                # be put on the actual bundle instead of this part, then we can
                # send a vanilla changegroup instead of the scratchbranch part.
                cgversion = part.params.get("cgversion", "01")
                bundlepart = bundle2.bundlepart("changegroup", data=part.read())
                bundlepart.addparam("version", cgversion)
                cgparams = part.params

                # If we're not dumping all parts into the new bundle, we need to
                # alert the future pushkey and phase-heads handler to skip
                # the part.
                if not handleallparts:
                    op.records.add(
                        constants.scratchbranchparttype + "_skippushkey", True
                    )
                    op.records.add(
                        constants.scratchbranchparttype + "_skipphaseheads",
                        True
                    )
            elif part.type == constants.scratchbookmarksparttype:
                # Save this for later processing. Details below.
                #
                # Upstream https://phab.mercurial-scm.org/D1389 and its
                # follow-ups stop part.seek support to reduce memory usage
                # (https://bz.mercurial-scm.org/5691). So we need to copy
                # the part so it can be consumed later.
                scratchbookpart = copiedpart(part)
            else:
                if handleallparts or part.type in partforwardingwhitelist:
                    # Ideally we would not process any parts, and instead just
                    # forward them to the bundle for storage, but since this
                    # differs from previous behavior, we need to put it behind a
                    # config flag for incremental rollout.
                    bundlepart = bundle2.bundlepart(part.type, data=part.read())
                    for key, value in pycompat.iteritems(part.params):
                        bundlepart.addparam(key, value)

                    # Certain parts require a response
                    if part.type == "pushkey":
                        if op.reply is not None:
                            rpart = op.reply.newpart("reply:pushkey")
                            rpart.addparam("in-reply-to", str(part.id),
                                           mandatory=False)
                            rpart.addparam("return", "1", mandatory=False)
                else:
                    # Not forwarded: fall back to normal bundle2 processing.
                    bundle2._processpart(op, part)

            if handleallparts:
                op.records.add(part.type, {"return": 1})
            if bundlepart:
                bundler.addpart(bundlepart)

    # If commits were sent, store them
    if cgparams:
        buf = util.chunkbuffer(bundler.getchunks())
        fd, bundlefile = tempfile.mkstemp()
        try:
            try:
                fp = util.fdopen(fd, "wb")
                fp.write(buf.read())
            finally:
                fp.close()
            storebundle(op, cgparams, bundlefile)
        finally:
            try:
                os.unlink(bundlefile)
            except Exception:
                # we would rather see the original exception
                pass

    # The scratch bookmark part is sent as part of a push backup. It needs to be
    # processed after the main bundle has been stored, so that any commits it
    # references are available in the store.
    if scratchbookpart:
        bundle2._processpart(op, scratchbookpart)
def appendsnapshotmetadatabundlepart(repo, revs, parts):
    """Append a snapshot-metadata bundle2 part to ``parts`` if needed.

    Nothing is appended when the given revisions carry no snapshot
    metadata.
    """
    payload = getmetadatafromrevs(repo, revs)
    if not payload:
        return
    parts.append(bundle2.bundlepart(snapshotmetadataparttype, data=payload))