def fixuppath(ui, path, substitutions, announce=True):
    for ipprefixes, pathprefix, pathsubst in substitutions:
        if not _is_match_path(path, pathprefix):
            ui.debug(_("path %s didn't match prefix %s\n")
                     % (util.hidepassword(path), util.hidepassword(pathprefix)))
            continue
        try:
            u = util.url(pathsubst)
            probehost = u.host or '1.0.0.1'
        except Exception:
            probehost = '1.0.0.1'
        for ip in localips(ui, probehost):
            if any(ipaddress.ip_address(unicode(ip)) in
                   ipaddress.ip_network(unicode(ipprefix), False)
                   for ipprefix in ipprefixes):
                new = _rewrite_path(path, pathsubst, pathprefix)
                if announce and not ui.quiet:
                    ui.write_err(_("ip %s matched, "
                                   "path changed from %s to %s\n")
                                 % (ip, util.hidepassword(path),
                                    util.hidepassword(new)))
                return new
            ui.debug("ip %s does not match any of the ip prefixes %s\n"
                     % (ip, ', '.join(ipprefixes)))
    ui.debug(_("path %s was not matched by any prefix\n"
               % util.hidepassword(path)))
    return path

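# A minimal usage sketch for fixuppath() above, with made-up networks and
# URLs (every value below is hypothetical, not taken from any real config).
# Each substitution entry is an (ipprefixes, pathprefix, pathsubst) tuple;
# the path is rewritten only when one of the local IPs returned by
# localips() falls inside one of the given networks.
example_substitutions = [
    (['10.0.0.0/8', '192.168.0.0/16'],        # ipprefixes: office networks
     'https://hg.example.com/',               # pathprefix the path must match
     'https://hg-mirror.internal.example/'),  # pathsubst to rewrite to
]
# With a real ui object this would return either the rewritten path or the
# original path unchanged:
# newpath = fixuppath(ui, 'https://hg.example.com/repo', example_substitutions)
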
def put(self, source, hash):
    if self.sendfile(source, hash):
        raise error.Abort(
            _('remotestore: could not put %s to remote store %s')
            % (source, util.hidepassword(self.url)))
    self.ui.debug(
        _('remotestore: put %s to remote store %s\n')
        % (source, util.hidepassword(self.url)))

def findoutgoing(ui, repo, remote=None, force=False, opts=None):
    """utility function to find the first outgoing changeset

    Used by initialization code"""
    if opts is None:
        opts = {}
    dest = ui.expandpath(remote or 'default-push', remote or 'default')
    dest, revs = hg.parseurl(dest, None)[:2]
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))

    revs, checkout = hg.addbranchrevs(repo, repo, revs, None)
    other = hg.peer(repo, opts, dest)

    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
    if not outgoing.missing:
        raise error.Abort(_('no outgoing ancestors'))
    roots = list(repo.revs("roots(%ln)", outgoing.missing))
    if 1 < len(roots):
        msg = _('there are ambiguous outgoing revisions')
        hint = _('see "hg help histedit" for more detail')
        raise error.Abort(msg, hint=hint)
    return repo.lookup(roots[0])

def longmessage(self):
    return _(b"error getting id %s from url %s for file %s: %s\n") % (
        self.hash,
        util.hidepassword(self.url),
        self.filename,
        self.detail,
    )

def get(self, files):
    '''Get the specified largefiles from the store and write to local
    files under repo.root.

    files is a list of (filename, hash) tuples.
    Return (success, missing), lists of files successfully downloaded and
    those not found in the store.
    success is a list of (filename, hash) tuples;
    missing is a list of filenames that we could not get.  (The detailed
    error message will already have been presented to the user, so
    missing is just supplied as a summary.)'''
    success = []
    missing = []
    ui = self.ui

    at = 0
    available = self.exists(set(hash for (_filename, hash) in files))
    for filename, hash in files:
        ui.progress(_('getting largefiles'), at, unit=_('files'),
                    total=len(files))
        at += 1
        ui.note(_('getting %s:%s\n') % (filename, hash))

        if not available.get(hash):
            ui.warn(_('%s: largefile %s not available from %s\n')
                    % (filename, hash, util.hidepassword(self.url)))
            missing.append(filename)
            continue

        if self._gethash(filename, hash):
            success.append((filename, hash))
        else:
            missing.append(filename)

    ui.progress(_('getting largefiles'), None)
    return (success, missing)

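# Illustrative call for get() above; the store object, filenames and hashes
# are all hypothetical. The method reports per-file results rather than
# raising, so callers typically summarise the 'missing' list afterwards:
#
#   success, missing = store.get([('big.bin', '0f1e2d...'),
#                                 ('data.tar', 'a9b8c7...')])
#   # success -> [('big.bin', '0f1e2d...')]   files actually downloaded
#   # missing -> ['data.tar']                 files the store did not have
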
def get(self, files):
    '''Get the specified largefiles from the store and write to local
    files under repo.root.

    files is a list of (filename, hash) tuples.
    Return (success, missing), lists of files successfully downloaded and
    those not found in the store.
    success is a list of (filename, hash) tuples;
    missing is a list of filenames that we could not get.  (The detailed
    error message will already have been presented to the user, so
    missing is just supplied as a summary.)'''
    success = []
    missing = []
    ui = self.ui

    at = 0
    available = self.exists(set(hash for (_filename, hash) in files))
    for filename, hash in files:
        ui.progress(_('getting largefiles'), at, unit='lfile',
                    total=len(files))
        at += 1
        ui.note(_('getting %s:%s\n') % (filename, hash))

        if not available.get(hash):
            ui.warn(_('%s: largefile %s not available from %s\n')
                    % (filename, hash, util.hidepassword(self.url)))
            missing.append(filename)
            continue

        if self._gethash(filename, hash):
            success.append((filename, hash))
        else:
            missing.append(filename)

    ui.progress(_('getting largefiles'), None)
    return (success, missing)

def longmessage(self):
    return _("error getting id %s from url %s for file %s: %s\n") % (
        self.hash,
        util.hidepassword(self.url),
        self.filename,
        self.detail,
    )

def validate_synch_path(path, repo):
    '''
    Validate the path that must be used to sync operations (pull, push,
    outgoing and incoming)
    '''
    return_path = path
    for alias, path_aux in repo.ui.configitems('paths'):
        if path == alias:
            return_path = path_aux
        elif path == util.hidepassword(path_aux):
            return_path = path_aux
    return return_path

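# A small illustrative sketch for validate_synch_path() above; the alias and
# URL are invented. If the user passes either the alias name or the
# password-stripped form of a configured URL, the function maps it back to
# the real configured path:
#
#   [paths]
#   upstream = https://user:secret@hg.example.com/repo
#
#   validate_synch_path('upstream', repo)
#       -> 'https://user:secret@hg.example.com/repo'
#   validate_synch_path('https://user:***@hg.example.com/repo', repo)
#       -> 'https://user:secret@hg.example.com/repo'
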
def openstore(repo=None, remote=None, put=False, ui=None):
    if ui is None:
        ui = repo.ui

    if not remote:
        lfpullsource = getattr(repo, 'lfpullsource', None)
        if lfpullsource:
            path = ui.expandpath(lfpullsource)
        elif put:
            path = ui.expandpath('default-push', 'default')
        else:
            path = ui.expandpath('default')

        # ui.expandpath() leaves 'default-push' and 'default' alone if
        # they cannot be expanded: fallback to the empty string,
        # meaning the current directory.
        if repo is None:
            path = ui.expandpath('default')
            path, _branches = hg.parseurl(path)
            remote = hg.peer(repo or ui, {}, path)
        elif path == 'default-push' or path == 'default':
            path = ''
            remote = repo
        else:
            path, _branches = hg.parseurl(path)
            remote = hg.peer(repo or ui, {}, path)

    # The path could be a scheme so use Mercurial's normal functionality
    # to resolve the scheme to a repository and use its path
    path = util.safehasattr(remote, 'url') and remote.url() or remote.path

    match = _scheme_re.match(path)
    if not match:  # regular filesystem path
        scheme = 'file'
    else:
        scheme = match.group(1)

    try:
        storeproviders = _storeprovider[scheme]
    except KeyError:
        raise error.Abort(_('unsupported URL scheme %r') % scheme)

    for classobj in storeproviders:
        try:
            return classobj(ui, repo, remote)
        except lfutil.storeprotonotcapable:
            pass

    raise error.Abort(_('%s does not appear to be a largefile store')
                      % util.hidepassword(path))

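# Sketch of how the scheme dispatch in openstore() is assumed to be wired up.
# _storeprovider maps a URL scheme to an ordered list of candidate store
# classes, and the first class whose constructor does not raise
# lfutil.storeprotonotcapable wins. The class names below are placeholders
# for illustration only, not the real largefiles store classes.
#
# _storeprovider = {
#     'file':  [LocalDiskStore],
#     'http':  [WireProtoStore],
#     'https': [WireProtoStore],
#     'ssh':   [WireProtoStore],
# }
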
def getoutgoing(dest, revs):
    '''Return the revisions present locally but not in dest'''
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest)
    revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
    other = hg.peer(repo, opts, dest)
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))
    common, _anyinc, _heads = discovery.findcommonincoming(repo, other)
    nodes = revs and map(repo.lookup, revs) or revs
    o = repo.changelog.findmissing(common, heads=nodes)
    if not o:
        ui.status(_("no changes found\n"))
        return []
    return [str(repo.changelog.rev(r)) for r in o]

def _getoutgoing(repo, dest, revs):
    '''Return the revisions present locally but not in dest'''
    ui = repo.ui
    url = ui.expandpath(dest or 'default-push', dest or 'default')
    url = hg.parseurl(url)[0]
    ui.status(_('comparing with %s\n') % util.hidepassword(url))

    revs = [r for r in revs if r >= 0]
    if not revs:
        revs = [len(repo) - 1]
    revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
    if not revs:
        ui.status(_("no changes found\n"))
    return revs

def pull():
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, ui.expandpath(source))
    ui.status(_('pulling from %s\n') %
              util.hidepassword(ui.expandpath(source)))
    revs = None
    if opts['rev']:
        if not other.local():
            raise util.Abort(_("fetch -r doesn't work for remote "
                               "repositories yet"))
        else:
            revs = [other.lookup(rev) for rev in opts['rev']]
    modheads = repo.pull(other, heads=revs)
    return postincoming(other, modheads)

def _getfile(self, tmpfile, filename, hash):
    try:
        chunks = self._get(hash)
    except urlerr.httperror as e:
        # 401s get converted to error.Aborts; everything else is fine being
        # turned into a StoreError
        raise basestore.StoreError(filename, hash, self.url, str(e))
    except urlerr.urlerror as e:
        # This usually indicates a connection problem, so don't
        # keep trying with the other files... they will probably
        # all fail too.
        raise error.Abort('%s: %s' %
                          (util.hidepassword(self.url), e.reason))
    except IOError as e:
        raise basestore.StoreError(filename, hash, self.url, str(e))

    return lfutil.copyandhash(chunks, tmpfile)

def _openstore(repo, remote=None, put=False):
    ui = repo.ui

    if not remote:
        lfpullsource = getattr(repo, 'lfpullsource', None)
        if lfpullsource:
            path = ui.expandpath(lfpullsource)
        elif put:
            path = ui.expandpath('default-push', 'default')
        else:
            path = ui.expandpath('default')

        # ui.expandpath() leaves 'default-push' and 'default' alone if
        # they cannot be expanded: fallback to the empty string,
        # meaning the current directory.
        if path == 'default-push' or path == 'default':
            path = ''
            remote = repo
        else:
            path, _branches = hg.parseurl(path)
            remote = hg.peer(repo, {}, path)

    # The path could be a scheme so use Mercurial's normal functionality
    # to resolve the scheme to a repository and use its path
    path = util.safehasattr(remote, 'url') and remote.url() or remote.path

    match = _scheme_re.match(path)
    if not match:  # regular filesystem path
        scheme = 'file'
    else:
        scheme = match.group(1)

    try:
        storeproviders = _storeprovider[scheme]
    except KeyError:
        raise error.Abort(_('unsupported URL scheme %r') % scheme)

    for classobj in storeproviders:
        try:
            return classobj(ui, repo, remote)
        except lfutil.storeprotonotcapable:
            pass

    raise error.Abort(_('%s does not appear to be a largefile store')
                      % util.hidepassword(path))

def findoutgoing(ui, repo, remote=None, force=False, opts={}):
    """utility function to find the first outgoing changeset

    Used by initialisation code"""
    dest = ui.expandpath(remote or 'default-push', remote or 'default')
    dest, revs = hg.parseurl(dest, None)[:2]
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))

    revs, checkout = hg.addbranchrevs(repo, repo, revs, None)
    other = hg.peer(repo, opts, dest)

    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    # hexlify nodes from outgoing, because we're going to parse
    # parent[0] using revsingle below, and if the binary hash
    # contains special revset characters like ":" the revset
    # parser can choke.
    outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
    if not outgoing.missing:
        raise util.Abort(_('no outgoing ancestors'))
    return outgoing.missing[0]

def findoutgoing(ui, repo, remote=None, force=False, opts={}):
    """utility function to find the first outgoing changeset

    Used by initialisation code"""
    dest = ui.expandpath(remote or 'default-push', remote or 'default')
    dest, revs = hg.parseurl(dest, None)[:2]
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))

    revs, checkout = hg.addbranchrevs(repo, repo, revs, None)
    other = hg.peer(repo, opts, dest)

    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
    if not outgoing.missing:
        raise util.Abort(_('no outgoing ancestors'))
    roots = list(repo.revs("roots(%ln)", outgoing.missing))
    if 1 < len(roots):
        msg = _('there are ambiguous outgoing revisions')
        hint = _('see "hg help histedit" for more detail')
        raise util.Abort(msg, hint=hint)
    return repo.lookup(roots[0])

def gitgetmeta(ui, repo, source='default'):
    '''get git metadata from a server that supports fb_gitmeta'''
    source, branch = hg.parseurl(ui.expandpath(source))
    other = hg.peer(repo, {}, source)
    ui.status(_('getting git metadata from %s\n') %
              util.hidepassword(source))

    kwargs = {'bundlecaps': exchange.caps20to10(repo)}
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    kwargs['bundlecaps'].add('bundle2=' + util.urlreq.quote(capsblob))
    # this would ideally not be in the bundlecaps at all, but adding new kwargs
    # for wire transmissions is not possible as of Mercurial d19164a018a1
    kwargs['bundlecaps'].add('fb_gitmeta')
    kwargs['heads'] = [nullid]
    kwargs['cg'] = False
    kwargs['common'] = _getcommonheads(repo)
    bundle = other.getbundle('pull', **kwargs)
    try:
        op = bundle2.processbundle(repo, bundle)
    except error.BundleValueError as exc:
        raise error.Abort('missing support for %s' % exc)
    writebytes = op.records['fb:gitmeta:writebytes']
    ui.status(_('wrote %d files (%d bytes)\n') %
              (len(writebytes), sum(writebytes)))

def __str__(self):
    return "%s: %s" % (util.hidepassword(self.url), self.detail)

def fetch(ui, repo, source="default", **opts): """pull changes from a remote repository, merge new changes if needed. This finds all changes from the repository at the specified path or URL and adds them to the local repository. If the pulled changes add a new branch head, the head is automatically merged, and the result of the merge is committed. Otherwise, the working directory is updated to include the new changes. When a merge occurs, the newly pulled changes are assumed to be "authoritative". The head of the new changes is used as the first parent, with local changes as the second. To switch the merge order, use --switch-parent. See :hg:`help dates` for a list of formats valid for -d/--date. Returns 0 on success. """ date = opts.get("date") if date: opts["date"] = util.parsedate(date) parent, p2 = repo.dirstate.parents() branch = repo.dirstate.branch() branchnode = repo.branchtags().get(branch) if parent != branchnode: raise util.Abort(_("working dir not at branch tip " '(use "hg update" to check out branch tip)')) if p2 != nullid: raise util.Abort(_("outstanding uncommitted merge")) wlock = lock = None try: wlock = repo.wlock() lock = repo.lock() mod, add, rem, del_ = repo.status()[:4] if mod or add or rem: raise util.Abort(_("outstanding uncommitted changes")) if del_: raise util.Abort(_("working directory is missing some files")) bheads = repo.branchheads(branch) bheads = [head for head in bheads if len(repo[head].children()) == 0] if len(bheads) > 1: raise util.Abort(_("multiple heads in this branch " '(use "hg heads ." and "hg merge" to merge)')) other = hg.peer(repo, opts, ui.expandpath(source)) ui.status(_("pulling from %s\n") % util.hidepassword(ui.expandpath(source))) revs = None if opts["rev"]: try: revs = [other.lookup(rev) for rev in opts["rev"]] except error.CapabilityError: err = _("Other repository doesn't support revision lookup, " "so a rev cannot be specified.") raise util.Abort(err) # Are there any changes at all? modheads = repo.pull(other, heads=revs) if modheads == 0: return 0 # Is this a simple fast-forward along the current branch? newheads = repo.branchheads(branch) newchildren = repo.changelog.nodesbetween([parent], newheads)[2] if len(newheads) == 1: if newchildren[0] != parent: return hg.clean(repo, newchildren[0]) else: return 0 # Are there more than one additional branch heads? newchildren = [n for n in newchildren if n != parent] newparent = parent if newchildren: newparent = newchildren[0] hg.clean(repo, newparent) newheads = [n for n in newheads if n != newparent] if len(newheads) > 1: ui.status( _("not merging with %d other new branch heads " '(use "hg heads ." and "hg merge" to merge them)\n') % (len(newheads) - 1) ) return 1 # Otherwise, let's merge. err = False if newheads: # By default, we consider the repository we're pulling # *from* as authoritative, so we merge our changes into # theirs. 
if opts["switch_parent"]: firstparent, secondparent = newparent, newheads[0] else: firstparent, secondparent = newheads[0], newparent ui.status(_("updating to %d:%s\n") % (repo.changelog.rev(firstparent), short(firstparent))) hg.clean(repo, firstparent) ui.status(_("merging with %d:%s\n") % (repo.changelog.rev(secondparent), short(secondparent))) err = hg.merge(repo, secondparent, remind=False) if not err: # we don't translate commit messages message = cmdutil.logmessage(ui, opts) or ("Automated merge with %s" % util.removeauth(other.url())) editor = cmdutil.commiteditor if opts.get("force_editor") or opts.get("edit"): editor = cmdutil.commitforceeditor n = repo.commit(message, opts["user"], opts["date"], editor=editor) ui.status( _("new changeset %d:%s merges remote changes " "with local\n") % (repo.changelog.rev(n), short(n)) ) return err finally: release(lock, wlock)
def trackedcmd(ui, repo, remotepath=None, *pats, **opts):
    """show or change the current narrowspec

    With no argument, shows the current narrowspec entries, one per line. Each
    line will be prefixed with 'I' or 'X' for included or excluded patterns,
    respectively.

    The narrowspec is comprised of expressions to match remote files and/or
    directories that should be pulled into your client.
    The narrowspec has *include* and *exclude* expressions, with excludes always
    trumping includes: that is, if a file matches an exclude expression, it will
    be excluded even if it also matches an include expression.
    Excluding files that were never included has no effect.

    Each included or excluded entry is in the format described by
    'hg help patterns'.

    The options allow you to add or remove included and excluded expressions.

    If --clear is specified, then all previous includes and excludes are DROPPED
    and replaced by the new ones specified to --addinclude and --addexclude.
    If --clear is specified without any further options, the narrowspec will be
    empty and will not match any files.
    """
    opts = pycompat.byteskwargs(opts)
    if repository.NARROW_REQUIREMENT not in repo.requirements:
        ui.warn(_('The narrow command is only supported on repositories cloned'
                  ' with --narrow.\n'))
        return 1

    # Before supporting, decide whether "hg tracked --clear" should mean
    # tracking no paths or all paths.
    if opts['clear']:
        ui.warn(_('The --clear option is not yet supported.\n'))
        return 1

    # import rules from a file
    newrules = opts.get('import_rules')
    if newrules:
        try:
            filepath = os.path.join(encoding.getcwd(), newrules)
            fdata = util.readfile(filepath)
        except IOError as inst:
            raise error.Abort(_("cannot read narrowspecs from '%s': %s") %
                              (filepath, encoding.strtolocal(inst.strerror)))
        includepats, excludepats, profiles = sparse.parseconfig(ui, fdata,
                                                                'narrow')
        if profiles:
            raise error.Abort(_("including other spec files using '%include' "
                                "is not supported in narrowspec"))
        opts['addinclude'].extend(includepats)
        opts['addexclude'].extend(excludepats)

    addedincludes = narrowspec.parsepatterns(opts['addinclude'])
    removedincludes = narrowspec.parsepatterns(opts['removeinclude'])
    addedexcludes = narrowspec.parsepatterns(opts['addexclude'])
    removedexcludes = narrowspec.parsepatterns(opts['removeexclude'])

    only_show = not (addedincludes or removedincludes or addedexcludes or
                     removedexcludes or newrules)

    oldincludes, oldexcludes = repo.narrowpats

    # filter the user passed additions and deletions into actual additions and
    # deletions of excludes and includes
    addedincludes -= oldincludes
    removedincludes &= oldincludes
    addedexcludes -= oldexcludes
    removedexcludes &= oldexcludes

    widening = addedincludes or removedexcludes
    narrowing = removedincludes or addedexcludes

    # Only print the current narrowspec.
    if only_show:
        ui.pager('tracked')
        fm = ui.formatter('narrow', opts)
        for i in sorted(oldincludes):
            fm.startitem()
            fm.write('status', '%s ', 'I', label='narrow.included')
            fm.write('pat', '%s\n', i, label='narrow.included')
        for i in sorted(oldexcludes):
            fm.startitem()
            fm.write('status', '%s ', 'X', label='narrow.excluded')
            fm.write('pat', '%s\n', i, label='narrow.excluded')
        fm.end()
        return 0

    if not widening and not narrowing:
        ui.status(_("nothing to widen or narrow\n"))
        return 0

    with repo.wlock(), repo.lock():
        cmdutil.bailifchanged(repo)

        # Find the revisions we have in common with the remote. These will
        # be used for finding local-only changes for narrowing. They will
        # also define the set of revisions to update for widening.
        remotepath = ui.expandpath(remotepath or 'default')
        url, branches = hg.parseurl(remotepath)
        ui.status(_('comparing with %s\n') % util.hidepassword(url))
        remote = hg.peer(repo, opts, url)

        # check narrow support before doing anything if widening needs to be
        # performed. In future we should also abort if client is ellipses and
        # server does not support ellipses
        if widening and wireprototypes.NARROWCAP not in remote.capabilities():
            raise error.Abort(_("server does not support narrow clones"))

        commoninc = discovery.findcommonincoming(repo, remote)

        if narrowing:
            newincludes = oldincludes - removedincludes
            newexcludes = oldexcludes | addedexcludes
            _narrow(ui, repo, remote, commoninc, oldincludes, oldexcludes,
                    newincludes, newexcludes,
                    opts['force_delete_local_changes'])
            # _narrow() updated the narrowspec and _widen() below needs to
            # use the updated values as its base (otherwise removed includes
            # and addedexcludes will be lost in the resulting narrowspec)
            oldincludes = newincludes
            oldexcludes = newexcludes

        if widening:
            newincludes = oldincludes | addedincludes
            newexcludes = oldexcludes - removedexcludes
            _widen(ui, repo, remote, commoninc, oldincludes, oldexcludes,
                   newincludes, newexcludes)

    return 0

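# A small worked example of the include/exclude filtering done by trackedcmd()
# above, using made-up patterns. Only requests that actually change the
# current narrowspec survive the set arithmetic, and the resulting booleans
# decide whether the widen and/or narrow phases run.
oldincludes = {'path:src', 'path:docs'}
oldexcludes = {'path:src/vendor'}
addedincludes = {'path:src', 'path:tests'}    # 'path:src' is already included
removedexcludes = {'path:src/vendor'}

addedincludes -= oldincludes                  # -> {'path:tests'}
removedexcludes &= oldexcludes                # -> {'path:src/vendor'}
widening = bool(addedincludes or removedexcludes)   # True: fetch more data
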
def fetch(ui, repo, source='default', **opts):
    '''pull changes from a remote repository, merge new changes if needed.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository.

    If the pulled changes add a new branch head, the head is
    automatically merged, and the result of the merge is committed.
    Otherwise, the working directory is updated to include the new
    changes.

    When a merge occurs, the newly pulled changes are assumed to be
    "authoritative". The head of the new changes is used as the first
    parent, with local changes as the second. To switch the merge
    order, use --switch-parent.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success.
    '''

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    parent, p2 = repo.dirstate.parents()
    branch = repo.dirstate.branch()
    branchnode = repo.branchtags().get(branch)
    if parent != branchnode:
        raise util.Abort(_('working dir not at branch tip '
                           '(use "hg update" to check out branch tip)'))
    if p2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))

    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        mod, add, rem, del_ = repo.status()[:4]
        if mod or add or rem:
            raise util.Abort(_('outstanding uncommitted changes'))
        if del_:
            raise util.Abort(_('working directory is missing some files'))
        bheads = repo.branchheads(branch)
        bheads = [head for head in bheads if len(repo[head].children()) == 0]
        if len(bheads) > 1:
            raise util.Abort(_('multiple heads in this branch '
                               '(use "hg heads ." and "hg merge" to merge)'))

        other = hg.peer(repo, opts, ui.expandpath(source))
        ui.status(_('pulling from %s\n') %
                  util.hidepassword(ui.expandpath(source)))
        revs = None
        if opts['rev']:
            try:
                revs = [other.lookup(rev) for rev in opts['rev']]
            except error.CapabilityError:
                err = _("Other repository doesn't support revision lookup, "
                        "so a rev cannot be specified.")
                raise util.Abort(err)

        # Are there any changes at all?
        modheads = repo.pull(other, heads=revs)
        if modheads == 0:
            return 0

        # Is this a simple fast-forward along the current branch?
        newheads = repo.branchheads(branch)
        newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
        if len(newheads) == 1:
            if newchildren[0] != parent:
                return hg.clean(repo, newchildren[0])
            else:
                return 0

        # Are there more than one additional branch heads?
        newchildren = [n for n in newchildren if n != parent]
        newparent = parent
        if newchildren:
            newparent = newchildren[0]
            hg.clean(repo, newparent)
        newheads = [n for n in newheads if n != newparent]
        if len(newheads) > 1:
            ui.status(_('not merging with %d other new branch heads '
                        '(use "hg heads ." and "hg merge" to merge them)\n') %
                      (len(newheads) - 1))
            return 1

        # Otherwise, let's merge.
        err = False
        if newheads:
            # By default, we consider the repository we're pulling
            # *from* as authoritative, so we merge our changes into
            # theirs.
            if opts['switch_parent']:
                firstparent, secondparent = newparent, newheads[0]
            else:
                firstparent, secondparent = newheads[0], newparent
            ui.status(_('updating to %d:%s\n') %
                      (repo.changelog.rev(firstparent), short(firstparent)))
            hg.clean(repo, firstparent)
            ui.status(_('merging with %d:%s\n') %
                      (repo.changelog.rev(secondparent), short(secondparent)))
            err = hg.merge(repo, secondparent, remind=False)

        if not err:
            # we don't translate commit messages
            message = (cmdutil.logmessage(ui, opts) or
                       ('Automated merge with %s' %
                        util.removeauth(other.url())))
            editor = cmdutil.commiteditor
            if opts.get('force_editor') or opts.get('edit'):
                editor = cmdutil.commitforceeditor
            n = repo.commit(message, opts['user'], opts['date'], editor=editor)
            ui.status(_('new changeset %d:%s merges remote changes '
                        'with local\n') %
                      (repo.changelog.rev(n), short(n)))

        return err

    finally:
        release(lock, wlock)

def push(self):
    self.ui.status(_('pushing to %s\n') %
                   util.hidepassword(self.ui.expandpath(self.source)))
    # If it's zero, it failed with an http error. If it's None, or basically
    # any other number, we're not sure what happened, but trust it to have
    # raised an exception if necessary.
    if exchange.push(self.repo, self.remoterepository).cgresult == 0:
        raise util.Abort('push failed, please check your synchronization '
                         'settings')
    self.ui.status('push complete\n')

def fetch(ui, repo, source=b'default', **opts):
    """pull changes from a remote repository, merge new changes if needed.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository.

    If the pulled changes add a new branch head, the head is
    automatically merged, and the result of the merge is committed.
    Otherwise, the working directory is updated to include the new
    changes.

    When a merge is needed, the working directory is first updated to
    the newly pulled changes. Local changes are then merged into the
    pulled changes. To switch the merge order, use --switch-parent.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    date = opts.get(b'date')
    if date:
        opts[b'date'] = dateutil.parsedate(date)

    parent = repo.dirstate.p1()
    branch = repo.dirstate.branch()
    try:
        branchnode = repo.branchtip(branch)
    except error.RepoLookupError:
        branchnode = None
    if parent != branchnode:
        raise error.Abort(
            _(b'working directory not at branch tip'),
            hint=_(b"use 'hg update' to check out branch tip"),
        )

    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()

        cmdutil.bailifchanged(repo)

        bheads = repo.branchheads(branch)
        bheads = [head for head in bheads if len(repo[head].children()) == 0]
        if len(bheads) > 1:
            raise error.Abort(
                _(b'multiple heads in this branch '
                  b'(use "hg heads ." and "hg merge" to merge)'))

        other = hg.peer(repo, opts, ui.expandpath(source))
        ui.status(
            _(b'pulling from %s\n') % util.hidepassword(ui.expandpath(source)))
        revs = None
        if opts[b'rev']:
            try:
                revs = [other.lookup(rev) for rev in opts[b'rev']]
            except error.CapabilityError:
                err = _(b"other repository doesn't support revision lookup, "
                        b"so a rev cannot be specified.")
                raise error.Abort(err)

        # Are there any changes at all?
        modheads = exchange.pull(repo, other, heads=revs).cgresult
        if modheads == 0:
            return 0

        # Is this a simple fast-forward along the current branch?
        newheads = repo.branchheads(branch)
        newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
        if len(newheads) == 1 and len(newchildren):
            if newchildren[0] != parent:
                return hg.update(repo, newchildren[0])
            else:
                return 0

        # Are there more than one additional branch heads?
        newchildren = [n for n in newchildren if n != parent]
        newparent = parent
        if newchildren:
            newparent = newchildren[0]
            hg.clean(repo, newparent)
        newheads = [n for n in newheads if n != newparent]
        if len(newheads) > 1:
            ui.status(
                _(b'not merging with %d other new branch heads '
                  b'(use "hg heads ." and "hg merge" to merge them)\n')
                % (len(newheads) - 1))
            return 1

        if not newheads:
            return 0

        # Otherwise, let's merge.
        err = False
        if newheads:
            # By default, we consider the repository we're pulling
            # *from* as authoritative, so we merge our changes into
            # theirs.
            if opts[b'switch_parent']:
                firstparent, secondparent = newparent, newheads[0]
            else:
                firstparent, secondparent = newheads[0], newparent
            ui.status(
                _(b'updating to %d:%s\n')
                % (repo.changelog.rev(firstparent), short(firstparent)))
            hg.clean(repo, firstparent)
            p2ctx = repo[secondparent]
            ui.status(
                _(b'merging with %d:%s\n') % (p2ctx.rev(), short(secondparent)))
            err = hg.merge(p2ctx, remind=False)

        if not err:
            # we don't translate commit messages
            message = cmdutil.logmessage(ui, opts) or (
                b'Automated merge with %s' % util.removeauth(other.url()))
            editopt = opts.get(b'edit') or opts.get(b'force_editor')
            editor = cmdutil.getcommiteditor(edit=editopt, editform=b'fetch')
            n = repo.commit(message, opts[b'user'], opts[b'date'],
                            editor=editor)
            ui.status(
                _(b'new changeset %d:%s merges remote changes with local\n')
                % (repo.changelog.rev(n), short(n)))

        return err

    finally:
        release(lock, wlock)

def trackedcmd(ui, repo, remotepath=None, *pats, **opts):
    """show or change the current narrowspec

    With no argument, shows the current narrowspec entries, one per line. Each
    line will be prefixed with 'I' or 'X' for included or excluded patterns,
    respectively.

    The narrowspec is comprised of expressions to match remote files and/or
    directories that should be pulled into your client.
    The narrowspec has *include* and *exclude* expressions, with excludes always
    trumping includes: that is, if a file matches an exclude expression, it will
    be excluded even if it also matches an include expression.
    Excluding files that were never included has no effect.

    Each included or excluded entry is in the format described by
    'hg help patterns'.

    The options allow you to add or remove included and excluded expressions.

    If --clear is specified, then all previous includes and excludes are DROPPED
    and replaced by the new ones specified to --addinclude and --addexclude.
    If --clear is specified without any further options, the narrowspec will be
    empty and will not match any files.
    """
    opts = pycompat.byteskwargs(opts)
    if changegroup.NARROW_REQUIREMENT not in repo.requirements:
        ui.warn(_('The narrow command is only supported on repositories cloned'
                  ' with --narrow.\n'))
        return 1

    # Before supporting, decide whether "hg tracked --clear" should mean
    # tracking no paths or all paths.
    if opts['clear']:
        ui.warn(_('The --clear option is not yet supported.\n'))
        return 1

    if narrowspec.needsexpansion(opts['addinclude'] + opts['addexclude']):
        raise error.Abort('Expansion not yet supported on widen/narrow')

    addedincludes = narrowspec.parsepatterns(opts['addinclude'])
    removedincludes = narrowspec.parsepatterns(opts['removeinclude'])
    addedexcludes = narrowspec.parsepatterns(opts['addexclude'])
    removedexcludes = narrowspec.parsepatterns(opts['removeexclude'])
    widening = addedincludes or removedexcludes
    narrowing = removedincludes or addedexcludes
    only_show = not widening and not narrowing

    # Only print the current narrowspec.
    if only_show:
        include, exclude = repo.narrowpats

        ui.pager('tracked')
        fm = ui.formatter('narrow', opts)
        for i in sorted(include):
            fm.startitem()
            fm.write('status', '%s ', 'I', label='narrow.included')
            fm.write('pat', '%s\n', i, label='narrow.included')
        for i in sorted(exclude):
            fm.startitem()
            fm.write('status', '%s ', 'X', label='narrow.excluded')
            fm.write('pat', '%s\n', i, label='narrow.excluded')
        fm.end()
        return 0

    with repo.wlock(), repo.lock():
        cmdutil.bailifchanged(repo)

        # Find the revisions we have in common with the remote. These will
        # be used for finding local-only changes for narrowing. They will
        # also define the set of revisions to update for widening.
        remotepath = ui.expandpath(remotepath or 'default')
        url, branches = hg.parseurl(remotepath)
        ui.status(_('comparing with %s\n') % util.hidepassword(url))
        remote = hg.peer(repo, opts, url)
        commoninc = discovery.findcommonincoming(repo, remote)

        oldincludes, oldexcludes = repo.narrowpats
        if narrowing:
            newincludes = oldincludes - removedincludes
            newexcludes = oldexcludes | addedexcludes
            _narrow(ui, repo, remote, commoninc, oldincludes, oldexcludes,
                    newincludes, newexcludes,
                    opts['force_delete_local_changes'])
            # _narrow() updated the narrowspec and _widen() below needs to
            # use the updated values as its base (otherwise removed includes
            # and addedexcludes will be lost in the resulting narrowspec)
            oldincludes = newincludes
            oldexcludes = newexcludes

        if widening:
            newincludes = oldincludes | addedincludes
            newexcludes = oldexcludes - removedexcludes
            _widen(ui, repo, remote, commoninc, newincludes, newexcludes)

    return 0

    if fd:
        fd.close()

def _getfile(self, tmpfile, filename, hash):
    try:
        chunks = self._get(hash)
    except urllib2.HTTPError, e:
        # 401s get converted to util.Aborts; everything else is fine being
        # turned into a StoreError
        raise basestore.StoreError(filename, hash, self.url, str(e))
    except urllib2.URLError, e:
        # This usually indicates a connection problem, so don't
        # keep trying with the other files... they will probably
        # all fail too.
        raise util.Abort('%s: %s' %
                         (util.hidepassword(self.url), e.reason))
    except IOError, e:
        raise basestore.StoreError(filename, hash, self.url, str(e))

    return lfutil.copyandhash(chunks, tmpfile)

def _verifyfile(self, cctx, cset, contents, standin, verified):
    filename = lfutil.splitstandin(standin)
    if not filename:
        return False
    fctx = cctx[standin]
    key = (filename, fctx.filenode())
    if key in verified:
        return False

    verified.add(key)

def fetch(ui, repo, source='default', **opts):
    '''pull changes from a remote repository, merge new changes if needed.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository.

    If the pulled changes add a new branch head, the head is
    automatically merged, and the result of the merge is committed.
    Otherwise, the working directory is updated to include the new
    changes.

    When a merge is needed, the working directory is first updated to
    the newly pulled changes. Local changes are then merged into the
    pulled changes. To switch the merge order, use --switch-parent.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success.
    '''

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    parent, _p2 = repo.dirstate.parents()
    branch = repo.dirstate.branch()
    try:
        branchnode = repo.branchtip(branch)
    except error.RepoLookupError:
        branchnode = None
    if parent != branchnode:
        raise util.Abort(_('working dir not at branch tip '
                           '(use "hg update" to check out branch tip)'))

    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()

        cmdutil.bailifchanged(repo)

        bheads = repo.branchheads(branch)
        bheads = [head for head in bheads if len(repo[head].children()) == 0]
        if len(bheads) > 1:
            raise util.Abort(_('multiple heads in this branch '
                               '(use "hg heads ." and "hg merge" to merge)'))

        other = hg.peer(repo, opts, ui.expandpath(source))
        ui.status(_('pulling from %s\n') %
                  util.hidepassword(ui.expandpath(source)))
        revs = None
        if opts['rev']:
            try:
                revs = [other.lookup(rev) for rev in opts['rev']]
            except error.CapabilityError:
                err = _("other repository doesn't support revision lookup, "
                        "so a rev cannot be specified.")
                raise util.Abort(err)

        # Are there any changes at all?
        modheads = exchange.pull(repo, other, heads=revs).cgresult
        if modheads == 0:
            return 0

        # Is this a simple fast-forward along the current branch?
        newheads = repo.branchheads(branch)
        newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
        if len(newheads) == 1 and len(newchildren):
            if newchildren[0] != parent:
                return hg.update(repo, newchildren[0])
            else:
                return 0

        # Are there more than one additional branch heads?
        newchildren = [n for n in newchildren if n != parent]
        newparent = parent
        if newchildren:
            newparent = newchildren[0]
            hg.clean(repo, newparent)
        newheads = [n for n in newheads if n != newparent]
        if len(newheads) > 1:
            ui.status(_('not merging with %d other new branch heads '
                        '(use "hg heads ." and "hg merge" to merge them)\n') %
                      (len(newheads) - 1))
            return 1

        if not newheads:
            return 0

        # Otherwise, let's merge.
        err = False
        if newheads:
            # By default, we consider the repository we're pulling
            # *from* as authoritative, so we merge our changes into
            # theirs.
            if opts['switch_parent']:
                firstparent, secondparent = newparent, newheads[0]
            else:
                firstparent, secondparent = newheads[0], newparent
            ui.status(_('updating to %d:%s\n') %
                      (repo.changelog.rev(firstparent), short(firstparent)))
            hg.clean(repo, firstparent)
            ui.status(_('merging with %d:%s\n') %
                      (repo.changelog.rev(secondparent), short(secondparent)))
            err = hg.merge(repo, secondparent, remind=False)

        if not err:
            # we don't translate commit messages
            message = (cmdutil.logmessage(ui, opts) or
                       ('Automated merge with %s' %
                        util.removeauth(other.url())))
            editopt = opts.get('edit') or opts.get('force_editor')
            editor = cmdutil.getcommiteditor(edit=editopt, editform='fetch')
            n = repo.commit(message, opts['user'], opts['date'], editor=editor)
            ui.status(_('new changeset %d:%s merges remote changes '
                        'with local\n') %
                      (repo.changelog.rev(n), short(n)))

        return err

    finally:
        release(lock, wlock)

def dopull(self):
    self.ui.status(_('pulling from %s\n') %
                   util.hidepassword(self.ui.expandpath(self.source)))
    exchange.pull(self.repo, self.remoterepository)
    self.ui.write('pull complete\n')

def kpush(ui, repo, bookmark=None, force=False, new_bookmark=False, **opts):
    """Push the current changeset (.) to the specified bookmark on the
    default push remote.

    Returns 0 if push was successful, 1 on error.
    """
    if bookmarks.listbookmarks(repo):
        raise util.Abort("local repo must not have any bookmarks")

    # First, push the changeset
    dest = ui.expandpath('default-push', 'default')
    dest, _ = hg.parseurl(dest)
    ui.status("pushing to %s\n" % util.hidepassword(dest))
    remote = hg.peer(repo, opts, dest)

    head = repo['.']

    # Push subrepos, copied from commands.push
    # TODO(alpert): What is this _subtoppath craziness?
    repo._subtoppath = dest
    try:
        # Push subrepos depth-first for coherent ordering
        subs = head.substate  # Only repos that are committed
        for s in sorted(subs):
            if head.sub(s).push(opts) == 0:
                return False
    finally:
        del repo._subtoppath

    result = repo.push(remote, force, revs=[head.node()])
    result = not result  # Uh, okay...

    # Then, update the bookmark
    bookmark = _read_bookmark(repo) if bookmark is None else bookmark
    remote_books = remote.listkeys('bookmarks')
    new_node = node.hex(repo.lookup('.'))

    if bookmark in remote_books:
        old_node = remote_books[bookmark]
        if new_node == old_node:
            ui.status("nothing to update\n")
            return 0
        elif repo[new_node] in repo[old_node].descendants():
            ui.status("updating bookmark %s\n" % bookmark)
        elif force:
            ui.status("force-updating bookmark %s\n" % bookmark)
        else:
            ui.warn("skipping non-fast-forward update of bookmark %s\n" %
                    bookmark)
            return 1
    elif new_bookmark:
        old_node = ''
        ui.status("creating bookmark %s\n" % bookmark)
    else:
        ui.warn('remote bookmark %r not found: did you want --new-bookmark?\n'
                % bookmark)
        return 1

    r = remote.pushkey('bookmarks', bookmark, old_node, new_node)
    if not r:
        # Either someone else pushed at the same time as us or new_node doesn't
        # exist in the remote repo (see bookmarks.pushbookmark).
        ui.warn("updating bookmark %s failed!\n" % bookmark)
        return 1

    return 0

def trackedcmd(ui, repo, remotepath=None, *pats, **opts):
    """show or change the current narrowspec

    With no argument, shows the current narrowspec entries, one per line. Each
    line will be prefixed with 'I' or 'X' for included or excluded patterns,
    respectively.

    The narrowspec is comprised of expressions to match remote files and/or
    directories that should be pulled into your client.
    The narrowspec has *include* and *exclude* expressions, with excludes always
    trumping includes: that is, if a file matches an exclude expression, it will
    be excluded even if it also matches an include expression.
    Excluding files that were never included has no effect.

    Each included or excluded entry is in the format described by
    'hg help patterns'.

    The options allow you to add or remove included and excluded expressions.

    If --clear is specified, then all previous includes and excludes are DROPPED
    and replaced by the new ones specified to --addinclude and --addexclude.
    If --clear is specified without any further options, the narrowspec will be
    empty and will not match any files.

    If --auto-remove-includes is specified, then those includes that don't match
    any files modified by currently visible local commits (those not shared by
    the remote) will be added to the set of explicitly specified includes to
    remove.

    --import-rules accepts a path to a file containing rules, allowing you to
    add --addinclude, --addexclude rules in bulk. Like the other include and
    exclude switches, the changes are applied immediately.
    """
    opts = pycompat.byteskwargs(opts)
    if repository.NARROW_REQUIREMENT not in repo.requirements:
        raise error.Abort(
            _(b'the tracked command is only supported on '
              b'repositories cloned with --narrow'))

    # Before supporting, decide whether "hg tracked --clear" should mean
    # tracking no paths or all paths.
    if opts[b'clear']:
        raise error.Abort(_(b'the --clear option is not yet supported'))

    # import rules from a file
    newrules = opts.get(b'import_rules')
    if newrules:
        try:
            filepath = os.path.join(encoding.getcwd(), newrules)
            fdata = util.readfile(filepath)
        except IOError as inst:
            raise error.Abort(
                _(b"cannot read narrowspecs from '%s': %s")
                % (filepath, encoding.strtolocal(inst.strerror)))
        includepats, excludepats, profiles = sparse.parseconfig(
            ui, fdata, b'narrow')
        if profiles:
            raise error.Abort(
                _(b"including other spec files using '%include' "
                  b"is not supported in narrowspec"))
        opts[b'addinclude'].extend(includepats)
        opts[b'addexclude'].extend(excludepats)

    addedincludes = narrowspec.parsepatterns(opts[b'addinclude'])
    removedincludes = narrowspec.parsepatterns(opts[b'removeinclude'])
    addedexcludes = narrowspec.parsepatterns(opts[b'addexclude'])
    removedexcludes = narrowspec.parsepatterns(opts[b'removeexclude'])
    autoremoveincludes = opts[b'auto_remove_includes']

    update_working_copy = opts[b'update_working_copy']
    only_show = not (addedincludes or removedincludes or addedexcludes or
                     removedexcludes or newrules or autoremoveincludes or
                     update_working_copy)

    oldincludes, oldexcludes = repo.narrowpats

    # filter the user passed additions and deletions into actual additions and
    # deletions of excludes and includes
    addedincludes -= oldincludes
    removedincludes &= oldincludes
    addedexcludes -= oldexcludes
    removedexcludes &= oldexcludes

    widening = addedincludes or removedexcludes
    narrowing = removedincludes or addedexcludes

    # Only print the current narrowspec.
    if only_show:
        ui.pager(b'tracked')
        fm = ui.formatter(b'narrow', opts)
        for i in sorted(oldincludes):
            fm.startitem()
            fm.write(b'status', b'%s ', b'I', label=b'narrow.included')
            fm.write(b'pat', b'%s\n', i, label=b'narrow.included')
        for i in sorted(oldexcludes):
            fm.startitem()
            fm.write(b'status', b'%s ', b'X', label=b'narrow.excluded')
            fm.write(b'pat', b'%s\n', i, label=b'narrow.excluded')
        fm.end()
        return 0

    if update_working_copy:
        with repo.wlock(), repo.lock(), repo.transaction(b'narrow-wc'):
            narrowspec.updateworkingcopy(repo)
            narrowspec.copytoworkingcopy(repo)
        return 0

    if not (widening or narrowing or autoremoveincludes):
        ui.status(_(b"nothing to widen or narrow\n"))
        return 0

    with repo.wlock(), repo.lock():
        cmdutil.bailifchanged(repo)

        # Find the revisions we have in common with the remote. These will
        # be used for finding local-only changes for narrowing. They will
        # also define the set of revisions to update for widening.
        remotepath = ui.expandpath(remotepath or b'default')
        url, branches = hg.parseurl(remotepath)
        ui.status(_(b'comparing with %s\n') % util.hidepassword(url))
        remote = hg.peer(repo, opts, url)

        # check narrow support before doing anything if widening needs to be
        # performed. In future we should also abort if client is ellipses and
        # server does not support ellipses
        if widening and wireprototypes.NARROWCAP not in remote.capabilities():
            raise error.Abort(_(b"server does not support narrow clones"))

        commoninc = discovery.findcommonincoming(repo, remote)

        if autoremoveincludes:
            outgoing = discovery.findcommonoutgoing(repo, remote,
                                                    commoninc=commoninc)
            ui.status(_(b'looking for unused includes to remove\n'))
            localfiles = set()
            for n in itertools.chain(outgoing.missing, outgoing.excluded):
                localfiles.update(repo[n].files())
            suggestedremovals = []
            for include in sorted(oldincludes):
                match = narrowspec.match(repo.root, [include], oldexcludes)
                if not any(match(f) for f in localfiles):
                    suggestedremovals.append(include)
            if suggestedremovals:
                for s in suggestedremovals:
                    ui.status(b'%s\n' % s)
                if (ui.promptchoice(_(b'remove these unused includes (yn)?'
                                      b'$$ &Yes $$ &No')) == 0):
                    removedincludes.update(suggestedremovals)
                    narrowing = True
            else:
                ui.status(_(b'found no unused includes\n'))

        if narrowing:
            newincludes = oldincludes - removedincludes
            newexcludes = oldexcludes | addedexcludes
            _narrow(
                ui,
                repo,
                remote,
                commoninc,
                oldincludes,
                oldexcludes,
                newincludes,
                newexcludes,
                opts[b'force_delete_local_changes'],
            )
            # _narrow() updated the narrowspec and _widen() below needs to
            # use the updated values as its base (otherwise removed includes
            # and addedexcludes will be lost in the resulting narrowspec)
            oldincludes = newincludes
            oldexcludes = newexcludes

        if widening:
            newincludes = oldincludes | addedincludes
            newexcludes = oldexcludes - removedexcludes
            _widen(
                ui,
                repo,
                remote,
                commoninc,
                oldincludes,
                oldexcludes,
                newincludes,
                newexcludes,
            )

    return 0