def ispath(repo, string):
    """Decide whether *string* names a path rather than a revision.

    The first argument to ``git checkout`` can be either a revision or a
    path.  We generally assume it is a revision unless it is obviously a
    path: there are too many ways to spell a revision in git to catch
    them all, so stay conservative.

    Returns True only when *string* resolves to an existing file or
    directory (in the working directory or in the ``.`` manifest) and is
    not already a known revision symbol.
    """
    # A definite revision wins outright -- don't even look for a file of
    # the same name.
    if scmutil.isrevsymbol(repo, string):
        return False

    cwd = repo.getcwd()
    repopath = string if cwd == '' else cwd + '/' + string

    # Present in the working directory?
    if repo.wvfs.exists(repopath):
        return True

    # Otherwise, tracked (as file or directory) in the '.' manifest?
    manifest = repo['.'].manifest()
    return (repopath in manifest) or manifest.hasdir(repopath)
def pushlog_feed(web):
    """WebCommand for producing the ATOM feed of the pushlog."""
    req = web.req

    req.qsparams['style'] = 'atom'
    # Need to reset the templater instance to use the new style.
    web.tmpl = web.templater(req)

    query = pushlog_setup(web.repo, req)

    # Feed timestamp: date of the newest push, or "now" if there are no
    # entries at all.
    if query.entries:
        dt = isotime(query.entries[0][2])
    else:
        dt = datetime.utcnow().isoformat().split('.', 1)[0] + 'Z'

    url = req.apppath or '/'
    if not url.endswith('/'):
        url += '/'

    # Lazily drop entries whose node is no longer visible in the repo.
    visible_entries = (
        (pushid, user, date, node)
        for (pushid, user, date, node) in query.entries
        if scmutil.isrevsymbol(web.repo, node)
    )

    entries = templateutil.mappinggenerator(
        feedentrygenerator,
        args=(visible_entries, web.repo, url, query.urlbase),
    )
    data = {
        'urlbase': query.urlbase,
        'url': url,
        'repo': query.reponame,
        'date': dt,
        'entries': entries,
    }

    web.res.headers['Content-Type'] = ATOM_MIMETYPE
    return web.sendtemplate('pushlog', **data)
def checkconflict(self, mark, force=False, target=None):
    """Check whether bookmark *mark* conflicts with existing git refs.

    Aborts when the bookmark already exists (unless ``force``); returns
    ``[]`` when the operation is acceptable.  When *target* is given and
    matches both the existing ref and the current HEAD, this is treated
    as re-activating the bookmark, which is allowed.
    """
    refname = _BMS_PREFIX + mark
    head = self.gitrepo.references['HEAD']

    if refname in self.gitrepo.references and not force:
        if not target:
            raise error.Abort(
                _(b"bookmark '%s' already exists (use -f to force)") % mark
            )
        if self.gitrepo.references[refname] == target and target == head:
            # re-activating a bookmark
            return []
        # moving a bookmark - forward?
        raise NotImplementedError

    # Warn when a sufficiently long bookmark name shadows a changeset
    # hash prefix -- a likely sign of a forgotten -r flag.
    if not force and len(mark) > 3:
        try:
            shadowhash = scmutil.isrevsymbol(self._repo, mark)
        except error.LookupError:
            # ambiguous identifier
            shadowhash = False
        if shadowhash:
            self._repo.ui.warn(
                _(
                    b"bookmark %s matches a changeset hash\n"
                    b"(did you leave a -r out of an 'hg bookmark' "
                    b"command?)\n"
                )
                % mark
            )
    return []
def pushlog_changelist(_context, web, query, tiponly):
    '''Generator yielding changelist entries for the pushlog.

    Entries from ``query.entries`` are grouped by push id and rendered
    one push at a time via ``handle_entries_for_push``.  When *tiponly*
    is true, only the first (most recent) push is emitted.
    '''
    parity = paritygen(web.stripecount)
    p = next(parity)
    # Iterate over query entries if we have not reached the limit and
    # the node is visible in the repo (hidden/stripped nodes are skipped
    # lazily by the isrevsymbol filter).
    visiblequeryentries = (
        (pushid, user, date, node)
        for pushid, user, date, node in query.entries
        if scmutil.isrevsymbol(web.repo, node)
    )
    # FIFO queue. Accumulate pushes as we need to
    # count how many entries correspond with a given push
    samepush = collections.deque()
    # Get the first element of the query
    # return if there are no entries
    try:
        pushid, user, date, node = next(visiblequeryentries)
        lastid = pushid
        samepush.append(
            (pushid, user, date, node)
        )
    except StopIteration:
        return
    # Iterate over all the non-hidden entries and aggregate
    # them together per unique pushid
    for allentry in visiblequeryentries:
        pushid, user, date, node = allentry
        # If the entries both come from the same push, add to the accumulated set of entries
        if pushid == lastid:
            samepush.append(allentry)
        # Once the pushid's are different, yield the result
        else:
            # If this is the first changeset for this push, put the change in the queue
            # NOTE(review): samepush is never empty here (an entry is always
            # queued before reaching this branch), so firstpush appears to
            # always be False -- preserved as-is; confirm before simplifying.
            firstpush = len(samepush) == 0
            if firstpush:
                samepush.append(allentry)
            for entry in handle_entries_for_push(web, samepush, p):
                yield entry
            if tiponly:
                return
            # Set the lastid
            lastid = pushid
            # Swap parity once we are on to processing another push
            p = next(parity)
            # Reset the aggregation of entries, as we are now processing a new push
            samepush = collections.deque()
            # If this was not the first push, the current entry needs processing
            # Add it to the queue here
            if not firstpush:
                samepush.append(allentry)
    # We don't need to display the remaining entries on the page if there are none
    if not samepush:
        return
    # Display the remaining entries for the page
    for entry in handle_entries_for_push(web, samepush, p):
        yield entry
    if tiponly:
        return
def backups(ui, repo, *pats, **opts):
    '''lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    '''
    supportsmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
    if supportsmarkers and ui.configbool('backups', 'warnobsolescence', True):
        # Warn users of obsolescence markers that they probably don't want to
        # use backups but reflog instead
        ui.warn(msgwithcreatermarkers)

    backuppath = repo.vfs.join("strip-backup")
    # FIX: filter() returns an iterator on Python 3, which has no .sort()
    # method -- materialize and order via sorted() (newest bundle first).
    backups = sorted(
        filter(os.path.isfile, glob.glob(backuppath + "/*.hg")),
        key=os.path.getmtime,
        reverse=True,
    )

    opts['bundle'] = ''
    opts['force'] = None

    if util.safehasattr(cmdutil, 'loglimit'):
        # legacy case
        loglimit = cmdutil.loglimit
        show_changeset = cmdutil.show_changeset
    else:
        # since core commit c8e2d6ed1f9e
        from mercurial import logcmdutil
        loglimit = logcmdutil.getlimit
        show_changeset = logcmdutil.changesetdisplayer

    def display(other, chlist, displayer):
        # Render up to --limit changesets from chlist, honoring
        # --newest-first and --no-merges.
        limit = loglimit(opts)
        if opts.get('newest_first'):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts.get('no_merges') and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get('recover')
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_("%s already exists in the repo\n") % recovernode)
            return
    else:
        msg = _('Recover changesets using: hg backups --recover '
                '<changeset hash>\n\nAvailable backup changesets:')
        ui.status(msg, label="status.removed")

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, pycompat.getcwd())
        source = ui.expandpath(source)
        source, branches = hg.parseurl(source, opts.get('branch'))
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            msg = _("\nwarning: unable to open bundle %s") % source
            hint = _("\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg)
            ui.warn(hint)
            continue
        revs, checkout = hg.addbranchrevs(repo, other, branches,
                                          opts.get('rev'))

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Suppress chatter while probing the bundle for its changesets.
        quiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts["bundle"], opts["force"])
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet

        try:
            if chlist:
                if recovernode:
                    tr = lock = None
                    try:
                        lock = repo.lock()
                        if scmutil.isrevsymbol(other, recovernode):
                            ui.status(_("Unbundling %s\n") % (recovernode))
                            f = hg.openpath(ui, source)
                            gen = exchange.readbundle(ui, f, source)
                            tr = repo.transaction("unbundle")
                            if not isinstance(gen, bundle2.unbundle20):
                                gen.apply(repo, 'unbundle', 'bundle:' + source)
                            if isinstance(gen, bundle2.unbundle20):
                                bundle2.applybundle(repo, gen, tr,
                                                    source='unbundle',
                                                    url='bundle:' + source)
                            tr.close()
                            # First matching bundle wins; stop scanning.
                            break
                    finally:
                        lockmod.release(lock, tr)
                else:
                    backupdate = os.path.getmtime(source)
                    backupdate = time.strftime('%a %H:%M, %Y-%m-%d',
                                               time.localtime(backupdate))
                    ui.status("\n%s\n" % (backupdate.ljust(50)))
                    if not ui.verbose:
                        opts['template'] = verbosetemplate
                    else:
                        ui.status("%s%s\n" % ("bundle:".ljust(13), source))
                    displayer = show_changeset(ui, other, opts, False)
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()