def record(self, namespace, name, oldhashes, newhashes):
    """Record a new journal entry

    * namespace: an opaque string; this can be used to filter on the type
      of recorded entries.
    * name: the name defining this entry; for bookmarks, this is the
      bookmark name. Can be filtered on when retrieving entries.
    * oldhashes and newhashes: each a single binary hash, or a list of
      binary hashes. These represent the old and new position of the
      named item.
    """
    # Normalize scalar hashes into single-element lists so the entry
    # always stores lists.
    oldhashes = oldhashes if isinstance(oldhashes, list) else [oldhashes]
    newhashes = newhashes if isinstance(newhashes, list) else [newhashes]

    newentry = journalentry(
        dateutil.makedate(),
        self.user,
        self.command,
        namespace,
        name,
        oldhashes,
        newhashes,
    )

    # Default to the local journal; write to the shared repository
    # instead when this feature is being shared between working copies.
    target = self.vfs
    if (self.sharedvfs is not None
            and sharednamespaces.get(namespace) in self.sharedfeatures):
        target = self.sharedvfs
    self._write(target, newentry)
def template_pushdate(context, mapping):
    """:pushdate: Date information. When this changeset was pushed."""
    repo = context.resource(mapping, 'repo')
    ctx = context.resource(mapping, 'ctx')
    cache = context.resource(mapping, 'cache')
    # Only the push timestamp is needed here; the other fields are
    # provided by sibling keywords.
    _pushid, _who, when, _nodes = _getpushinfo(repo, ctx, cache)
    if not when:
        return None
    return dateutil.makedate(when)
def addpushmetadata(repo, ctx, d):
    """Annotate dict ``d`` with pushlog metadata for changeset ``ctx``.

    No-op when the repository has no pushlog or the changeset has no
    recorded push.
    """
    # Guard clauses: bail out early when there is nothing to record.
    if not hasattr(repo, 'pushlog'):
        return
    push = repo.pushlog.pushfromchangeset(ctx)
    if not push:
        return
    d['pushid'] = push.pushid
    d['pushuser'] = push.user
    d['pushdate'] = dateutil.makedate(push.when)
    d['pushnodes'] = push.nodes
    # Last node in the push is treated as its head.
    d['pushhead'] = push.nodes[-1]
def __call__(self, *args, **kwargs):
    """Decorate each entry with pushlog id/date before delegating."""
    for item in kwargs.get('entries', []):
        push = web.repo.pushlog.pushfromnode(bin(item[b'node']))
        if not push:
            # No recorded push for this node: expose explicit None values.
            item[b'pushid'] = None
            item[b'pushdate'] = None
        else:
            item[b'pushid'] = push.pushid
            item[b'pushdate'] = dateutil.makedate(push.when)
    return super(tmplwrapper, self).__call__(*args, **kwargs)
def listcmd(ui, repo, pats, opts):
    """subcommand that displays the list of shelves

    For each shelve (newest first, per listshelves) print its name,
    age, and first description line; with --patch or --stat, also print
    the patch or a diffstat. ``pats`` filters shelves by exact name.
    """
    pats = set(pats)
    width = 80
    if not ui.plain():
        width = ui.termwidth()
    # The first (newest) shelve gets a distinct label; subsequent ones
    # fall back to the generic name label.
    namelabel = 'shelve.newest'
    ui.pager('shelve')
    for mtime, name in listshelves(repo):
        sname = util.split(name)[1]
        if pats and sname not in pats:
            continue
        ui.write(sname, label=namelabel)
        namelabel = 'shelve.name'
        if ui.quiet:
            # Quiet mode: names only.
            ui.write('\n')
            continue
        # Column layout: name padded to 16, age padded to 12, then the
        # first description line ellipsized to the remaining width.
        ui.write(' ' * (16 - len(sname)))
        used = 16
        date = dateutil.makedate(mtime)
        age = '(%s)' % templatefilters.age(date, abbrev=True)
        ui.write(age, label='shelve.age')
        ui.write(' ' * (12 - len(age)))
        used += 12
        with open(name + '.' + patchextension, 'rb') as fp:
            # Skip the '#'-prefixed patch header; the first non-header
            # line is the shelve description.
            while True:
                line = fp.readline()
                if not line:
                    break
                if not line.startswith('#'):
                    desc = line.rstrip()
                    if ui.formatted():
                        desc = stringutil.ellipsis(desc, width - used)
                    ui.write(desc)
                    break
            ui.write('\n')
            if not (opts['patch'] or opts['stat']):
                continue
            # Remaining lines of the file are the diff itself.
            difflines = fp.readlines()
            if opts['patch']:
                for chunk, label in patch.difflabel(iter, difflines):
                    ui.write(chunk, label=label)
            if opts['stat']:
                for chunk, label in patch.diffstatui(difflines, width=width):
                    ui.write(chunk, label=label)
def synthesize(ui, repo, descpath, **opts):
    '''synthesize commits based on a model of an existing repository

    The model must have been generated by :hg:`analyze`. Commits will
    be generated randomly according to the probabilities described in
    the model. If --initfiles is set, the repository will be seeded with
    the given number files following the modeled repository's directory
    structure.

    When synthesizing new content, commit descriptions, and user
    names, words will be chosen randomly from a dictionary that is
    presumed to contain one word per line. Use --dict to specify the
    path to an alternate dictionary to use.
    '''
    try:
        fp = hg.openpath(ui, descpath)
    except Exception as err:
        # NOTE(review): err[0].strerror assumes a tuple-style exception
        # arg with a strerror attribute — verify this holds for the
        # exceptions hg.openpath can raise.
        raise error.Abort('%s: %s' % (descpath, err[0].strerror))
    desc = json.load(fp)
    fp.close()

    def cdf(l):
        # Build a (values, cumulative-probabilities) pair from a list of
        # (value, weight) items, sorted by descending weight, for
        # sampling via bisect in pick().
        if not l:
            return [], []
        vals, probs = zip(*sorted(l, key=lambda x: x[1], reverse=True))
        t = float(sum(probs, 0))
        s, cdfs = 0, []
        for v in probs:
            s += v
            cdfs.append(s / t)
        return vals, cdfs

    # Precompute sampling distributions for every modeled quantity.
    lineschanged = cdf(desc['lineschanged'])
    fileschanged = cdf(desc['fileschanged'])
    filesadded = cdf(desc['filesadded'])
    dirsadded = cdf(desc['dirsadded'])
    filesremoved = cdf(desc['filesremoved'])
    linelengths = cdf(desc['linelengths'])
    parents = cdf(desc['parents'])
    p1distance = cdf(desc['p1distance'])
    p2distance = cdf(desc['p2distance'])
    interarrival = cdf(desc['interarrival'])
    linesinfilesadded = cdf(desc['linesinfilesadded'])
    tzoffset = cdf(desc['tzoffset'])

    # Words for file content, descriptions, and user names come from a
    # one-word-per-line dictionary file.
    dictfile = opts.get('dict') or '/usr/share/dict/words'
    try:
        fp = open(dictfile, 'rU')
    except IOError as err:
        raise error.Abort('%s: %s' % (dictfile, err.strerror))
    words = fp.read().splitlines()
    fp.close()

    initdirs = {}
    if desc['initdirs']:
        for k, v in desc['initdirs']:
            # '.hg' is reserved; rename modeled directories that use it.
            initdirs[k.encode('utf-8').replace('.hg', '_hg')] = v
        initdirs = renamedirs(initdirs, words)
    initdirscdf = cdf(initdirs)

    def pick(cdf):
        # Sample one value from a (values, cumulative-probs) pair.
        return cdf[0][bisect.bisect_left(cdf[1], random.random())]

    def pickpath():
        return os.path.join(pick(initdirscdf), random.choice(words))

    def makeline(minimum=0):
        # Generate a random line of at least `minimum` characters made
        # of dictionary words.
        total = max(minimum, pick(linelengths))
        c, l = 0, []
        while c < total:
            w = random.choice(words)
            c += len(w) + 1
            l.append(w)
        return ' '.join(l)

    wlock = repo.wlock()
    lock = repo.lock()

    nevertouch = {'.hgsub', '.hgignore', '.hgtags'}

    _synthesizing = _('synthesizing')
    _files = _('initial files')
    _changesets = _('changesets')

    # Synthesize a single initial revision adding files to the repo according
    # to the modeled directory structure.
    initcount = int(opts['initfiles'])
    if initcount and initdirs:
        pctx = repo[None].parents()[0]
        dirs = set(pctx.dirs())
        files = {}

        def validpath(path):
            # Don't pick filenames which are already directory names.
            if path in dirs:
                return False
            # Don't pick directories which were used as file names.
            while path:
                if path in files:
                    return False
                path = os.path.dirname(path)
            return True

        progress = ui.makeprogress(_synthesizing, unit=_files,
                                   total=initcount)
        for i in xrange(0, initcount):
            progress.update(i)

            path = pickpath()
            while not validpath(path):
                path = pickpath()
            data = '%s contents\n' % path
            files[path] = data
            # Register every ancestor directory of the new file.
            dir = os.path.dirname(path)
            while dir and dir not in dirs:
                dirs.add(dir)
                dir = os.path.dirname(dir)

        def filectxfn(repo, memctx, path):
            return context.memfilectx(repo, memctx, path, files[path])

        progress.complete()
        message = 'synthesized wide repo with %d files' % (len(files),)
        mc = context.memctx(repo, [pctx.node(), nullid], message,
                            files, filectxfn, ui.username(),
                            '%d %d' % dateutil.makedate())
        initnode = mc.commit()
        if ui.debugflag:
            hexfn = hex
        else:
            hexfn = short
        ui.status(_('added commit %s with %d files\n')
                  % (hexfn(initnode), len(files)))

    # Synthesize incremental revisions to the repository, adding repo depth.
    count = int(opts['count'])
    heads = set(map(repo.changelog.rev, repo.heads()))
    progress = ui.makeprogress(_synthesizing, unit=_changesets, total=count)
    for i in xrange(count):
        progress.update(i)

        node = repo.changelog.node
        revs = len(repo)

        def pickhead(heads, distance):
            # Choose an existing head near a modeled distance from tip;
            # fall back to the null revision when there are no heads.
            if heads:
                lheads = sorted(heads)
                rev = revs - min(pick(distance), revs)
                if rev < lheads[-1]:
                    rev = lheads[bisect.bisect_left(lheads, rev)]
                else:
                    rev = lheads[-1]
                return rev, node(rev)
            return nullrev, nullid

        r1 = revs - min(pick(p1distance), revs)
        p1 = node(r1)

        # the number of heads will grow without bound if we use a pure
        # model, so artificially constrain their proliferation
        toomanyheads = len(heads) > random.randint(1, 20)
        if p2distance[0] and (pick(parents) == 2 or toomanyheads):
            r2, p2 = pickhead(heads.difference([r1]), p2distance)
        else:
            r2, p2 = nullrev, nullid

        pl = [p1, p2]
        pctx = repo[r1]
        mf = pctx.manifest()
        mfk = mf.keys()
        changes = {}
        if mfk:
            # Mutate a modeled number of existing files, skipping
            # special files, binaries and symlinks (up to 10 retries).
            for __ in xrange(pick(fileschanged)):
                for __ in xrange(10):
                    fctx = pctx.filectx(random.choice(mfk))
                    path = fctx.path()
                    if not (path in nevertouch or fctx.isbinary() or
                            'l' in fctx.flags()):
                        break
                lines = fctx.data().splitlines()
                add, remove = pick(lineschanged)
                for __ in xrange(remove):
                    if not lines:
                        break
                    del lines[random.randrange(0, len(lines))]
                for __ in xrange(add):
                    lines.insert(random.randint(0, len(lines)), makeline())
                path = fctx.path()
                changes[path] = '\n'.join(lines) + '\n'
            for __ in xrange(pick(filesremoved)):
                path = random.choice(mfk)
                for __ in xrange(10):
                    path = random.choice(mfk)
                    if path not in changes:
                        break
        if filesadded:
            # Create new files under either existing or freshly invented
            # directories, with modeled line counts.
            dirs = list(pctx.dirs())
            dirs.insert(0, '')
            for __ in xrange(pick(filesadded)):
                pathstr = ''
                while pathstr in dirs:
                    path = [random.choice(dirs)]
                    if pick(dirsadded):
                        path.append(random.choice(words))
                    path.append(random.choice(words))
                    pathstr = '/'.join(filter(None, path))
                data = '\n'.join(
                    makeline()
                    for __ in xrange(pick(linesinfilesadded))) + '\n'
                changes[pathstr] = data

        def filectxfn(repo, memctx, path):
            # Returning None marks the file as removed.
            if path not in changes:
                return None
            return context.memfilectx(repo, memctx, path, changes[path])

        if not changes:
            continue
        if revs:
            date = repo['tip'].date()[0] + pick(interarrival)
        else:
            date = time.time() - (86400 * count)
        # dates in mercurial must be positive, fit in 32-bit signed integers.
        date = min(0x7fffffff, max(0, date))
        user = random.choice(words) + '@' + random.choice(words)
        mc = context.memctx(repo, pl, makeline(minimum=2),
                            sorted(changes),
                            filectxfn, user,
                            '%d %d' % (date, pick(tzoffset)))
        newnode = mc.commit()
        heads.add(repo.changelog.rev(newnode))
        heads.discard(r1)
        heads.discard(r2)
    progress.complete()

    lock.release()
    wlock.release()
def email(ui, repo, *revs, **opts):
    """send changesets by email

    By default, diffs are sent in the format generated by
    :hg:`export`, one per message. The series starts with a "[PATCH 0
    of N]" introduction, which describes the series as a whole.

    Each patch email has a Subject line of "[PATCH M of N] ...", using
    the first line of the changeset description as the subject text.
    The message contains two or three parts. First, the changeset
    description.

    With the -d/--diffstat option, if the diffstat program is
    installed, the result of running diffstat on the patch is inserted.

    Finally, the patch itself, as generated by :hg:`export`.

    With the -d/--diffstat or --confirm options, you will be presented
    with a final summary of all messages and asked for confirmation before
    the messages are sent.

    By default the patch is included as text in the email body for
    easy reviewing. Using the -a/--attach option will instead create
    an attachment for the patch. With -i/--inline an inline attachment
    will be created. You can include a patch both as text in the email
    body and as a regular or an inline attachment by combining the
    -a/--attach or -i/--inline with the --body option.

    With -B/--bookmark changesets reachable by the given bookmark are
    selected.

    With -o/--outgoing, emails will be generated for patches not found
    in the destination repository (or only those which are ancestors
    of the specified revisions if any are provided)

    With -b/--bundle, changesets are selected as for --outgoing, but a
    single email containing a binary Mercurial bundle as an attachment
    will be sent. Use the ``patchbomb.bundletype`` config option to
    control the bundle type as with :hg:`bundle --type`.

    With -m/--mbox, instead of previewing each patchbomb message in a
    pager or sending the messages directly, it will create a UNIX
    mailbox file with the patch emails. This mailbox file can be
    previewed with any mail user agent which supports UNIX mbox
    files.

    With -n/--test, all steps will run, but mail will not be sent.
    You will be prompted for an email recipient address, a subject and
    an introductory message describing the patches of your patchbomb.
    Then when all is done, patchbomb messages are displayed.

    In case email sending fails, you will find a backup of your series
    introductory message in ``.hg/last-email.txt``.

    The default behavior of this command can be customized through
    configuration. (See :hg:`help patchbomb` for details)

    Examples::

      hg email -r 3000          # send patch 3000 only
      hg email -r 3000 -r 3001  # send patches 3000 and 3001
      hg email -r 3000:3005     # send patches 3000 through 3005
      hg email 3000             # send patch 3000 (deprecated)

      hg email -o               # send all patches not in default
      hg email -o DEST          # send all patches not in DEST
      hg email -o -r 3000       # send all ancestors of 3000 not in default
      hg email -o -r 3000 DEST  # send all ancestors of 3000 not in DEST
      hg email -B feature       # send all ancestors of feature bookmark

      hg email -b               # send bundle of all patches not in default
      hg email -b DEST          # send bundle of all patches not in DEST
      hg email -b -r 3000       # bundle of all ancestors of 3000 not in default
      hg email -b -r 3000 DEST  # bundle of all ancestors of 3000 not in DEST

      hg email -o -m mbox &&    # generate an mbox file...
        mutt -R -f mbox         # ... and view it with mutt
      hg email -o -m mbox &&    # generate an mbox file ...
        formail -s sendmail \\   # ... and use formail to send from the mbox
          -bm -t < mbox         # ... using sendmail

    Before using this command, you will need to enable email in your
    hgrc. See the [email] section in hgrc(5) for details.
    """
    opts = pycompat.byteskwargs(opts)

    _charsets = mail._charsets(ui)

    bundle = opts.get(b'bundle')
    date = opts.get(b'date')
    mbox = opts.get(b'mbox')
    outgoing = opts.get(b'outgoing')
    rev = opts.get(b'rev')
    bookmark = opts.get(b'bookmark')

    if not (opts.get(b'test') or mbox):
        # really sending
        mail.validateconfig(ui)

    # At least one revision-selection mechanism is required.
    if not (revs or rev or outgoing or bundle or bookmark):
        raise error.Abort(
            _(b'specify at least one changeset with -B, -r or -o'))

    if outgoing and bundle:
        raise error.Abort(
            _(b"--outgoing mode always on with --bundle;"
              b" do not re-specify --outgoing"))
    cmdutil.check_at_most_one_arg(opts, b'rev', b'bookmark')

    if outgoing or bundle:
        # In outgoing/bundle mode the positional argument is a
        # destination, not a revision.
        if len(revs) > 1:
            raise error.Abort(_(b"too many destinations"))
        if revs:
            dest = revs[0]
        else:
            dest = None
        revs = []

    if rev:
        if revs:
            raise error.Abort(
                _(b'use only one form to specify the revision'))
        revs = rev
    elif bookmark:
        if bookmark not in repo._bookmarks:
            raise error.Abort(_(b"bookmark '%s' not found") % bookmark)
        revs = scmutil.bookmarkrevs(repo, bookmark)

    revs = scmutil.revrange(repo, revs)
    if outgoing:
        revs = _getoutgoing(repo, dest, revs)
    if bundle:
        opts[b'revs'] = [b"%d" % r for r in revs]

    # check if revision exist on the public destination
    publicurl = repo.ui.config(b'patchbomb', b'publicurl')
    if publicurl:
        repo.ui.debug(b'checking that revision exist in the public repo\n')
        try:
            publicpeer = hg.peer(repo, {}, publicurl)
        except error.RepoError:
            repo.ui.write_err(
                _(b'unable to access public repo: %s\n') % publicurl)
            raise
        if not publicpeer.capable(b'known'):
            repo.ui.debug(
                b'skipping existence checks: public repo too old\n')
        else:
            out = [repo[r] for r in revs]
            known = publicpeer.known(h.node() for h in out)
            missing = []
            for idx, h in enumerate(out):
                if not known[idx]:
                    missing.append(h)
            if missing:
                if len(missing) > 1:
                    msg = _(b'public "%s" is missing %s and %i others')
                    msg %= (publicurl, missing[0], len(missing) - 1)
                else:
                    msg = _(b'public url %s is missing %s')
                    msg %= (publicurl, missing[0])
                missingrevs = [ctx.rev() for ctx in missing]
                revhint = b' '.join(
                    b'-r %s' % h
                    for h in repo.set(b'heads(%ld)', missingrevs))
                hint = _(b"use 'hg push %s %s'") % (publicurl, revhint)
                raise error.Abort(msg, hint=hint)

    # start
    if date:
        start_time = dateutil.parsedate(date)
    else:
        start_time = dateutil.makedate()

    def genmsgid(id):
        return _msgid(id[:20], int(start_time[0]))

    # deprecated config: patchbomb.from
    sender = (opts.get(b'from') or ui.config(b'email', b'from')
              or ui.config(b'patchbomb', b'from')
              or prompt(ui, b'From', ui.username()))

    if bundle:
        stropts = pycompat.strkwargs(opts)
        bundledata = _getbundle(repo, dest, **stropts)
        bundleopts = stropts.copy()
        bundleopts.pop('bundle', None)  # already processed
        msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
    else:
        msgs = _getpatchmsgs(repo, sender, revs,
                             **pycompat.strkwargs(opts))

    showaddrs = []

    def getaddrs(header, ask=False, default=None):
        # Resolve an address header from command-line options, then
        # config ([email]/[patchbomb]), then interactive prompt.
        configkey = header.lower()
        opt = header.replace(b'-', b'_').lower()
        addrs = opts.get(opt)
        if addrs:
            showaddrs.append(b'%s: %s' % (header, b', '.join(addrs)))
            return mail.addrlistencode(ui, addrs, _charsets,
                                       opts.get(b'test'))

        # not on the command line: fallback to config and then maybe ask
        addr = (ui.config(b'email', configkey)
                or ui.config(b'patchbomb', configkey))
        if not addr:
            specified = (ui.hasconfig(b'email', configkey)
                         or ui.hasconfig(b'patchbomb', configkey))
            if not specified and ask:
                addr = prompt(ui, header, default=default)
        if addr:
            showaddrs.append(b'%s: %s' % (header, addr))
            return mail.addrlistencode(ui, [addr], _charsets,
                                       opts.get(b'test'))
        elif default:
            return mail.addrlistencode(ui, [default], _charsets,
                                       opts.get(b'test'))
        return []

    to = getaddrs(b'To', ask=True)
    if not to:
        # we can get here in non-interactive mode
        raise error.Abort(_(b'no recipient addresses provided'))
    cc = getaddrs(b'Cc', ask=True, default=b'')
    bcc = getaddrs(b'Bcc')
    replyto = getaddrs(b'Reply-To')

    confirm = ui.configbool(b'patchbomb', b'confirm')
    confirm |= bool(opts.get(b'diffstat') or opts.get(b'confirm'))

    if confirm:
        # Show a summary of everything about to be sent and ask before
        # proceeding.
        ui.write(_(b'\nFinal summary:\n\n'),
                 label=b'patchbomb.finalsummary')
        ui.write((b'From: %s\n' % sender), label=b'patchbomb.from')
        for addr in showaddrs:
            ui.write(b'%s\n' % addr, label=b'patchbomb.to')
        for m, subj, ds in msgs:
            ui.write((b'Subject: %s\n' % subj),
                     label=b'patchbomb.subject')
            if ds:
                ui.write(ds, label=b'patchbomb.diffstats')
        ui.write(b'\n')
        if ui.promptchoice(
                _(b'are you sure you want to send (yn)?$$ &Yes $$ &No')):
            raise error.Abort(_(b'patchbomb canceled'))

    ui.write(b'\n')

    parent = opts.get(b'in_reply_to') or None
    # angle brackets may be omitted, they're not semantically part of the msg-id
    if parent is not None:
        parent = encoding.strfromlocal(parent)
        if not parent.startswith('<'):
            parent = '<' + parent
        if not parent.endswith('>'):
            parent += '>'

    sender_addr = eutil.parseaddr(encoding.strfromlocal(sender))[1]
    sender = mail.addressencode(ui, sender, _charsets, opts.get(b'test'))
    sendmail = None
    firstpatch = None
    progress = ui.makeprogress(_(b'sending'), unit=_(b'emails'),
                               total=len(msgs))
    for i, (m, subj, ds) in enumerate(msgs):
        try:
            # Messages that carry an X-Mercurial-Node header get a
            # node-derived Message-Id and share a series id; others
            # (e.g. the intro message) fall through to TypeError.
            m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
            if not firstpatch:
                firstpatch = m['Message-Id']
            m['X-Mercurial-Series-Id'] = firstpatch
        except TypeError:
            m['Message-Id'] = genmsgid('patchbomb')
        if parent:
            m['In-Reply-To'] = parent
            m['References'] = parent
        if not parent or 'X-Mercurial-Node' not in m:
            parent = m['Message-Id']

        m['User-Agent'] = ('Mercurial-patchbomb/%s'
                           % util.version().decode())
        m['Date'] = eutil.formatdate(start_time[0], localtime=True)

        # Bump the timestamp so each message sorts after the previous one.
        start_time = (start_time[0] + 1, start_time[1])
        m['From'] = sender
        m['To'] = ', '.join(to)
        if cc:
            m['Cc'] = ', '.join(cc)
        if bcc:
            m['Bcc'] = ', '.join(bcc)
        if replyto:
            m['Reply-To'] = ', '.join(replyto)
        if opts.get(b'test'):
            # Test mode: display the message instead of sending it.
            ui.status(_(b'displaying '), subj, b' ...\n')
            ui.pager(b'email')
            generator = mail.Generator(ui, mangle_from_=False)
            try:
                generator.flatten(m, False)
                ui.write(b'\n')
            except IOError as inst:
                if inst.errno != errno.EPIPE:
                    raise
        else:
            if not sendmail:
                # Connect lazily, only once we know mail will be sent.
                sendmail = mail.connect(ui, mbox=mbox)
            ui.status(_(b'sending '), subj, b' ...\n')
            progress.update(i, item=subj)
            if not mbox:
                # Exim does not remove the Bcc field
                del m['Bcc']
            fp = stringio()
            generator = mail.Generator(fp, mangle_from_=False)
            generator.flatten(m, False)
            alldests = to + bcc + cc
            sendmail(sender_addr, alldests, fp.getvalue())

    progress.complete()